feat: Add Waveshare P4 panel device integration with display streaming and touch input, alongside core streaming engine and compiler updates.

This commit is contained in:
enzotar 2026-03-02 16:08:49 -08:00
parent f01cd10c0a
commit cc6aac8697
37 changed files with 3595 additions and 85 deletions

View file

@ -0,0 +1,7 @@
[ 45ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 2047ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 6049ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 14050ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 30052ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 60058ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 90068ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917

View file

@ -0,0 +1,39 @@
[ 62ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 2063ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 6064ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 14068ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 30074ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 60078ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 63564ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 65573ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 69594ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 77667ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 93796ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 123798ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 150080ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 152081ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 154598ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 156600ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 159117ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 161123ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 162644ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 164664ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 168688ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 176845ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 193115ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 201781ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 204796ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 208549ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 213697ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 220738ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 226255ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 234833ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 240122ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 244124ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 252125ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 268127ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 282783ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 284784ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 288790ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 296798ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 312810ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917

View file

@ -0,0 +1,20 @@
[ 35ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 2055ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 6147ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 9719ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 12217ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 16798ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 22094ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 27465ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 33207ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 43479ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 59481ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 89482ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 119484ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 147588ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 148102ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 150106ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 154109ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 162126ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 178153ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 208154ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917

View file

@ -0,0 +1,24 @@
[ 24ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 2025ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 6028ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 14031ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 30033ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 60042ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 90047ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 120049ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 150045ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 152046ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 156050ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 159564ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 161569ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 165573ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 173593ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 174115ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 176186ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 180295ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 180833ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 183067ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 186754ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 190208ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 196051ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 203460ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917

View file

@ -0,0 +1,36 @@
[ 1250ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 5619ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 12233ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 24505ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 40509ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 40539ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 41553ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 43554ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 47558ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 55563ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 71574ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 101597ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 131610ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 161619ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 191620ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 221622ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 251623ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 281626ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 311628ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 341629ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 371631ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 401632ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 431633ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 461635ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 491637ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 521640ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 551642ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 581644ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 611645ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 641646ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 671648ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 701649ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 731651ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 761652ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 791655ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917
[ 821657ms] [ERROR] WebSocket connection to 'ws://localhost:9100/peer/tetris' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/:917

Binary file not shown.

After

Width:  |  Height:  |  Size: 66 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 95 KiB

29
TODO.md Normal file
View file

@ -0,0 +1,29 @@
# DreamStack TODO
## Streaming ✅
- [x] Signal streaming, pixel streaming, delta streaming, touch/gamepad/resize input
- [x] Pixel streaming for DOM apps, Opus audio, receiver gamepad, adaptive quality
- [x] Neural frame types, remaining input types, layout serialization, WASM codec
## Streaming — Phase 2 ✅
- [x] **Stream transforms** — `_streamMap`/`_streamFilter`/`_streamDebounce`/`_streamDistinct`/`_streamThrottle` pipe operators
- [x] **Replay / Time-travel** — relay replay ring buffer with configurable `replay_depth`
- [x] **Stream recording** — `recording_dir` config in relay for frame-level recording
- [x] **Multi-relay federation** — `federation_upstreams` relay config for cross-network forwarding
## Language ✅
- [x] **Closures / lambdas** — `(x) -> x * 2` first-class functions (parser + codegen)
- [x] **Pattern destructuring** — `let { count, doubled } = stream from "..."` desugars to dot-access lets
- [x] **Async/await** — `await fetch("/api")` native async in handlers
- [x] **Better error messages** — source context with line + caret in parse errors
## Composition ✅
- [x] **`merge` operator** — `merge(stream1, stream2)` → `DS._mergeStreams()` reactive merge
- [x] **`pipe` operator** — `stream from "..." | fn` pipe syntax in parser + enhanced runtime
- [x] **Stream-to-component** — component with dynamic `stream from` param
- [x] **Channel groups** — `channel_matches()` wildcard matching: `games/*` matches `games/chess`
## Architecture
- [ ] Compiler plugin system for custom output targets
- [ ] Language server (ds-lsp) for IDE integration
- [ ] Package registry for sharing .ds components

View file

@ -659,3 +659,119 @@ All infrastructure is built and tested:
- ⬜ **NFT-gated stream auth**: Check wallet ownership before allowing connection - ⬜ **NFT-gated stream auth**: Check wallet ownership before allowing connection
- ⬜ **PgFlex signal bridge**: SSE → DreamStack signal graph - ⬜ **PgFlex signal bridge**: SSE → DreamStack signal graph
- ⬜ **Local LLM integration**: Model loading + signal-based I/O - ⬜ **Local LLM integration**: Model loading + signal-based I/O
---
## Part 7 — Impossible Use Cases (Enabled by 44KB Output)
> The JS output optimizations (DOM helpers, tree-shaking, minification) reduced
> compiled DreamStack apps to **44KB** (~8KB gzipped). At this size, an interactive
> application crosses a threshold: **it stops being infrastructure and becomes data.**
> Data can travel through streams, live on-chain, or be generated on the fly.
### 1. Frame-Zero Self-Propagating Apps
**The insight**: The app is small enough to be sent through its own data stream.
```
┌──────────────┐ ┌──────────────┐ ┌──────────────┐
│ Source │ │ Relay │ │ Browser │
│ (Pi, laptop) │ │ (any relay) │ │ (viewer) │
└──────┬───────┘ └──────┬───────┘ └──────┬───────┘
│ FRAME 0: full HTML │ │
│ (44KB compiled app) │ │
│───────────────────────>│ caches it │
│ FRAME 1: {temp: 72.4} │ │
│───────────────────────>│ │
│ │ Someone opens URL │
│ │<──────── GET /s/a8f3c │
│ │ serves FRAME 0 │
│ │───────────────────────>│ renders app
│ │ WS auto-connects │
│ │<──────── upgrade ──────│
│ FRAME N: {temp: 73.1} │ │
│───────────────────────>│───────────────────────>│ live update
```
One command: `dreamstack stream sensors.ds --bootstrap`
One URL: `https://relay.dreamstack.dev/s/a8f3c`
No CDN, no hosting, no separate frontend deployment.
**Why impossible today**: React (200KB+) is too large to push through a WebSocket.
Firebase/Supabase require pre-deployed frontends. No existing framework can travel
through its own data channel.
### 2. Solana Accounts as Reactive Signal Sources
**The insight**: DreamStack signals map directly to on-chain state.
The wallet balance IS a reactive signal. A transfer IS a signal mutation.
**The blockchain replaces the relay server.**
This is NOT "store HTML on-chain" (anyone can do that). The signals themselves
are sourced from on-chain state. The app reads its state FROM the blockchain
and writes state BACK via transactions.
```
User clicks "Send 0.5 SOL"
├─→ balance signal reacts
├─→ buildTransaction(transfer(recipient, 0.5 SOL))
├─→ wallet.signAndSend(tx) ← Phantom popup
Solana validators confirm (~400ms)
├─→ accountSubscribe fires: balance changed
Sender's UI: balance spring-animates down
Recipient's UI: balance spring-animates up
Both update with zero infrastructure between them
```
**Zero infrastructure.** No relay, no WebSocket server, no database.
The blockchain IS the pub/sub layer, the state store, and the audit log.
| What you'd need | React + Firebase | **DreamStack + Solana** |
|---|---|---|
| Frontend hosting | Vercel ($20/mo) | **On-chain ($0.12 once)** |
| Real-time sync | Firebase ($25/mo)| **accountSubscribe (free)** |
| Backend server | Cloud Functions | **None** |
| Audit log | Build it yourself| **Solana Explorer** |
| Total infra | 3-4 services | **0 services** |
**Why impossible today**: No frontend framework has a signal system that maps
to Solana account bytes. React/Vue call the chain — the chain doesn't push state
back reactively. DreamStack closes the loop: signals ↔ account data ↔ subscribers.
### 3. AI-Generated Live Applications
**The insight**: An LLM generates 20 lines of `.ds` source → compiles in <100ms
→ produces 44KB HTML → pushes to relay as frame 0. The AI responds with a
**running application** instead of text.
```
User: "Show me a live dashboard of my Solana validator"
AI: [generates 30 lines of .ds — 500 bytes]
[compiles to 44KB HTML in 80ms]
[pushes to relay as frame 0]
→ Here's your dashboard: relay.dreamstack.dev/s/x9f2
(already live, already streaming real data)
```
The AI doesn't link you to a website. The AI doesn't generate code for you to
deploy. **The AI's response IS a live, streaming, interactive application.**
**Why impossible today**: ChatGPT produces static text. Code Interpreter produces
screenshots. V0/Bolt generate code you deploy yourself. No AI can respond with a
live streaming app because existing frameworks require build infrastructure and
are too large for inline delivery.
### The Unifying Principle
> When an application is small enough, it stops being infrastructure and becomes
> data. Data can travel through streams (frame-zero), live on-chain (Solana UI),
> or be generated on the fly (AI apps). DreamStack apps crossed that threshold —
> they're small enough to go anywhere data goes.

View file

@ -28,6 +28,9 @@ enum Commands {
/// Output directory (default: dist/) /// Output directory (default: dist/)
#[arg(short, long, default_value = "dist")] #[arg(short, long, default_value = "dist")]
output: PathBuf, output: PathBuf,
/// Minify JS and CSS output
#[arg(long)]
minify: bool,
}, },
/// Start a dev server with hot reload /// Start a dev server with hot reload
Dev { Dev {
@ -97,7 +100,7 @@ fn main() {
let cli = Cli::parse(); let cli = Cli::parse();
match cli.command { match cli.command {
Commands::Build { file, output } => cmd_build(&file, &output), Commands::Build { file, output, minify } => cmd_build(&file, &output, minify),
Commands::Dev { file, port } => cmd_dev(&file, port), Commands::Dev { file, port } => cmd_dev(&file, port),
Commands::Check { file } => cmd_check(&file), Commands::Check { file } => cmd_check(&file),
Commands::Stream { file, relay, mode, port } => cmd_stream(&file, &relay, &mode, port), Commands::Stream { file, relay, mode, port } => cmd_stream(&file, &relay, &mode, port),
@ -108,7 +111,7 @@ fn main() {
} }
} }
fn compile(source: &str, base_dir: &Path) -> Result<String, String> { fn compile(source: &str, base_dir: &Path, minify: bool) -> Result<String, String> {
// 1. Lex // 1. Lex
let mut lexer = ds_parser::Lexer::new(source); let mut lexer = ds_parser::Lexer::new(source);
let tokens = lexer.tokenize(); let tokens = lexer.tokenize();
@ -121,7 +124,7 @@ fn compile(source: &str, base_dir: &Path) -> Result<String, String> {
} }
// 2. Parse // 2. Parse
let mut parser = ds_parser::Parser::new(tokens); let mut parser = ds_parser::Parser::with_source(tokens, source);
let mut program = parser.parse_program().map_err(|e| e.to_string())?; let mut program = parser.parse_program().map_err(|e| e.to_string())?;
// 3. Resolve imports — inline exported declarations from imported files // 3. Resolve imports — inline exported declarations from imported files
@ -132,7 +135,7 @@ fn compile(source: &str, base_dir: &Path) -> Result<String, String> {
let views = ds_analyzer::SignalGraph::analyze_views(&program); let views = ds_analyzer::SignalGraph::analyze_views(&program);
// 5. Codegen // 5. Codegen
let html = ds_codegen::JsEmitter::emit_html(&program, &graph, &views); let html = ds_codegen::JsEmitter::emit_html(&program, &graph, &views, minify);
Ok(html) Ok(html)
} }
@ -206,8 +209,8 @@ fn resolve_imports(program: &mut ds_parser::Program, base_dir: &Path) -> Result<
Ok(()) Ok(())
} }
fn cmd_build(file: &Path, output: &Path) { fn cmd_build(file: &Path, output: &Path, minify: bool) {
println!("🔨 DreamStack build"); println!("🔨 DreamStack build{}", if minify { " (minified)" } else { "" });
println!(" source: {}", file.display()); println!(" source: {}", file.display());
let source = match fs::read_to_string(file) { let source = match fs::read_to_string(file) {
@ -219,7 +222,7 @@ fn cmd_build(file: &Path, output: &Path) {
}; };
let base_dir = file.parent().unwrap_or(Path::new(".")); let base_dir = file.parent().unwrap_or(Path::new("."));
match compile(&source, base_dir) { match compile(&source, base_dir, minify) {
Ok(html) => { Ok(html) => {
fs::create_dir_all(output).unwrap(); fs::create_dir_all(output).unwrap();
let out_path = output.join("index.html"); let out_path = output.join("index.html");
@ -307,7 +310,7 @@ fn cmd_dev(file: &Path, port: u16) {
let start = Instant::now(); let start = Instant::now();
let base_dir = file.parent().unwrap_or(Path::new(".")); let base_dir = file.parent().unwrap_or(Path::new("."));
match compile(&source, base_dir) { match compile(&source, base_dir, false) {
Ok(html) => { Ok(html) => {
let ms = start.elapsed().as_millis(); let ms = start.elapsed().as_millis();
let html_with_hmr = inject_hmr(&html); let html_with_hmr = inject_hmr(&html);
@ -393,7 +396,7 @@ h2 {{ color: #f87171; margin-bottom: 16px; }}
// Recompile // Recompile
if let Ok(src) = fs::read_to_string(&watch_file) { if let Ok(src) = fs::read_to_string(&watch_file) {
let start = Instant::now(); let start = Instant::now();
match compile(&src, watch_file.parent().unwrap_or(Path::new("."))) { match compile(&src, watch_file.parent().unwrap_or(Path::new(".")), false) {
Ok(html) => { Ok(html) => {
let ms = start.elapsed().as_millis(); let ms = start.elapsed().as_millis();
let new_version = v_watcher.fetch_add(1, Ordering::SeqCst) + 1; let new_version = v_watcher.fetch_add(1, Ordering::SeqCst) + 1;
@ -505,7 +508,7 @@ fn cmd_check(file: &Path) {
} }
// Parse // Parse
let mut parser = ds_parser::Parser::new(tokens); let mut parser = ds_parser::Parser::with_source(tokens, &source);
let program = match parser.parse_program() { let program = match parser.parse_program() {
Ok(p) => p, Ok(p) => p,
Err(e) => { Err(e) => {
@ -612,7 +615,7 @@ fn cmd_stream(file: &Path, relay: &str, mode: &str, port: u16) {
) )
}; };
match compile(&stream_source, file.parent().unwrap_or(Path::new("."))) { match compile(&stream_source, file.parent().unwrap_or(Path::new(".")), false) {
Ok(html) => { Ok(html) => {
let html_with_hmr = inject_hmr(&html); let html_with_hmr = inject_hmr(&html);
println!("✅ Compiled with streaming enabled"); println!("✅ Compiled with streaming enabled");

File diff suppressed because it is too large Load diff

View file

@ -69,7 +69,7 @@ impl IncrementalCompiler {
fn full_compile(program: &Program) -> String { fn full_compile(program: &Program) -> String {
let graph = SignalGraph::from_program(program); let graph = SignalGraph::from_program(program);
let views = SignalGraph::analyze_views(program); let views = SignalGraph::analyze_views(program);
JsEmitter::emit_html(program, &graph, &views) JsEmitter::emit_html(program, &graph, &views, false)
} }
/// Compile source code, returning either a full HTML recompile or a JS patch. /// Compile source code, returning either a full HTML recompile or a JS patch.

View file

@ -308,6 +308,10 @@ pub enum Expr {
MethodCall(Box<Expr>, String, Vec<Expr>), MethodCall(Box<Expr>, String, Vec<Expr>),
/// Slot: renders children passed to a component /// Slot: renders children passed to a component
Slot, Slot,
/// Await: `await fetchJSON("/api")`
Await(Box<Expr>),
/// Merge streams: `merge(stream1, stream2, ...)`
Merge(Vec<Expr>),
} }
/// String literal with interpolation segments. /// String literal with interpolation segments.

View file

@ -5,11 +5,23 @@ use crate::lexer::{Token, TokenKind};
pub struct Parser { pub struct Parser {
tokens: Vec<Token>, tokens: Vec<Token>,
pos: usize, pos: usize,
source_lines: Vec<String>,
pending_decls: Vec<Declaration>,
} }
impl Parser { impl Parser {
pub fn new(tokens: Vec<Token>) -> Self { pub fn new(tokens: Vec<Token>) -> Self {
Self { tokens, pos: 0 } Self { tokens, pos: 0, source_lines: Vec::new(), pending_decls: Vec::new() }
}
/// Create a parser with source text for enriched error messages.
pub fn with_source(tokens: Vec<Token>, source: &str) -> Self {
Self {
tokens,
pos: 0,
source_lines: source.lines().map(String::from).collect(),
pending_decls: Vec::new(),
}
} }
pub fn parse_program(&mut self) -> Result<Program, ParseError> { pub fn parse_program(&mut self) -> Result<Program, ParseError> {
@ -19,6 +31,8 @@ impl Parser {
while !self.is_at_end() { while !self.is_at_end() {
let decl = self.parse_declaration()?; let decl = self.parse_declaration()?;
declarations.push(decl); declarations.push(decl);
// Drain any pending declarations from destructuring desugaring
declarations.extend(self.pending_decls.drain(..));
self.skip_newlines(); self.skip_newlines();
} }
@ -133,10 +147,16 @@ impl Parser {
fn error(&self, msg: String) -> ParseError { fn error(&self, msg: String) -> ParseError {
let tok = self.current_token(); let tok = self.current_token();
let source_line = if tok.line > 0 && tok.line <= self.source_lines.len() {
Some(self.source_lines[tok.line - 1].clone())
} else {
None
};
ParseError { ParseError {
message: msg, message: msg,
line: tok.line, line: tok.line,
col: tok.col, col: tok.col,
source_line,
} }
} }
@ -263,6 +283,89 @@ impl Parser {
fn parse_let_decl(&mut self) -> Result<Declaration, ParseError> { fn parse_let_decl(&mut self) -> Result<Declaration, ParseError> {
let line = self.current_token().line; let line = self.current_token().line;
self.advance(); // consume 'let' self.advance(); // consume 'let'
// Object destructuring: `let { a, b } = expr`
if self.check(&TokenKind::LBrace) {
self.advance(); // consume '{'
let mut names = Vec::new();
loop {
self.skip_newlines();
if self.check(&TokenKind::RBrace) { break; }
names.push(self.expect_ident()?);
self.skip_newlines();
if self.check(&TokenKind::Comma) { self.advance(); }
}
self.expect(&TokenKind::RBrace)?;
self.expect(&TokenKind::Eq)?;
let value = self.parse_expr()?;
if names.is_empty() {
return Err(self.error("empty destructuring pattern".to_string()));
}
// Desugar: let { a, b } = expr → let _tmp = expr; let a = _tmp.a; let b = _tmp.b;
let temp = format!("_ds_{}", names.join("_"));
let mut all_decls = vec![Declaration::Let(LetDecl {
name: temp.clone(),
type_annotation: None,
value,
span: Span { start: 0, end: 0, line },
})];
for field in &names {
all_decls.push(Declaration::Let(LetDecl {
name: field.clone(),
type_annotation: None,
value: Expr::DotAccess(Box::new(Expr::Ident(temp.clone())), field.clone()),
span: Span { start: 0, end: 0, line },
}));
}
// Return first decl, queue rest for injection by parse_program
self.pending_decls.extend(all_decls.drain(1..));
return Ok(all_decls.into_iter().next().unwrap());
}
// Array destructuring: `let [a, b] = expr`
if self.check(&TokenKind::LBracket) {
self.advance(); // consume '['
let mut names = Vec::new();
loop {
self.skip_newlines();
if self.check(&TokenKind::RBracket) { break; }
names.push(self.expect_ident()?);
self.skip_newlines();
if self.check(&TokenKind::Comma) { self.advance(); }
}
self.expect(&TokenKind::RBracket)?;
self.expect(&TokenKind::Eq)?;
let value = self.parse_expr()?;
if names.is_empty() {
return Err(self.error("empty array destructuring pattern".to_string()));
}
// Desugar: let [a, b] = expr → let _tmp = expr; let a = _tmp[0]; let b = _tmp[1];
let temp = format!("_ds_{}", names.join("_"));
let mut all_decls = vec![Declaration::Let(LetDecl {
name: temp.clone(),
type_annotation: None,
value,
span: Span { start: 0, end: 0, line },
})];
for (i, name) in names.iter().enumerate() {
all_decls.push(Declaration::Let(LetDecl {
name: name.clone(),
type_annotation: None,
value: Expr::Index(
Box::new(Expr::Ident(temp.clone())),
Box::new(Expr::IntLit(i as i64)),
),
span: Span { start: 0, end: 0, line },
}));
}
self.pending_decls.extend(all_decls.drain(1..));
return Ok(all_decls.into_iter().next().unwrap());
}
let name = self.expect_ident()?; let name = self.expect_ident()?;
// Optional type annotation: `let name: Type = value` // Optional type annotation: `let name: Type = value`
@ -1245,6 +1348,18 @@ impl Parser {
return Ok(Expr::Slot); return Ok(Expr::Slot);
} }
// Await: `await fetchJSON("/api")`
if name == "await" {
let inner = self.parse_primary()?;
return Ok(Expr::Await(Box::new(inner)));
}
// Merge: `merge(stream1, stream2, ...)`
if name == "merge" && self.check(&TokenKind::LParen) {
let args = self.parse_call_args()?;
return Ok(Expr::Merge(args));
}
// Component use: `Button { label: "hello" }` — capitalized name + `{` // Component use: `Button { label: "hello" }` — capitalized name + `{`
if name.chars().next().map_or(false, |c| c.is_uppercase()) if name.chars().next().map_or(false, |c| c.is_uppercase())
&& self.check(&TokenKind::LBrace) && self.check(&TokenKind::LBrace)
@ -1626,11 +1741,21 @@ pub struct ParseError {
pub message: String, pub message: String,
pub line: usize, pub line: usize,
pub col: usize, pub col: usize,
pub source_line: Option<String>,
} }
impl std::fmt::Display for ParseError { impl std::fmt::Display for ParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Parse error at line {}:{}: {}", self.line, self.col, self.message) writeln!(f, "Parse error at line {}:{}: {}", self.line, self.col, self.message)?;
if let Some(ref src) = self.source_line {
let line_num = format!("{}", self.line);
let pad = " ".repeat(line_num.len());
writeln!(f, "{pad} |")?;
writeln!(f, "{line_num} | {src}")?;
let caret_offset = if self.col > 0 { self.col - 1 } else { 0 };
writeln!(f, "{pad} | {}^", " ".repeat(caret_offset))?;
}
Ok(())
} }
} }

View file

@ -0,0 +1,7 @@
cmake_minimum_required(VERSION 3.16)
# Point to ESP-IDF
set(EXTRA_COMPONENT_DIRS $ENV{IDF_PATH}/components)
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
project(ds-thin-client)

View file

@ -0,0 +1,76 @@
# DreamStack Thin Client — Waveshare ESP32-P4 10.1" Panel
A 10.1" touchscreen that acts as a **dumb pixel display with touch input**. All rendering happens on the source device (laptop, Pi, server). The panel just shows pixels and reports touches.
## Architecture
```
SOURCE (laptop/Pi) RELAY (:9100) THIS DEVICE
─────────────── ───────────── ───────────
DreamStack app renders WebSocket hub Waveshare ESP32-P4
800×1280 canvas 10.1" IPS + touch
pixels ──→ XOR delta ──→ RLE ──→ relay ────────────────→ RLE decode
XOR apply
blit to screen
←── touch {id,x,y,phase} ← GT9271 touch
```
## Hardware
| Component | Spec |
|---|---|
| Board | Waveshare ESP32-P4-WIFI6 |
| Display | 10.1" IPS, 800×1280, MIPI DSI |
| Touch | GT9271, 10-point capacitive, toughened glass |
| CPU | ESP32-P4 RISC-V 400MHz |
| RAM | 32MB PSRAM (framebuffer lives here) |
| WiFi | WiFi 6 (ESP32-C6) |
| Display driver | `waveshare/esp_lcd_jd9365_10_1` (JD9365) |
## Project Structure
```
devices/waveshare-p4-panel/
├── CMakeLists.txt # ESP-IDF project
├── README.md # This file
└── main/
├── CMakeLists.txt # Component registration
├── idf_component.yml # Dependencies (display driver, WebSocket)
├── Kconfig.projbuild # WiFi SSID/password, relay URL
├── main.c # Entry point: WiFi → WS → receive/blit/touch
├── ds_codec.h # RLE decode + XOR apply (header)
├── ds_codec.c # RLE decode + XOR apply (implementation)
└── ds_protocol.h # Bitstream header parsing + touch encoding
```
## Building
### Prerequisites
- ESP-IDF v5.3+ ([install guide](https://docs.espressif.com/projects/esp-idf/en/latest/esp32p4/get-started/))
- VSCode + ESP-IDF extension (recommended)
### Build & Flash
```bash
# Set target to ESP32-P4
idf.py set-target esp32p4
# Configure WiFi and relay URL
idf.py menuconfig
# → DreamStack Thin Client → WiFi SSID, Password, Relay URL
# Build
idf.py build
# Flash (connect via USB-C)
idf.py -p /dev/ttyUSB0 flash monitor
```
## TODO (pending hardware arrival)
- [ ] Wire up `display_init()` with actual MIPI DSI + JD9365 driver
- [ ] Wire up `touch_task()` with GT9271 I2C touch driver
- [ ] Test framebuffer allocation in PSRAM (800×1280×2 = 2MB)
- [ ] Measure end-to-end latency (target: <50ms)
- [ ] Add reconnection logic for WiFi + WebSocket drops

View file

@ -0,0 +1,11 @@
idf_component_register(
SRCS "main.c" "ds_codec.c"
INCLUDE_DIRS "."
REQUIRES
esp_wifi
esp_event
esp_lcd
esp_timer
nvs_flash
esp_psram
)

View file

@ -0,0 +1,22 @@
menu "DreamStack Thin Client"
config WIFI_SSID
string "WiFi SSID"
default "HomeNetwork"
help
WiFi network name to connect to.
config WIFI_PASS
string "WiFi Password"
default ""
help
WiFi password.
config RELAY_URL
string "Relay WebSocket URL"
default "ws://192.168.1.100:9100/stream/home"
help
WebSocket URL of the DreamStack relay server.
Format: ws://HOST:PORT/stream/CHANNEL_NAME
endmenu

View file

@ -0,0 +1,56 @@
/**
* DreamStack Delta Codec C port for ESP32-P4
*
* Ported from engine/ds-stream/src/codec.rs (lines 109-179)
* Exact same encoding: 0x00 + 2-byte LE count = zero run.
*/
#include "ds_codec.h"
#include <string.h>
size_t ds_rle_decode(const uint8_t *compressed, size_t comp_len,
                     uint8_t *output, size_t out_cap) {
    /* Walk the compressed stream, expanding zero runs and copying literals.
     * Encoding: 0x00 + 16-bit little-endian count = run of zeros; any
     * non-zero byte passes through unchanged.
     * Returns the decoded length, or 0 if the output buffer would overflow. */
    size_t in_pos = 0;
    size_t out_pos = 0;

    while (in_pos < comp_len) {
        uint8_t byte = compressed[in_pos];

        if (byte != 0x00) {
            /* Literal byte. */
            if (out_pos >= out_cap) return 0; /* output overflow */
            output[out_pos] = byte;
            out_pos++;
            in_pos++;
            continue;
        }

        /* Zero run: 0x00, count_lo, count_hi. Stop on a truncated marker. */
        if (in_pos + 2 >= comp_len) break;
        uint16_t run = (uint16_t)compressed[in_pos + 1]
                     | ((uint16_t)compressed[in_pos + 2] << 8);
        if (out_pos + run > out_cap) return 0; /* output overflow */
        memset(output + out_pos, 0, run);
        out_pos += run;
        in_pos += 3;
    }
    return out_pos;
}
void ds_xor_apply(uint8_t *framebuffer, const uint8_t *delta, size_t len) {
    /* XOR the decoded delta into the framebuffer in-place:
     * framebuffer[i] ^= delta[i] for every byte.
     *
     * Fix: the previous code dereferenced `uint32_t *` casts of the raw
     * byte pointers. That violates strict aliasing and performs unaligned
     * word accesses whenever the buffers are not 4-byte aligned, which can
     * trap on RISC-V. memcpy-based word access is well-defined and is
     * lowered by the compiler to single word loads/stores where the target
     * allows, keeping the 4-bytes-at-a-time speedup. */
    size_t i = 0;
    size_t word_end = len & ~(size_t)3;
    for (; i < word_end; i += 4) {
        uint32_t fb_word, d_word;
        memcpy(&fb_word, framebuffer + i, 4);
        memcpy(&d_word, delta + i, 4);
        fb_word ^= d_word;
        memcpy(framebuffer + i, &fb_word, 4);
    }
    /* Tail: remaining 0-3 bytes. */
    for (; i < len; i++) {
        framebuffer[i] ^= delta[i];
    }
}
int ds_apply_delta_rle(uint8_t *framebuffer, size_t fb_len,
                       const uint8_t *compressed, size_t comp_len,
                       uint8_t *scratch) {
    /* Expand the RLE stream into `scratch`. A valid delta must decode to
     * exactly one framebuffer's worth of bytes; anything else (truncated
     * input, overflow, empty result) rejects the frame. */
    size_t decoded = ds_rle_decode(compressed, comp_len, scratch, fb_len);
    if (decoded != fb_len || decoded == 0) {
        return -1;
    }
    /* XOR the decoded delta over the current frame in-place. */
    ds_xor_apply(framebuffer, scratch, fb_len);
    return 0;
}

View file

@ -0,0 +1,54 @@
/**
* DreamStack Delta Codec C port for ESP32-P4
*
* Ported from engine/ds-stream/src/codec.rs
* Handles XOR+RLE delta decompression for the thin client.
*/
#pragma once
#include <stdint.h>
#include <stddef.h>
/**
* RLE-decode a compressed delta buffer (in-place to output).
*
* Encoding: 0x00 followed by 2-byte LE count = zero run.
* Non-zero bytes pass through literally.
*
* @param compressed RLE-compressed data from relay
* @param comp_len Length of compressed data
* @param output Pre-allocated output buffer
* @param out_cap Capacity of output buffer
* @return Actual decoded length, or 0 on error
*/
size_t ds_rle_decode(const uint8_t *compressed, size_t comp_len,
uint8_t *output, size_t out_cap);
/**
* Apply XOR delta to reconstruct current frame.
*
* framebuffer[i] ^= delta[i] for each byte.
* Modifies framebuffer in-place.
*
* @param framebuffer Current framebuffer (modified in-place)
* @param delta Decoded delta buffer
* @param len Length (must match for both buffers)
*/
void ds_xor_apply(uint8_t *framebuffer, const uint8_t *delta, size_t len);
/**
* Decode an RLE-compressed delta and apply it to the framebuffer.
* Convenience function combining rle_decode + xor_apply.
*
* Uses a scratch buffer for the intermediate decoded delta.
*
* @param framebuffer Current framebuffer (modified in-place)
* @param fb_len Framebuffer length
* @param compressed RLE-compressed delta from relay
* @param comp_len Length of compressed data
* @param scratch Temporary buffer (must be >= fb_len)
* @return 0 on success, -1 on error
*/
int ds_apply_delta_rle(uint8_t *framebuffer, size_t fb_len,
const uint8_t *compressed, size_t comp_len,
uint8_t *scratch);

View file

@ -0,0 +1,94 @@
/**
* DreamStack Bitstream Protocol C port for ESP32-P4
*
* Ported from engine/ds-stream/src/protocol.rs
* Header format and frame types for the thin client.
*/
#pragma once
#include <stdint.h>
// ─── Magic bytes ───
#define DS_MAGIC_0 0xD5
#define DS_MAGIC_1 0x7A
// ─── Header size ───
#define DS_HEADER_SIZE 16
// ─── Frame types (output: source → receiver) ───
#define DS_FRAME_PIXELS 0x01 // Raw RGBA pixel data
#define DS_FRAME_COMPRESSED 0x02 // PNG/WebP compressed
#define DS_FRAME_DELTA 0x03 // XOR delta from previous
#define DS_FRAME_SIGNAL_SYNC 0x10 // Full signal state (JSON)
#define DS_FRAME_SIGNAL_DIFF 0x11 // Changed signals (JSON)
#define DS_FRAME_SCHEMA 0x12 // Schema announcement
#define DS_FRAME_KEYFRAME 0xF0 // Reset state
#define DS_FRAME_PING 0xFE // Heartbeat
#define DS_FRAME_END 0xFF // Stream end
// ─── Input types (receiver → source) ───
#define DS_INPUT_POINTER 0x01
#define DS_INPUT_KEY 0x02
#define DS_INPUT_SCROLL 0x03
#define DS_INPUT_TOUCH 0x04
#define DS_INPUT_GAMEPAD_AXIS 0x10
#define DS_INPUT_GAMEPAD_BTN 0x11
#define DS_INPUT_TEXT 0x20
// ─── Flags ───
#define DS_FLAG_INPUT 0x01
#define DS_FLAG_KEYFRAME 0x02
#define DS_FLAG_COMPRESSED 0x04
// ─── Frame header (16 bytes, little-endian) ───
typedef struct __attribute__((packed)) {
uint8_t magic[2]; // DS_MAGIC_0, DS_MAGIC_1
uint8_t frame_type; // DS_FRAME_* or DS_INPUT_*
uint8_t flags; // DS_FLAG_*
uint16_t seq; // Sequence number
uint32_t timestamp; // Milliseconds
uint16_t width; // Frame width (pixels)
uint16_t height; // Frame height (pixels)
uint16_t payload_len; // Following payload length
} ds_header_t;
// ─── Touch event (6 bytes) ───
typedef struct __attribute__((packed)) {
uint8_t id; // Touch identifier (multi-touch)
uint16_t x; // X coordinate
uint16_t y; // Y coordinate
uint8_t phase; // 0=start/move, 1=end, 2=cancel
} ds_touch_event_t;
/**
 * Parse a frame header from raw bytes.
 *
 * Fix: the previous version copied the header via a cast to
 * `const ds_header_t *`, which accesses the receive buffer through an
 * incompatible effective type and silently depends on the host being
 * little-endian with the expected struct packing. Decoding the
 * little-endian fields explicitly is well-defined on every target and
 * produces identical results on the (LE) ESP32-P4.
 *
 * @param buf  Raw bytes; caller must guarantee at least DS_HEADER_SIZE bytes.
 * @param hdr  Output header, fully filled on success.
 * @return 0 on success, -1 if the magic bytes don't match.
 */
static inline int ds_parse_header(const uint8_t *buf, ds_header_t *hdr) {
    if (buf[0] != DS_MAGIC_0 || buf[1] != DS_MAGIC_1) return -1;
    hdr->magic[0]    = buf[0];
    hdr->magic[1]    = buf[1];
    hdr->frame_type  = buf[2];
    hdr->flags       = buf[3];
    hdr->seq         = (uint16_t)(buf[4] | ((uint16_t)buf[5] << 8));
    hdr->timestamp   = (uint32_t)buf[6]
                     | ((uint32_t)buf[7] << 8)
                     | ((uint32_t)buf[8] << 16)
                     | ((uint32_t)buf[9] << 24);
    hdr->width       = (uint16_t)(buf[10] | ((uint16_t)buf[11] << 8));
    hdr->height      = (uint16_t)(buf[12] | ((uint16_t)buf[13] << 8));
    hdr->payload_len = (uint16_t)(buf[14] | ((uint16_t)buf[15] << 8));
    return 0;
}
/**
* Encode a touch input message into a buffer.
* Buffer must be at least DS_HEADER_SIZE + sizeof(ds_touch_event_t) = 22 bytes.
* Returns total message size.
*/
static inline size_t ds_encode_touch(uint8_t *buf, uint16_t seq,
uint32_t timestamp,
const ds_touch_event_t *touch) {
ds_header_t *hdr = (ds_header_t *)buf;
hdr->magic[0] = DS_MAGIC_0;
hdr->magic[1] = DS_MAGIC_1;
hdr->frame_type = DS_INPUT_TOUCH;
hdr->flags = DS_FLAG_INPUT;
hdr->seq = seq;
hdr->timestamp = timestamp;
hdr->width = 0;
hdr->height = 0;
hdr->payload_len = sizeof(ds_touch_event_t);
*(ds_touch_event_t *)(buf + DS_HEADER_SIZE) = *touch;
return DS_HEADER_SIZE + sizeof(ds_touch_event_t);
}

View file

@ -0,0 +1,12 @@
## IDF Component Manager Manifest File
## https://docs.espressif.com/projects/esp-idf/en/latest/esp32p4/api-guides/tools/idf-component-manager.html
dependencies:
# Waveshare 10.1" MIPI DSI display driver (JD9365)
waveshare/esp_lcd_jd9365_10_1: "*"
# WebSocket client
espressif/esp_websocket_client: "^1.0.0"
# ESP-IDF (implicit but declaring minimum)
idf:
version: ">=5.3.0"

View file

@ -0,0 +1,235 @@
/**
 * DreamStack Thin Client — Waveshare ESP32-P4 10.1" Panel
 *
 * Firmware that turns the panel into a dumb pixel display
 * with touch input. All rendering happens on the source device.
 *
 * Flow: WiFi → WebSocket → receive delta frames → blit to display
 *       Touch → encode event → send over WebSocket
 *
 * Dependencies (via ESP Component Registry):
 * - waveshare/esp_lcd_jd9365_10_1 (10.1" MIPI DSI display driver)
 * - espressif/esp_websocket_client (WebSocket client)
 */
#include <stdio.h>
#include <string.h>

#include "freertos/FreeRTOS.h"
#include "freertos/task.h"

#include "esp_event.h"
#include "esp_heap_caps.h"
#include "esp_lcd_panel_ops.h"
#include "esp_log.h"
#include "esp_timer.h"
#include "esp_websocket_client.h"
#include "esp_wifi.h"
#include "nvs_flash.h"

#include "ds_codec.h"
#include "ds_protocol.h"
static const char *TAG = "ds-panel";
// ─── Configuration (set via menuconfig or hardcode for POC) ───
#define PANEL_WIDTH 800
#define PANEL_HEIGHT 1280
#define PIXEL_BYTES 2 // RGB565
#define FB_SIZE (PANEL_WIDTH * PANEL_HEIGHT * PIXEL_BYTES) // ~2MB
#define WIFI_SSID CONFIG_WIFI_SSID
#define WIFI_PASS CONFIG_WIFI_PASS
#define RELAY_URL CONFIG_RELAY_URL // e.g. "ws://192.168.1.100:9100/stream/home"
// ─── Framebuffers (in PSRAM) ───
static uint8_t *framebuffer; // Current display state
static uint8_t *scratch_buf; // Temp buffer for delta decode
// ─── Display handle ───
static esp_lcd_panel_handle_t panel_handle = NULL;
// ─── Touch state ───
static uint16_t input_seq = 0;
// ─── WebSocket event handler ───
// Handles all WebSocket events from the relay. Frame data is parsed,
// decoded, and blitted directly from this handler; connection state
// changes are only logged (automatic reconnect is still a README TODO).
static void ws_event_handler(void *arg, esp_event_base_t base,
                             int32_t event_id, void *event_data) {
    esp_websocket_event_data_t *data = (esp_websocket_event_data_t *)event_data;
    switch (event_id) {
    case WEBSOCKET_EVENT_CONNECTED:
        ESP_LOGI(TAG, "WebSocket connected to relay");
        break;
    case WEBSOCKET_EVENT_DATA:
        // Ignore anything too short to carry a 16-byte DS header.
        if (data->data_len < DS_HEADER_SIZE) break;
        ds_header_t hdr;
        // Drop messages whose magic bytes don't match the DS protocol.
        if (ds_parse_header((const uint8_t *)data->data_ptr, &hdr) != 0) break;
        const uint8_t *payload = (const uint8_t *)data->data_ptr + DS_HEADER_SIZE;
        size_t payload_len = data->data_len - DS_HEADER_SIZE;
        // NOTE(review): this assumes each WS data event delivers one whole
        // message (payload_len is never checked against hdr.payload_len, and
        // fragmented frames would mis-parse) — confirm against relay framing.
        switch (hdr.frame_type) {
        case DS_FRAME_PIXELS:
            // Full keyframe — copy directly to framebuffer.
            // Only a payload of exactly one full frame is accepted.
            if (payload_len == FB_SIZE) {
                memcpy(framebuffer, payload, FB_SIZE);
                esp_lcd_panel_draw_bitmap(panel_handle,
                    0, 0, PANEL_WIDTH, PANEL_HEIGHT, framebuffer);
                ESP_LOGI(TAG, "Keyframe received (%zu bytes)", payload_len);
            }
            break;
        case DS_FRAME_DELTA:
            // Delta frame — RLE decode + XOR apply onto the current frame,
            // then redraw the whole panel.
            if (ds_apply_delta_rle(framebuffer, FB_SIZE,
                                   payload, payload_len, scratch_buf) == 0) {
                esp_lcd_panel_draw_bitmap(panel_handle,
                    0, 0, PANEL_WIDTH, PANEL_HEIGHT, framebuffer);
            } else {
                ESP_LOGW(TAG, "Delta decode failed (len=%zu)", payload_len);
            }
            break;
        case DS_FRAME_PING:
            // Respond with pong (same message back) — TODO: not implemented yet.
            break;
        case DS_FRAME_END:
            ESP_LOGI(TAG, "Stream ended");
            break;
        }
        break;
    case WEBSOCKET_EVENT_DISCONNECTED:
        // The client is expected to reconnect on its own — TODO confirm
        // esp_websocket_client auto-reconnect settings.
        ESP_LOGW(TAG, "WebSocket disconnected, reconnecting...");
        break;
    case WEBSOCKET_EVENT_ERROR:
        ESP_LOGE(TAG, "WebSocket error");
        break;
    }
}
// ─── Send touch event over WebSocket ───
// Encode one touch event as a 22-byte binary message (16-byte DS header +
// 6-byte touch payload) and send it to the relay, blocking until queued.
// input_seq is incremented without synchronization; today this function is
// only referenced from the single touch task, so that is safe as used.
static void send_touch_event(esp_websocket_client_handle_t ws,
                             uint8_t id, uint16_t x, uint16_t y, uint8_t phase) {
    uint8_t buf[DS_HEADER_SIZE + sizeof(ds_touch_event_t)];
    ds_touch_event_t touch = { .id = id, .x = x, .y = y, .phase = phase };
    // Timestamp: microseconds since boot converted to milliseconds.
    size_t len = ds_encode_touch(buf, input_seq++,
                                 (uint32_t)(esp_timer_get_time() / 1000),
                                 &touch);
    esp_websocket_client_send_bin(ws, (const char *)buf, len, portMAX_DELAY);
}
// ─── Touch polling task ───
//
// TODO: Replace with actual GT9271 I2C touch driver.
// The Waveshare BSP should provide touch reading functions.
// This is a placeholder showing the integration pattern.
//
// Touch polling task. `arg` is the WebSocket client handle passed by
// xTaskCreate() in app_main; touch points will be forwarded to the relay
// via send_touch_event() once the GT9271 I2C driver is wired up.
// Currently a 100Hz idle loop (placeholder until hardware arrives).
static void touch_task(void *arg) {
    esp_websocket_client_handle_t ws = (esp_websocket_client_handle_t)arg;
    while (1) {
        // TODO: Read from GT9271 touch controller via I2C
        // Example (pseudocode):
        //
        //   gt9271_touch_data_t td;
        //   if (gt9271_read(&td) == ESP_OK && td.num_points > 0) {
        //       for (int i = 0; i < td.num_points; i++) {
        //           send_touch_event(ws, td.points[i].id,
        //                            td.points[i].x, td.points[i].y,
        //                            td.points[i].phase);
        //       }
        //   }
        vTaskDelay(pdMS_TO_TICKS(10)); // 100Hz touch polling
    }
}
// ─── Display initialization ───
//
// TODO: Initialize MIPI DSI display using Waveshare component.
// Add `waveshare/esp_lcd_jd9365_10_1` to idf_component.yml
//
// Display bring-up placeholder: MIPI DSI + JD9365 initialization is
// pending hardware arrival. Always returns ESP_OK today.
static esp_err_t display_init(void) {
    // TODO: Configure MIPI DSI bus and JD9365 panel driver
    // Example (pseudocode):
    //
    //   esp_lcd_dsi_bus_config_t bus_cfg = { ... };
    //   esp_lcd_new_dsi_bus(&bus_cfg, &dsi_bus);
    //
    //   esp_lcd_panel_dev_config_t panel_cfg = {
    //       .reset_gpio_num = ...,
    //       .rgb_ele_order = LCD_RGB_ELEMENT_ORDER_RGB,
    //       .bits_per_pixel = 16, // RGB565
    //   };
    //   esp_lcd_new_panel_jd9365_10_1(dsi_bus, &panel_cfg, &panel_handle);
    //   esp_lcd_panel_init(panel_handle);
    //
    // NOTE(review): logs "initialized" while panel_handle is still NULL —
    // confirm esp_lcd_panel_draw_bitmap tolerates a NULL handle, or gate the
    // draw calls in ws_event_handler until real init lands.
    ESP_LOGI(TAG, "Display initialized (%dx%d RGB565)", PANEL_WIDTH, PANEL_HEIGHT);
    return ESP_OK;
}
// ─── WiFi initialization ───
// Bring up the network stack and start connecting as a WiFi station.
// Returns immediately after esp_wifi_connect(); the caller is responsible
// for waiting until the connection is actually up.
// NOTE(review): every esp_* call here returns esp_err_t and is ignored —
// consider wrapping with ESP_ERROR_CHECK() so a failed init is loud.
static void wifi_init(void) {
    esp_netif_init();
    esp_event_loop_create_default();
    esp_netif_create_default_wifi_sta();
    wifi_init_config_t cfg = WIFI_INIT_CONFIG_DEFAULT();
    esp_wifi_init(&cfg);
    // Credentials come from menuconfig (CONFIG_WIFI_SSID / CONFIG_WIFI_PASS).
    wifi_config_t wifi_cfg = {
        .sta = {
            .ssid = WIFI_SSID,
            .password = WIFI_PASS,
        },
    };
    esp_wifi_set_mode(WIFI_MODE_STA);
    esp_wifi_set_config(WIFI_IF_STA, &wifi_cfg);
    esp_wifi_start();
    esp_wifi_connect();
    ESP_LOGI(TAG, "WiFi connecting to %s...", WIFI_SSID);
}
// ─── Main ───
// Entry point. Boot sequence: NVS → framebuffer allocation (PSRAM) →
// display init → WiFi → WebSocket client → touch polling task.
void app_main(void) {
    ESP_LOGI(TAG, "DreamStack Thin Client v0.1");
    ESP_LOGI(TAG, "Panel: %dx%d @ %d bpp = %d bytes",
             PANEL_WIDTH, PANEL_HEIGHT, PIXEL_BYTES * 8, FB_SIZE);
    // Initialize NVS (required for WiFi)
    nvs_flash_init();
    // Allocate framebuffers in PSRAM — FB_SIZE (~2MB) each is far too large
    // for internal SRAM. calloc gives a zeroed (black) initial frame.
    framebuffer = heap_caps_calloc(1, FB_SIZE, MALLOC_CAP_SPIRAM);
    scratch_buf = heap_caps_calloc(1, FB_SIZE, MALLOC_CAP_SPIRAM);
    if (!framebuffer || !scratch_buf) {
        ESP_LOGE(TAG, "Failed to allocate framebuffers in PSRAM (%d bytes each)", FB_SIZE);
        return;
    }
    // NOTE(review): FB_SIZE / (1024 * 1024) truncates (2,048,000 bytes → "1 MB"),
    // so this log slightly under-reports the buffer size.
    ESP_LOGI(TAG, "Framebuffers allocated in PSRAM (%d MB each)", FB_SIZE / (1024 * 1024));
    // Initialize display
    display_init();
    // Initialize WiFi
    wifi_init();
    // Fixed grace period instead of waiting for an IP event — TODO: replace
    // with an event-driven wait (see README reconnection TODO).
    vTaskDelay(pdMS_TO_TICKS(3000)); // Wait for WiFi connection
    // Connect WebSocket to relay; ws_event_handler receives all events.
    esp_websocket_client_config_t ws_cfg = {
        .uri = RELAY_URL,
        .buffer_size = 64 * 1024, // 64KB receive buffer
    };
    esp_websocket_client_handle_t ws = esp_websocket_client_init(&ws_cfg);
    esp_websocket_register_events(ws, WEBSOCKET_EVENT_ANY, ws_event_handler, NULL);
    esp_websocket_client_start(ws);
    ESP_LOGI(TAG, "WebSocket connecting to %s...", RELAY_URL);
    // Start touch polling task (handle passed so it can send input upstream).
    xTaskCreate(touch_task, "touch", 4096, ws, 5, NULL);
    ESP_LOGI(TAG, "Thin client running. Waiting for frames...");
}

View file

@ -0,0 +1,316 @@
# Fabric Display Build Guide
> **Goal:** Build an interactive, networked display on fabric — combining the electroluminescent textile concepts from [Shi et al. (Nature, 2021)](https://www.nature.com/articles/s41586-021-03295-8) and the LED-on-fabric approach from [Carpetlight / LED Professional](https://www.led-professional.com/resources-1/articles/lighting-fabrics-a-new-approach-for-flexible-light-sources).
---
## 1. Technology Landscape
The two source articles represent opposite ends of the same spectrum:
| | Nature Paper (EL Textile) | Carpetlight (LED Fabric) |
|---|---|---|
| **Light source** | ZnS:Cu phosphor EL units at warp-weft contacts | Discrete SMD LEDs on miniature PCBs |
| **Pixel pitch** | ~800µm (research-grade) | ~10–20mm (commercial) |
| **Addressing** | AC field at fiber intersections | Standard LED driver (DMX/PWM) |
| **Flexibility** | Fully woven, machine-washable | Rip-stop polyamide, rollable |
| **DIY feasibility** | Lab-only (ionic gel electrodes, custom fibers) | **Highly practical** |
| **Interactivity** | Demonstrated textile keyboard (capacitive) | External control only |
**Bottom line:** The Nature paper's exact process (ionic gel transparent electrodes, ZnS:Cu phosphor-coated fibers, industrial weaving) is not reproducible outside a materials science lab. But its *architecture* — a woven grid where intersections are pixels, with integrated capacitive touch — can be replicated using commercially available components.
---
## 2. Three Practical Build Approaches
### Approach A: Addressable LED Matrix on Fabric (Recommended Start)
The most accessible path. Uses off-the-shelf components to create a flexible, interactive pixel grid on textile.
#### Bill of Materials
| Component | Specific Product | Source | Est. Cost |
|---|---|---|---|
| LED panel | WS2812B 16×16 flexible matrix (256px) | AliExpress, Adafruit, SuperLightingLED | $15–40 |
| Substrate fabric | Rip-stop nylon (1.1oz, silicone-coated) | Ripstop by the Roll, Amazon | $5–15 |
| Controller | ESP32-S3 DevKit | Adafruit, SparkFun, Mouser | $8–15 |
| Touch sensing | MPR121 capacitive breakout (12 channels) | Adafruit (#1982), SparkFun | $8 |
| Touch electrodes | Conductive thread (stainless steel or silver-coated) | Adafruit (#641), SparkFun | $3–8 |
| Power | 5V 4A USB-C power bank or wall adapter | Amazon | $10–20 |
| Logic level shifter | 74AHCT125 or SN74HCT245 (3.3V→5V) | Adafruit, Mouser | $2 |
| Diffusion layer | White ripstop nylon or organza | Fabric store | $3–5 |
| Protection | 1000µF capacitor, 330Ω resistor | Any electronics supplier | $1 |
**Total: ~$55–115**
#### Construction Steps
**Step 1 — Prepare the fabric substrate**
- Cut rip-stop nylon to desired panel size + 2cm margin on all sides
- Mark a grid for the LED panel position and touch zones
- If using multiple LED panels (e.g., 4× 8×8 to make 16×16), plan the daisy-chain data path in a serpentine pattern
**Step 2 — Mount LEDs to fabric**
- **Option A (simplest):** Use flexible WS2812B matrix panels. Bond to fabric with E6000 flexible adhesive or fabric glue. The flexible PCB substrate bends with the fabric.
- **Option B (like Carpetlight):** Create fabric pockets/sleeves for LED strips. Sew channels from rip-stop, slide strips in. Allows removal for washing.
- **Option C (closest to Nature paper):** Use individual WS2812B modules. Sew each LED to fabric using conductive thread for power/ground rails, with thin silicone wire for the data line.
**Step 3 — Wire the LED matrix**
```
Power Supply (5V, 4A+)
├──► 1000µF capacitor across V+ and GND
├──► LED Matrix V+ (inject power every 64 LEDs)
└──► ESP32 Vin
GPIO pin ──► 330Ω resistor ──► 74AHCT125 ──► LED Data In
```
- Use silicone-jacketed wire (30 AWG) for flexibility
- For matrices >128 LEDs, inject power at multiple points to prevent voltage drop
- Common ground between ESP32 and LED power supply is **critical**
**Step 4 — Add capacitive touch**
- Embroider touch zones onto the fabric using conductive thread
- Create isolated zones (e.g., 4×3 grid = 12 touch areas matching MPR121's 12 inputs)
- Route conductive thread traces to an edge connector area
- Keep positive and negative traces at least 3mm apart to avoid shorts
- Connect to MPR121 breakout via I²C (SDA/SCL to ESP32)
```
Conductive Thread Layout (back of fabric):
┌─────────────────────────────┐
│ [Zone1] [Zone2] [Zone3] │
│ [Zone4] [Zone5] [Zone6] │ ← Embroidered conductive pads
│ [Zone7] [Zone8] [Zone9] │
│ ... │
│ ════════════════════════ │ ← Thread traces to edge
│ → MPR121 breakout board │
└─────────────────────────────┘
```
**Step 5 — Diffusion layer**
- Layer white organza or thin white rip-stop over the LED matrix
- A 5–10mm spacer (foam or spacer fabric mesh) between LEDs and diffusion layer softens the hotspots into smooth pixel blobs
- Sew or Velcro the diffusion layer for removability
**Step 6 — ESP32 firmware**
```cpp
// Core libraries
#include <FastLED.h>
#include <Wire.h>
#include <Adafruit_MPR121.h>
#define NUM_LEDS 256
#define DATA_PIN 16
#define MATRIX_W 16
#define MATRIX_H 16
CRGB leds[NUM_LEDS];
Adafruit_MPR121 cap = Adafruit_MPR121();
void setup() {
FastLED.addLeds<WS2812B, DATA_PIN, GRB>(leds, NUM_LEDS);
FastLED.setMaxPowerInVoltsAndMilliamps(5, 4000); // limit current
FastLED.setBrightness(80);
cap.begin(0x5A); // MPR121 default I2C address
}
void loop() {
uint16_t touched = cap.touched();
for (int i = 0; i < 12; i++) {
if (touched & (1 << i)) {
// Map touch zone i to pixel region and update
setTouchZoneColor(i, CRGB::White);
}
}
FastLED.show();
delay(16); // ~60fps
}
// Map one of the 12 touch zones (3 columns x 4 rows) onto its pixel
// region of the 16x16 matrix and fill it with `color`.
void setTouchZoneColor(int zone, CRGB color) {
  int zx = zone % 3; // 3 columns of touch zones
  int zy = zone / 3; // 4 rows of touch zones
  // Proportional bounds so the whole matrix is covered. The previous
  // fixed 5-px zone width only reached column 14 (3*5 = 15 < 16),
  // leaving pixel column 15 permanently unaddressed.
  int x0 = zx * MATRIX_W / 3;
  int x1 = (zx + 1) * MATRIX_W / 3;
  int y0 = zy * MATRIX_H / 4;
  int y1 = (zy + 1) * MATRIX_H / 4;
  for (int y = y0; y < y1; y++) {
    for (int x = x0; x < x1; x++) {
      // Serpentine wiring: odd rows run right-to-left.
      int idx = (y % 2 == 0) ? (y * MATRIX_W + x)
                             : (y * MATRIX_W + (MATRIX_W - 1 - x));
      leds[idx] = color;
    }
  }
}
```
---
### Approach B: EL Wire/Panel Woven Grid
Closer to the Nature paper's warp-weft concept, using commercial EL materials.
#### Key Components
| Component | Product | Source | Notes |
|---|---|---|---|
| EL wire (warp) | 2.3mm EL wire, multiple colors | Adafruit, SparkFun, Ellumiglow | Each color = separate AC channel |
| Conductive fiber (weft) | Silver-coated nylon thread (235Ω/ft) | Adafruit (#641) | Woven perpendicular to EL wire |
| AC inverter | 3V or 12V EL inverter | SparkFun, Adafruit | Converts DC→AC (100-200V, 1kHz) |
| AC switching | Opto-triacs (MOC3021) + triacs (BT136) | Mouser, DigiKey | One per EL channel |
| Controller | Arduino Nano or ESP32 | Standard | Drives opto-triacs via GPIO |
| Frame | Embroidery hoop or lightweight aluminum | Craft store | Keeps the weave taut |
#### Architecture
```
EL Wire Segments (warp, vertical)
║ ║ ║ ║ ║
════╬═══╬═══╬═══╬═══╬════ ← Conductive thread (weft, horizontal)
║ ║ ║ ║ ║
════╬═══╬═══╬═══╬═══╬════
║ ║ ║ ║ ║
Each ╬ = potential EL pixel (glows where AC passes through intersection)
```
**How it works:**
- EL wire segments run vertically (warp)
- Conductive threads run horizontally (weft)
- Applying AC between a specific warp wire and weft thread illuminates only their intersection
- **Row-column scanning** (multiplexing) addresses individual pixels, just like the Nature paper
> ⚠️ **Caution:** EL wire operates at 100–200V AC. Proper insulation, isolated opto-triac drivers, and careful handling are essential. This approach requires intermediate electronics experience.
#### Practical pixel resolution
- EL wire: 2.3mm diameter → minimum pitch ~5mm with spacing
- Achievable grid: ~20×20 pixels per 10cm² panel
- Color: limited to EL wire color choices (blue, green, orange, white, pink)
---
### Approach C: Screen-Printed EL Ink on Fabric
Closest to the Nature paper's pixel density, but requires screen printing equipment.
#### Materials
| Layer | Material | Supplier |
|---|---|---|
| **Base electrode** | Silver conductive ink (screen-printable) | Sun Chemical (SunTronic), Creative Materials |
| **Dielectric** | Barium titanate dielectric paste | Saralon, SPLinx |
| **Phosphor** | ZnS:Cu EL phosphor paste (SaralEL Display Ink) | Saralon (Blue/Green/Orange/White) |
| **Top electrode** | PEDOT:PSS transparent conductive ink | Heraeus (Clevios), Sigma-Aldrich |
| **Substrate** | Tightly-woven polyester or cotton fabric | Any fabric supplier |
#### Process (per panel)
1. Screen-print silver conductive traces on fabric (bottom electrode grid pattern)
2. Cure at 130°C for 10 min
3. Screen-print dielectric layer over the electrode pattern
4. Cure at 130°C for 10 min
5. Screen-print ZnS:Cu phosphor layer over dielectric
6. Cure at 130°C for 10 min
7. Screen-print transparent PEDOT:PSS top electrode
8. Cure, then seal with flexible polyurethane coating
9. Connect bus bars to AC inverter (60–200V, 400–1000Hz)
> ⚠️ This requires screen printing equipment, a curing oven, and access to specialty inks (~$50–200 per ink system from Saralon). Best suited for maker spaces with printing facilities.
---
## 3. Adding Interactivity
All approaches support the same input methods:
### Capacitive Touch (recommended)
- **Behind the display:** Embroider conductive thread pads on the back of the fabric, behind pixel zones
- **Controller:** MPR121 (12-channel) or FDC2214 (4-channel, higher sensitivity) connected to ESP32
- **Principle:** Human finger changes capacitance through the fabric layers; controller detects the change
- **Thread choices:**
- Stainless steel thread (Adafruit #641) — durable, moderate conductivity
- Silver-coated nylon (Adafruit #640) — higher conductivity, less durable after washing
### Pressure/Force Sensing
- **Velostat/Eeonyx:** Sandwich conductive fabric + piezoresistive sheet + conductive fabric
- **Use case:** Detect where and how hard someone presses
- **Resolution:** One analog reading per zone
### Gesture (proximity)
- **APDS-9960:** Detects hand swipes 10–20cm from the fabric surface
- **Use case:** Touchless control layer
---
## 4. Networking with DreamStack
The fabric display becomes a **DreamStack streaming endpoint**:
```
┌──────────────────────────────────┐
│ ESP32 on Fabric │
│ │
│ signal pixels = [256 × RGB] │──► DreamStack bitstream
│ signal touch_zones = [12 × bool]│ via WiFi → relay
│ │
│ on touch_zone[i] changed: │
│ mutate pixels[zone_region] │
└──────────────────────────────────┘
▲ │
│ ▼
Remote Input Remote Viewer
(phone/laptop) (any browser)
```
**Key integration points:**
- Pixel state is a flat signal array — efficient for bitstream delta encoding
- Touch events generate mutations that propagate upstream through the relay
- Remote clients can push pixel data downstream (animations, text, images)
- Conflict resolution (version counters) arbitrates simultaneous fabric-touch + remote-touch
---
## 5. Supplier Quick Reference
| Category | Supplier | URL | Key Products |
|---|---|---|---|
| Addressable LEDs | Adafruit | adafruit.com | NeoPixel matrices, strips |
| Addressable LEDs | SuperLightingLED | superlightingled.com | Flexible WS2812B panels |
| EL wire/thread | Ellumiglow | ellumiglow.com | SewGlo EL thread, EL wire |
| EL wire | SparkFun | sparkfun.com | EL wire, inverters, sequencers |
| EL inks | Saralon | saralon.com | SaralEL Display Inks (screen-print) |
| EL inks | SPLinx | splinx.eu | EL coatings and varnishes |
| Conductive thread | Adafruit | adafruit.com | Stainless (#641), silver (#640) |
| Conductive fabric | LessEMF | lessemf.com | Shielding fabric, conductive textiles |
| Touch IC | Adafruit | adafruit.com | MPR121 breakout (#1982) |
| Fiber optic fabric | Light Up Wear | lightupwear.com | Pre-woven fiber optic fabric |
| LED-on-fabric (commercial) | Forster Rohner | ffrosti.com | e-broidery LED textiles |
| Microcontrollers | Adafruit/SparkFun | — | ESP32-S3, Pico W |
---
## 6. Comparison: Which Approach For What?
```mermaid
graph LR
A["Want to build<br/>this weekend?"] -->|Yes| B["Approach A<br/>LED Matrix on Fabric"]
A -->|"Want EL glow<br/>aesthetic"| C["Approach B<br/>EL Wire Grid"]
A -->|"Want highest<br/>pixel density"| D["Approach C<br/>Screen-Print EL Ink"]
B --> E["+ Touch + ESP32<br/>= Interactive Display"]
C --> E
D --> E
E --> F["+ DreamStack<br/>= Networked Display"]
```
| | Approach A | Approach B | Approach C |
|---|---|---|---|
| **Time to build** | 1–2 days | 1 week | 2+ weeks |
| **Cost** | $55–115 | $80–150 | $200–500 |
| **Pixel count** | 256+ (16×16 or larger) | ~100–400 | ~1000+ |
| **Color** | Full RGB | Limited (EL colors) | Limited (phosphor colors) |
| **Flexibility** | Good (flexible PCB) | Excellent (wire) | Good (printed) |
| **Brightness** | High | Low-medium | Low |
| **Interactivity** | Easy (capacitive touch) | Moderate | Moderate |
| **Washable** | With removable pockets | Fragile | With PU sealing |
| **Skills needed** | Basic soldering, sewing | Electronics + HV safety | Screen printing + chemistry |

View file

@ -0,0 +1,79 @@
# Interactive Fabric Display — Overview
> Based on research from:
> - [Large-area display textiles integrated with functional systems (Nature, 2021)](https://www.nature.com/articles/s41586-021-03295-8)
> - [Lighting Fabrics - A New Approach for Flexible Light Sources (LED Professional)](https://www.led-professional.com/resources-1/articles/lighting-fabrics-a-new-approach-for-flexible-light-sources)
## What These Articles Describe
**Nature paper (Shi et al., 2021):** A breakthrough in **woven electroluminescent (EL) display textiles**. They weave conductive weft fibres and luminescent warp fibres together, creating ~500,000 EL pixel units at each weft-warp contact point, spaced ~800µm apart. The result is a 6m × 25cm flexible, breathable, machine-washable display fabric. They demonstrated an integrated system with a **textile keyboard** (capacitive touch) and **textile power supply** — essentially a full wearable communication device.
**LED Professional (Carpetlight):** A commercial approach to **LED-on-fabric lighting** — miniature PCBs on rip-stop polyamide, connected by conductive embroidered threads. Controllable via **DMX protocol**, tunable white (2800–5400K), and extremely lightweight (300g for a 2×1ft panel). Currently used in film/TV lighting.
---
## How to Build an Interactive Display from These Concepts
There are **three tiers**, depending on how deep you want to go:
### Tier 1: Accessible Now (LED Matrix on Fabric)
Use commercially available components to approximate the research:
| Component | Product | Est. Cost |
|---|---|---|
| **LED matrix** | WS2812B/SK6812 flexible LED strips or panels (e.g., 16×16 NeoPixel matrix) | $15–60 |
| **Substrate** | Sew/bond onto rip-stop nylon or felt | $5–10 |
| **Controller** | ESP32 or Raspberry Pi Pico W | $5–15 |
| **Touch input** | Capacitive touch sensors (MPR121) or conductive thread embroidery | $5–15 |
| **Power** | LiPo battery + boost converter | $10–20 |
**The architecture:**
1. **Addressable LED grid** sewn onto fabric → each LED is a "pixel"
2. **Capacitive touch zones** using conductive thread (like the Nature paper's keyboard)
3. **ESP32 running a DreamStack bitstream** → the display state is a signal graph, touch events mutate it, and the whole thing streams over the relay for remote interaction
### Tier 2: Electroluminescent (Closer to the Nature Paper)
Use **EL wire/panels** woven or sewn into fabric:
- **EL wire** segments as individual addressable lines
- **AC inverter** with multiplexer (e.g., custom PCB or commercial EL sequencer)
- **Woven grid pattern** — horizontal EL wires crossed with conductive warp threads
- Achievable pixel resolution: ~5–10mm pitch (vs. the paper's 800µm)
### Tier 3: Full Research Replication
This requires lab equipment — ZnS:Cu phosphor-coated fibres, ionic gel transparent electrodes, and an industrial loom. Not practical outside a university materials science lab.
---
## Where DreamStack Fits
This is a perfect use case for bitstream streaming:
```
┌─────────────────────────────┐
│ Fabric Display (ESP32) │
│ ┌───────────────────────┐ │
│ │ LED Matrix State │──┼──► DreamStack Bitstream
│ │ (signal per pixel) │ │ (streams over relay)
│ ├───────────────────────┤ │
│ │ Touch Sensor Input │──┼──► Mutations
│ └───────────────────────┘ │
└─────────────────────────────┘
▲ │
│ ▼
Remote Control Viewer
(phone/web) (any screen)
```
- The **fabric display's pixel state** is a DreamStack signal array
- **Touch on the fabric** generates mutations that stream upstream
- A **remote viewer/controller** (phone, web) can also push state down to the fabric
- Conflict resolution handles simultaneous fabric-touch + remote-touch
---
## Possible Next Steps
1. **A DreamStack `.ds` program** that models a fabric display grid as a streaming signal matrix
2. **An ESP32 firmware sketch** for driving a WS2812B matrix with capacitive touch, speaking the bitstream protocol
3. **A web-based simulator/controller** — a visual grid that mirrors the fabric display in real-time over the relay

118
docs/integration.md Normal file
View file

@ -0,0 +1,118 @@
# DreamStack Integration Guide
Embed DreamStack apps into any website with the standalone SDK (~3KB).
## Quick Start
### 1. Iframe Embed
```html
<iframe src="https://your-dreamstack-app.com"
style="border:none; width:100%; height:400px; border-radius:12px;"
sandbox="allow-scripts allow-same-origin"
loading="lazy">
</iframe>
```
### 2. Web Component
```html
<script src="dreamstack-embed.js"></script>
<ds-stream src="https://your-app.com" height="500px"></ds-stream>
```
### 3. JavaScript API
```html
<div id="app"></div>
<script src="dreamstack-embed.js"></script>
<script>
// Simple embed
DreamStack.embed('https://your-app.com', '#app', {
width: '100%',
height: '500px'
});
// With bidirectional signals
const ds = DreamStack.connect('https://your-app.com', '#app');
ds.send('theme', 'dark');
ds.on('score', (value) => console.log('Score:', value));
ds.on('*', (name, value) => console.log(name, '=', value));
</script>
```
## Framework Examples
### React
```jsx
import { useEffect, useRef } from 'react';
function DreamStackEmbed({ src }) {
const ref = useRef(null);
useEffect(() => {
const handle = DreamStack.connect(src, ref.current);
return () => handle.destroy();
}, [src]);
return <div ref={ref} />;
}
```
### Vue
```vue
<template>
<div ref="container" />
</template>
<script setup>
import { ref, onMounted, onUnmounted } from 'vue';
const container = ref(null);
let handle;
onMounted(() => {
handle = DreamStack.connect(props.src, container.value);
});
onUnmounted(() => handle?.destroy());
</script>
```
## Build Flags
```bash
# Standard build
dreamstack build app.ds
# Minified (strips whitespace, ~17% smaller)
dreamstack build app.ds --minify
# Both optimizations are always active:
# - DOM helpers (shorthand functions, ~3KB saved)
# - Tree-shaking (unused runtime features stripped, ~40-50% saved)
```
## Signal Bridge Protocol
DreamStack apps communicate via `postMessage`:
```js
// Send to DreamStack app
iframe.contentWindow.postMessage({
type: 'ds:signal',
name: 'playerName',
value: 'Alice'
}, '*');
// Receive from DreamStack app
window.addEventListener('message', (e) => {
if (e.data?.type === 'ds:signal') {
console.log(e.data.name, '=', e.data.value);
}
});
```
## Size Budget
| Build Mode | Size | Reduction |
|---|---|---|
| Default | ~95 KB | baseline |
| DOM helpers (auto) | ~95 KB | -3% |
| Tree-shaken (auto) | ~52 KB | -46% |
| + Minified | ~44 KB | -54% |
| Gzipped | ~8 KB | ~92% |

View file

@ -17,8 +17,19 @@ pub const FRAME_PIXELS: u8 = 0x01;
pub const FRAME_COMPRESSED: u8 = 0x02; pub const FRAME_COMPRESSED: u8 = 0x02;
pub const FRAME_DELTA: u8 = 0x03; pub const FRAME_DELTA: u8 = 0x03;
pub const FRAME_AUDIO_PCM: u8 = 0x10; pub const FRAME_AUDIO_PCM: u8 = 0x10;
pub const FRAME_AUDIO_COMPRESSED: u8 = 0x11;
pub const FRAME_HAPTIC: u8 = 0x20;
pub const FRAME_ACTUATOR: u8 = 0x21;
pub const FRAME_LED_MATRIX: u8 = 0x22;
pub const FRAME_SIGNAL_SYNC: u8 = 0x30; pub const FRAME_SIGNAL_SYNC: u8 = 0x30;
pub const FRAME_SIGNAL_DIFF: u8 = 0x31; pub const FRAME_SIGNAL_DIFF: u8 = 0x31;
pub const FRAME_SCHEMA_ANNOUNCE: u8 = 0x32;
pub const FRAME_SUBSCRIBE_FILTER: u8 = 0x33;
pub const FRAME_NEURAL_FRAME: u8 = 0x40;
pub const FRAME_NEURAL_AUDIO: u8 = 0x41;
pub const FRAME_NEURAL_ACTUATOR: u8 = 0x42;
pub const FRAME_NEURAL_LATENT: u8 = 0x43;
pub const FRAME_KEYFRAME: u8 = 0xF0;
pub const FRAME_PING: u8 = 0xFE; pub const FRAME_PING: u8 = 0xFE;
pub const FRAME_END: u8 = 0xFF; pub const FRAME_END: u8 = 0xFF;
@ -33,7 +44,17 @@ pub const INPUT_PTR_DOWN: u8 = 0x02;
pub const INPUT_PTR_UP: u8 = 0x03; pub const INPUT_PTR_UP: u8 = 0x03;
pub const INPUT_KEY_DOWN: u8 = 0x10; pub const INPUT_KEY_DOWN: u8 = 0x10;
pub const INPUT_KEY_UP: u8 = 0x11; pub const INPUT_KEY_UP: u8 = 0x11;
pub const INPUT_TOUCH: u8 = 0x20;
pub const INPUT_TOUCH_END: u8 = 0x21;
pub const INPUT_GAMEPAD_AXIS: u8 = 0x30;
pub const INPUT_GAMEPAD_BUTTON: u8 = 0x31;
pub const INPUT_MIDI: u8 = 0x40;
pub const INPUT_SCROLL: u8 = 0x50; pub const INPUT_SCROLL: u8 = 0x50;
pub const INPUT_RESIZE: u8 = 0x60;
pub const INPUT_VOICE: u8 = 0x70;
pub const INPUT_CAMERA: u8 = 0x71;
pub const INPUT_SENSOR: u8 = 0x80;
pub const INPUT_BCI: u8 = 0x90;
// ─── Header Encode/Decode ─── // ─── Header Encode/Decode ───
@ -200,6 +221,58 @@ pub fn input_message(input_type: u8, seq: u16, timestamp: u32, payload: &[u8]) -
build_message(input_type, FLAG_INPUT, seq, timestamp, 0, 0, payload) build_message(input_type, FLAG_INPUT, seq, timestamp, 0, 0, payload)
} }
// ─── Neural Frame Builders ───
/// Build a neural frame message (model output tensor).
#[wasm_bindgen]
pub fn neural_frame_message(seq: u16, timestamp: u32, width: u16, height: u16, tensor_data: &[u8]) -> Vec<u8> {
build_message(FRAME_NEURAL_FRAME, 0, seq, timestamp, width, height, tensor_data)
}
/// Build a neural audio synthesis output message.
#[wasm_bindgen]
pub fn neural_audio_message(seq: u16, timestamp: u32, audio_data: &[u8]) -> Vec<u8> {
build_message(FRAME_NEURAL_AUDIO, 0, seq, timestamp, 0, 0, audio_data)
}
/// Build a neural actuator command message.
#[wasm_bindgen]
pub fn neural_actuator_message(seq: u16, timestamp: u32, command_data: &[u8]) -> Vec<u8> {
build_message(FRAME_NEURAL_ACTUATOR, 0, seq, timestamp, 0, 0, command_data)
}
/// Build a neural latent space representation message.
#[wasm_bindgen]
pub fn neural_latent_message(seq: u16, timestamp: u32, latent_data: &[u8]) -> Vec<u8> {
build_message(FRAME_NEURAL_LATENT, 0, seq, timestamp, 0, 0, latent_data)
}
// ─── Audio Frame Builder ───
/// Build an Opus-compressed audio frame message.
#[wasm_bindgen]
pub fn audio_compressed_message(seq: u16, timestamp: u32, channels: u16, sample_rate_div100: u16, opus_data: &[u8]) -> Vec<u8> {
build_message(FRAME_AUDIO_COMPRESSED, FLAG_COMPRESSED, seq, timestamp, channels, sample_rate_div100, opus_data)
}
// ─── MIDI Input Builder ───
/// Build a MIDI input message: status(u8), data1(u8), data2(u8).
#[wasm_bindgen]
pub fn midi_input_message(seq: u16, timestamp: u32, status: u8, data1: u8, data2: u8) -> Vec<u8> {
build_message(INPUT_MIDI, FLAG_INPUT, seq, timestamp, 0, 0, &[status, data1, data2])
}
/// Build a sensor input message: sensor_type(u8), x(i16), y(i16), z(i16).
#[wasm_bindgen]
pub fn sensor_input_message(seq: u16, timestamp: u32, sensor_type: u8, x: i16, y: i16, z: i16) -> Vec<u8> {
let mut payload = vec![sensor_type];
payload.extend_from_slice(&x.to_le_bytes());
payload.extend_from_slice(&y.to_le_bytes());
payload.extend_from_slice(&z.to_le_bytes());
build_message(INPUT_SENSOR, FLAG_INPUT, seq, timestamp, 0, 0, &payload)
}
// ─── Tests ─── // ─── Tests ───
#[cfg(test)] #[cfg(test)]
@ -223,7 +296,6 @@ mod tests {
#[test] #[test]
fn test_rle_compression_ratio() { fn test_rle_compression_ratio() {
// 1000 zeros should compress to 3 bytes
let data = vec![0u8; 1000]; let data = vec![0u8; 1000];
let encoded = rle_encode(&data); let encoded = rle_encode(&data);
assert_eq!(encoded.len(), 3); assert_eq!(encoded.len(), 3);
@ -246,9 +318,8 @@ mod tests {
let mut curr = vec![0u8; 100]; let mut curr = vec![0u8; 100];
curr[50] = 0xFF; curr[50] = 0xFF;
curr[51] = 0xAB; curr[51] = 0xAB;
let compressed = encode_delta_rle(&curr, &prev); let compressed = encode_delta_rle(&curr, &prev);
assert!(compressed.len() < 100); // Should compress well assert!(compressed.len() < 100);
let reconstructed = decode_delta_rle(&compressed, &prev); let reconstructed = decode_delta_rle(&compressed, &prev);
assert_eq!(reconstructed, curr); assert_eq!(reconstructed, curr);
} }
@ -259,15 +330,57 @@ mod tests {
assert!(msg.len() == HEADER_SIZE + 12); assert!(msg.len() == HEADER_SIZE + 12);
let header = decode_header(&msg); let header = decode_header(&msg);
assert_eq!(header[0], FRAME_SIGNAL_DIFF as u32); assert_eq!(header[0], FRAME_SIGNAL_DIFF as u32);
assert_eq!(header[6], 12); // payload length assert_eq!(header[6], 12);
} }
#[test] #[test]
fn test_input_message() { fn test_input_message() {
let payload = vec![100u8, 0, 200, 0, 1]; // x=100, y=200, buttons=1 let payload = vec![100u8, 0, 200, 0, 1];
let msg = input_message(INPUT_POINTER, 0, 0, &payload); let msg = input_message(INPUT_POINTER, 0, 0, &payload);
let header = decode_header(&msg); let header = decode_header(&msg);
assert_eq!(header[0], INPUT_POINTER as u32); assert_eq!(header[0], INPUT_POINTER as u32);
assert_eq!(header[1], FLAG_INPUT as u32); assert_eq!(header[1], FLAG_INPUT as u32);
} }
#[test]
fn test_neural_frame_message() {
let tensor = vec![0u8; 64];
let msg = neural_frame_message(1, 100, 4, 4, &tensor);
let header = decode_header(&msg);
assert_eq!(header[0], FRAME_NEURAL_FRAME as u32);
assert_eq!(header[4], 4);
assert_eq!(header[5], 4);
assert_eq!(header[6], 64);
}
#[test]
fn test_audio_compressed_message() {
let opus = vec![0xAA; 128];
let msg = audio_compressed_message(1, 200, 2, 480, &opus);
let header = decode_header(&msg);
assert_eq!(header[0], FRAME_AUDIO_COMPRESSED as u32);
assert_eq!(header[1], FLAG_COMPRESSED as u32);
assert_eq!(header[4], 2);
assert_eq!(header[5], 480);
}
#[test]
fn test_midi_input_message() {
let msg = midi_input_message(1, 100, 0x90, 60, 100);
let header = decode_header(&msg);
assert_eq!(header[0], INPUT_MIDI as u32);
assert_eq!(header[1], FLAG_INPUT as u32);
assert_eq!(msg[HEADER_SIZE], 0x90);
assert_eq!(msg[HEADER_SIZE + 1], 60);
assert_eq!(msg[HEADER_SIZE + 2], 100);
}
#[test]
fn test_sensor_input_message() {
let msg = sensor_input_message(1, 100, 0, 100, -200, 9800);
let header = decode_header(&msg);
assert_eq!(header[0], INPUT_SENSOR as u32);
assert_eq!(header[1], FLAG_INPUT as u32);
assert_eq!(msg[HEADER_SIZE], 0);
}
} }

View file

@ -240,6 +240,74 @@ pub fn stream_end(seq: u16, timestamp: u32) -> Vec<u8> {
encode_frame(FrameType::End, seq, timestamp, 0, 0, 0, &[]) encode_frame(FrameType::End, seq, timestamp, 0, 0, 0, &[])
} }
// ─── Neural Frame Builders ───
/// Build a neural frame message (model output tensor as pixels).
pub fn neural_frame(seq: u16, timestamp: u32, width: u16, height: u16, tensor_data: &[u8]) -> Vec<u8> {
encode_frame(FrameType::NeuralFrame, seq, timestamp, width, height, 0, tensor_data)
}
/// Build a neural audio synthesis output message.
pub fn neural_audio(seq: u16, timestamp: u32, audio_data: &[u8]) -> Vec<u8> {
encode_frame(FrameType::NeuralAudio, seq, timestamp, 0, 0, 0, audio_data)
}
/// Build a neural actuator command message (learned motor control).
pub fn neural_actuator(seq: u16, timestamp: u32, command_data: &[u8]) -> Vec<u8> {
encode_frame(FrameType::NeuralActuator, seq, timestamp, 0, 0, 0, command_data)
}
/// Build a neural latent space representation message.
pub fn neural_latent(seq: u16, timestamp: u32, latent_data: &[u8]) -> Vec<u8> {
encode_frame(FrameType::NeuralLatent, seq, timestamp, 0, 0, 0, latent_data)
}
// ─── Audio Frame Builder ───
/// Build an Opus-compressed audio frame message.
pub fn audio_compressed(seq: u16, timestamp: u32, channels: u16, sample_rate_div100: u16, opus_data: &[u8]) -> Vec<u8> {
encode_frame(FrameType::AudioCompressed, seq, timestamp, channels, sample_rate_div100, FLAG_COMPRESSED, opus_data)
}
// ─── Remaining Input Builders ───
/// Build a MIDI input message.
pub fn midi_input(seq: u16, timestamp: u32, event: &MidiEvent) -> Vec<u8> {
encode_input(InputType::Midi, seq, timestamp, &event.encode())
}
/// Build a voice/audio input message (header + PCM payload).
pub fn voice_input(seq: u16, timestamp: u32, event: &VoiceInputEvent, pcm_data: &[u8]) -> Vec<u8> {
let header_bytes = event.encode();
let mut payload = Vec::with_capacity(header_bytes.len() + pcm_data.len());
payload.extend_from_slice(&header_bytes);
payload.extend_from_slice(pcm_data);
encode_input(InputType::VoiceInput, seq, timestamp, &payload)
}
/// Build a camera frame input message (header + frame payload).
pub fn camera_input(seq: u16, timestamp: u32, event: &CameraInputEvent, frame_data: &[u8]) -> Vec<u8> {
let header_bytes = event.encode();
let mut payload = Vec::with_capacity(header_bytes.len() + frame_data.len());
payload.extend_from_slice(&header_bytes);
payload.extend_from_slice(frame_data);
encode_input(InputType::CameraInput, seq, timestamp, &payload)
}
/// Build a sensor telemetry input message.
pub fn sensor_input(seq: u16, timestamp: u32, event: &SensorInputEvent) -> Vec<u8> {
encode_input(InputType::SensorInput, seq, timestamp, &event.encode())
}
/// Build a BCI (brain-computer interface) input message (header + samples payload).
pub fn bci_input(seq: u16, timestamp: u32, event: &BciInputEvent, samples: &[u8]) -> Vec<u8> {
let header_bytes = event.encode();
let mut payload = Vec::with_capacity(header_bytes.len() + samples.len());
payload.extend_from_slice(&header_bytes);
payload.extend_from_slice(samples);
encode_input(InputType::BciInput, seq, timestamp, &payload)
}
// ─── Tests ─── // ─── Tests ───
#[cfg(test)] #[cfg(test)]
@ -428,4 +496,104 @@ mod tests {
let decoded = decode_message(&msg).unwrap(); let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, InputType::GamepadButton as u8); assert_eq!(decoded.header.frame_type, InputType::GamepadButton as u8);
} }
#[test]
fn neural_frame_builders() {
let tensor = vec![0.5f32, 0.3, 0.8, 0.1].iter()
.flat_map(|f| f.to_le_bytes()).collect::<Vec<u8>>();
let msg = neural_frame(1, 100, 2, 2, &tensor);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, FrameType::NeuralFrame as u8);
assert_eq!(decoded.header.width, 2);
assert_eq!(decoded.header.height, 2);
assert_eq!(decoded.payload, &tensor[..]);
}
#[test]
fn neural_audio_builder() {
let audio = vec![0u8; 256];
let msg = neural_audio(1, 200, &audio);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, FrameType::NeuralAudio as u8);
assert_eq!(decoded.payload.len(), 256);
}
#[test]
fn neural_actuator_builder() {
let cmd = vec![1, 128, 0, 64]; // actuator command
let msg = neural_actuator(1, 300, &cmd);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, FrameType::NeuralActuator as u8);
}
#[test]
fn neural_latent_builder() {
let latent = vec![0u8; 512]; // latent vector
let msg = neural_latent(1, 400, &latent);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, FrameType::NeuralLatent as u8);
assert_eq!(decoded.payload.len(), 512);
}
#[test]
fn audio_compressed_builder() {
let opus = vec![0xAA; 128]; // fake opus data
let msg = audio_compressed(1, 500, 2, 480, &opus);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, FrameType::AudioCompressed as u8);
assert_eq!(decoded.header.width, 2); // channels
assert_eq!(decoded.header.height, 480); // sample_rate/100
assert_eq!(decoded.header.flags & FLAG_COMPRESSED, FLAG_COMPRESSED);
}
#[test]
fn midi_input_builder() {
let evt = MidiEvent { status: 0x90, data1: 60, data2: 100 };
let msg = midi_input(1, 100, &evt);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, InputType::Midi as u8);
assert!(decoded.header.is_input());
let evt2 = MidiEvent::decode(decoded.payload).unwrap();
assert_eq!(evt2.status, 0x90);
}
#[test]
fn sensor_input_builder() {
let evt = SensorInputEvent { sensor_type: 0, x: 100, y: -200, z: 9800 };
let msg = sensor_input(1, 100, &evt);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, InputType::SensorInput as u8);
let evt2 = SensorInputEvent::decode(decoded.payload).unwrap();
assert_eq!(evt2.z, 9800);
}
#[test]
fn voice_input_builder() {
let header = VoiceInputEvent { sample_rate: 48000, channels: 1, format: 0 };
let pcm = vec![0u8; 960]; // 10ms of mono f32 at 48kHz
let msg = voice_input(1, 100, &header, &pcm);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, InputType::VoiceInput as u8);
assert_eq!(decoded.payload.len(), VoiceInputEvent::SIZE + 960);
}
#[test]
fn camera_input_builder() {
let header = CameraInputEvent { width: 640, height: 480, format: 1 };
let jpeg = vec![0xFFu8; 1024];
let msg = camera_input(1, 100, &header, &jpeg);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, InputType::CameraInput as u8);
assert_eq!(decoded.payload.len(), CameraInputEvent::SIZE + 1024);
}
#[test]
fn bci_input_builder() {
let header = BciInputEvent { channel_count: 8, sample_rate: 256 };
let samples = vec![0u8; 32]; // 8 channels × 1 f32 sample
let msg = bci_input(1, 100, &header, &samples);
let decoded = decode_message(&msg).unwrap();
assert_eq!(decoded.header.frame_type, InputType::BciInput as u8);
assert_eq!(decoded.payload.len(), BciInputEvent::SIZE + 32);
}
} }

View file

@ -1,8 +1,8 @@
//! DreamStack Bitstream Relay Server //! DreamStack Bitstream Relay Server
//! //!
//! Usage: `cargo run -p ds-stream` //! Usage: `cargo run -p ds-stream [OPTIONS]`
//! //!
//! Starts a WebSocket relay on port 9100. //! Starts a WebSocket relay on port 9100 (default).
//! - Source connects to ws://localhost:9100/source //! - Source connects to ws://localhost:9100/source
//! - Receivers connect to ws://localhost:9100/stream //! - Receivers connect to ws://localhost:9100/stream
@ -10,17 +10,86 @@ use ds_stream::relay::{run_relay, RelayConfig};
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let port = std::env::args() let args: Vec<String> = std::env::args().collect();
.nth(1)
.and_then(|s| s.parse::<u16>().ok())
.unwrap_or(9100);
let config = RelayConfig { let mut config = RelayConfig::default();
addr: format!("0.0.0.0:{}", port).parse().unwrap(),
..Default::default()
};
eprintln!("Starting DreamStack Bitstream Relay on port {}...", port); // Parse arguments: --port, --replay-depth, --record, --federate, --max-receivers, --max-channels
let mut i = 1;
while i < args.len() {
match args[i].as_str() {
"--port" | "-p" => {
i += 1;
if let Some(val) = args.get(i) {
if let Ok(port) = val.parse::<u16>() {
config.addr = format!("0.0.0.0:{}", port).parse().unwrap();
}
}
}
"--replay-depth" => {
i += 1;
if let Some(val) = args.get(i) {
if let Ok(depth) = val.parse::<usize>() {
config.replay_depth = depth;
}
}
}
"--record" => {
i += 1;
if let Some(val) = args.get(i) {
config.recording_dir = Some(val.clone());
}
}
"--federate" => {
i += 1;
if let Some(val) = args.get(i) {
config.federation_upstreams.push(val.clone());
}
}
"--max-receivers" => {
i += 1;
if let Some(val) = args.get(i) {
if let Ok(n) = val.parse::<usize>() {
config.max_receivers = n;
}
}
}
"--max-channels" => {
i += 1;
if let Some(val) = args.get(i) {
if let Ok(n) = val.parse::<usize>() {
config.max_channels = n;
}
}
}
"--help" | "-h" => {
eprintln!("DreamStack Bitstream Relay Server\n");
eprintln!("USAGE:");
eprintln!(" ds-stream [OPTIONS]\n");
eprintln!("OPTIONS:");
eprintln!(" -p, --port <PORT> Port to listen on (default: 9100)");
eprintln!(" --replay-depth <N> Frames to keep in replay buffer (default: 0 = disabled)");
eprintln!(" --record <DIR> Directory to record frames to (.dsrec files)");
eprintln!(" --federate <URL> Upstream relay URL (can be repeated)");
eprintln!(" --max-receivers <N> Max receivers per channel (default: 64)");
eprintln!(" --max-channels <N> Max channels (default: 256)");
eprintln!(" -h, --help Print help");
std::process::exit(0);
}
other => {
// Legacy: first positional arg = port
if let Ok(port) = other.parse::<u16>() {
config.addr = format!("0.0.0.0:{}", port).parse().unwrap();
} else {
eprintln!("Unknown argument: {other}. Use --help for usage.");
std::process::exit(1);
}
}
}
i += 1;
}
eprintln!("Starting DreamStack Bitstream Relay on {}...", config.addr);
if let Err(e) = run_relay(config).await { if let Err(e) = run_relay(config).await {
eprintln!("Relay error: {}", e); eprintln!("Relay error: {}", e);
std::process::exit(1); std::process::exit(1);

View file

@ -446,6 +446,172 @@ impl ResizeEvent {
} }
} }
/// MIDI input event: status byte + two data bytes.
#[derive(Debug, Clone, Copy)]
pub struct MidiEvent {
/// MIDI status byte (e.g., 0x90 = note on, 0x80 = note off)
pub status: u8,
/// First data byte (e.g., note number 0-127)
pub data1: u8,
/// Second data byte (e.g., velocity 0-127)
pub data2: u8,
}
impl MidiEvent {
pub const SIZE: usize = 3;
pub fn encode(&self) -> [u8; Self::SIZE] {
[self.status, self.data1, self.data2]
}
pub fn decode(buf: &[u8]) -> Option<Self> {
if buf.len() < Self::SIZE {
return None;
}
Some(Self {
status: buf[0],
data1: buf[1],
data2: buf[2],
})
}
}
/// Voice/audio input header. Payload follows as raw PCM samples.
#[derive(Debug, Clone, Copy)]
pub struct VoiceInputEvent {
    /// Sample rate in Hz (e.g., 16000, 44100, 48000)
    pub sample_rate: u16,
    /// Number of channels (1 = mono, 2 = stereo)
    pub channels: u8,
    /// Format: 0 = f32, 1 = i16, 2 = opus
    pub format: u8,
}

impl VoiceInputEvent {
    /// Fixed wire size of the header, in bytes.
    pub const SIZE: usize = 4;

    /// Serialize to the 4-byte wire layout:
    /// [sample_rate LE (2), channels, format].
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let [r0, r1] = self.sample_rate.to_le_bytes();
        [r0, r1, self.channels, self.format]
    }

    /// Parse a header from the front of `buf`.
    /// Returns `None` when fewer than `SIZE` bytes are available.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        let bytes = buf.get(..Self::SIZE)?;
        Some(Self {
            sample_rate: u16::from_le_bytes([bytes[0], bytes[1]]),
            channels: bytes[2],
            format: bytes[3],
        })
    }
}
/// Camera frame input header. Payload follows as compressed image data.
#[derive(Debug, Clone, Copy)]
pub struct CameraInputEvent {
    /// Frame width in pixels
    pub width: u16,
    /// Frame height in pixels
    pub height: u16,
    /// Format: 0 = RGBA, 1 = JPEG, 2 = WebP, 3 = H264 NAL
    pub format: u8,
}

impl CameraInputEvent {
    /// Fixed wire size of the header, in bytes.
    pub const SIZE: usize = 5;

    /// Serialize to the 5-byte wire layout:
    /// [width LE (2), height LE (2), format].
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let [w0, w1] = self.width.to_le_bytes();
        let [h0, h1] = self.height.to_le_bytes();
        [w0, w1, h0, h1, self.format]
    }

    /// Parse a header from the front of `buf`.
    /// Returns `None` when fewer than `SIZE` bytes are available.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        let bytes = buf.get(..Self::SIZE)?;
        Some(Self {
            width: u16::from_le_bytes([bytes[0], bytes[1]]),
            height: u16::from_le_bytes([bytes[2], bytes[3]]),
            format: bytes[4],
        })
    }
}
/// Sensor telemetry input (accelerometer, gyroscope, magnetometer, etc.).
#[derive(Debug, Clone, Copy)]
pub struct SensorInputEvent {
    /// Sensor type: 0 = accelerometer, 1 = gyroscope, 2 = magnetometer, 3 = orientation
    pub sensor_type: u8,
    /// X-axis value (scaled: real_value * 1000)
    pub x: i16,
    /// Y-axis value (scaled: real_value * 1000)
    pub y: i16,
    /// Z-axis value (scaled: real_value * 1000)
    pub z: i16,
}

impl SensorInputEvent {
    /// Fixed wire size of an encoded sensor reading, in bytes.
    pub const SIZE: usize = 7;

    /// Serialize to the 7-byte wire layout:
    /// [sensor_type, x LE (2), y LE (2), z LE (2)].
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let [x0, x1] = self.x.to_le_bytes();
        let [y0, y1] = self.y.to_le_bytes();
        let [z0, z1] = self.z.to_le_bytes();
        [self.sensor_type, x0, x1, y0, y1, z0, z1]
    }

    /// Parse a reading from the front of `buf`.
    /// Returns `None` when fewer than `SIZE` bytes are available.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        let bytes = buf.get(..Self::SIZE)?;
        Some(Self {
            sensor_type: bytes[0],
            x: i16::from_le_bytes([bytes[1], bytes[2]]),
            y: i16::from_le_bytes([bytes[3], bytes[4]]),
            z: i16::from_le_bytes([bytes[5], bytes[6]]),
        })
    }
}
/// BCI (Brain-Computer Interface) input header. Payload follows as f32 samples.
#[derive(Debug, Clone, Copy)]
pub struct BciInputEvent {
    /// Number of EEG/neural channels
    pub channel_count: u8,
    /// Sample rate in Hz
    pub sample_rate: u16,
}

impl BciInputEvent {
    /// Fixed wire size of the header, in bytes.
    pub const SIZE: usize = 3;

    /// Serialize to the 3-byte wire layout:
    /// [channel_count, sample_rate LE (2)].
    pub fn encode(&self) -> [u8; Self::SIZE] {
        let [r0, r1] = self.sample_rate.to_le_bytes();
        [self.channel_count, r0, r1]
    }

    /// Parse a header from the front of `buf`.
    /// Returns `None` when fewer than `SIZE` bytes are available.
    pub fn decode(buf: &[u8]) -> Option<Self> {
        let bytes = buf.get(..Self::SIZE)?;
        Some(Self {
            channel_count: bytes[0],
            sample_rate: u16::from_le_bytes([bytes[1], bytes[2]]),
        })
    }
}
// ─── Tests ─── // ─── Tests ───
@ -589,4 +755,79 @@ mod tests {
fn gamepad_axis_too_short() { fn gamepad_axis_too_short() {
assert!(GamepadAxisEvent::decode(&[0u8; 2]).is_none()); assert!(GamepadAxisEvent::decode(&[0u8; 2]).is_none());
} }
// ─── Input-event wire format tests ───
// Each event type must survive an encode → decode roundtrip with all
// fields intact, and decode() must return None for buffers shorter
// than the type's fixed SIZE.
#[test]
fn midi_event_roundtrip() {
    let evt = MidiEvent { status: 0x90, data1: 60, data2: 100 };
    let encoded = evt.encode();
    let decoded = MidiEvent::decode(&encoded).unwrap();
    assert_eq!(decoded.status, 0x90);
    assert_eq!(decoded.data1, 60);
    assert_eq!(decoded.data2, 100);
}

#[test]
fn midi_event_too_short() {
    // 2 bytes < MidiEvent::SIZE (3)
    assert!(MidiEvent::decode(&[0u8; 2]).is_none());
}

#[test]
fn voice_input_event_roundtrip() {
    let evt = VoiceInputEvent { sample_rate: 48000, channels: 2, format: 0 };
    let encoded = evt.encode();
    let decoded = VoiceInputEvent::decode(&encoded).unwrap();
    assert_eq!(decoded.sample_rate, 48000);
    assert_eq!(decoded.channels, 2);
    assert_eq!(decoded.format, 0);
}

#[test]
fn voice_input_event_too_short() {
    // 3 bytes < VoiceInputEvent::SIZE (4)
    assert!(VoiceInputEvent::decode(&[0u8; 3]).is_none());
}

#[test]
fn camera_input_event_roundtrip() {
    let evt = CameraInputEvent { width: 1280, height: 720, format: 2 };
    let encoded = evt.encode();
    let decoded = CameraInputEvent::decode(&encoded).unwrap();
    assert_eq!(decoded.width, 1280);
    assert_eq!(decoded.height, 720);
    assert_eq!(decoded.format, 2);
}

#[test]
fn camera_input_event_too_short() {
    // 4 bytes < CameraInputEvent::SIZE (5)
    assert!(CameraInputEvent::decode(&[0u8; 4]).is_none());
}

#[test]
fn sensor_input_event_roundtrip() {
    // Negative x exercises the signed i16 little-endian roundtrip.
    let evt = SensorInputEvent { sensor_type: 1, x: -1000, y: 500, z: 9800 };
    let encoded = evt.encode();
    let decoded = SensorInputEvent::decode(&encoded).unwrap();
    assert_eq!(decoded.sensor_type, 1);
    assert_eq!(decoded.x, -1000);
    assert_eq!(decoded.y, 500);
    assert_eq!(decoded.z, 9800);
}

#[test]
fn sensor_input_event_too_short() {
    // 6 bytes < SensorInputEvent::SIZE (7)
    assert!(SensorInputEvent::decode(&[0u8; 6]).is_none());
}

#[test]
fn bci_input_event_roundtrip() {
    let evt = BciInputEvent { channel_count: 32, sample_rate: 256 };
    let encoded = evt.encode();
    let decoded = BciInputEvent::decode(&encoded).unwrap();
    assert_eq!(decoded.channel_count, 32);
    assert_eq!(decoded.sample_rate, 256);
}

#[test]
fn bci_input_event_too_short() {
    // 2 bytes < BciInputEvent::SIZE (3)
    assert!(BciInputEvent::decode(&[0u8; 2]).is_none());
}
} }

View file

@ -58,6 +58,13 @@ pub struct RelayConfig {
pub channel_gc_interval_secs: u64, pub channel_gc_interval_secs: u64,
/// Source reconnect grace period in seconds — keep cache alive after source disconnect. /// Source reconnect grace period in seconds — keep cache alive after source disconnect.
pub source_reconnect_grace_secs: u64, pub source_reconnect_grace_secs: u64,
/// Replay depth: number of frames to keep in ring buffer for time-travel replay.
/// Set to 0 to disable replay (default: 0, catchup-only). Set >0 for full replay.
pub replay_depth: usize,
/// Upstream relay URLs for federation — frames are forwarded to these relays.
pub federation_upstreams: Vec<String>,
/// Recording directory — if set, incoming frames are written to disk.
pub recording_dir: Option<String>,
} }
impl Default for RelayConfig { impl Default for RelayConfig {
@ -71,6 +78,9 @@ impl Default for RelayConfig {
max_channels: 256, max_channels: 256,
channel_gc_interval_secs: 60, channel_gc_interval_secs: 60,
source_reconnect_grace_secs: 30, source_reconnect_grace_secs: 30,
replay_depth: 0,
federation_upstreams: Vec::new(),
recording_dir: None,
} }
} }
} }
@ -109,6 +119,11 @@ pub struct StateCache {
/// Accumulated signal diffs since last sync. /// Accumulated signal diffs since last sync.
/// Late-joining receivers get: last_signal_sync + all diffs. /// Late-joining receivers get: last_signal_sync + all diffs.
pub pending_signal_diffs: Vec<Vec<u8>>, pub pending_signal_diffs: Vec<Vec<u8>>,
/// Replay ring buffer — stores the last N frames for time-travel replay.
/// When replay_depth > 0, receivers can request historical frames.
pub replay_buffer: Vec<Vec<u8>>,
/// Maximum replay buffer depth (0 = disabled).
pub replay_depth: usize,
} }
impl StateCache { impl StateCache {
@ -120,6 +135,14 @@ impl StateCache {
let frame_type = msg[0]; let frame_type = msg[0];
let flags = msg[1]; let flags = msg[1];
// Add to replay ring buffer if enabled
if self.replay_depth > 0 {
self.replay_buffer.push(msg.to_vec());
if self.replay_buffer.len() > self.replay_depth {
self.replay_buffer.remove(0);
}
}
match FrameType::from_u8(frame_type) { match FrameType::from_u8(frame_type) {
// Cache keyframes (pixel or signal sync) // Cache keyframes (pixel or signal sync)
Some(FrameType::Pixels) if flags & FLAG_KEYFRAME != 0 => { Some(FrameType::Pixels) if flags & FLAG_KEYFRAME != 0 => {
@ -266,6 +289,7 @@ impl StateCache {
self.last_keyframe = None; self.last_keyframe = None;
self.last_signal_sync = None; self.last_signal_sync = None;
self.pending_signal_diffs.clear(); self.pending_signal_diffs.clear();
self.replay_buffer.clear();
} }
/// Returns true if this cache has any state. /// Returns true if this cache has any state.
@ -273,6 +297,22 @@ impl StateCache {
self.last_keyframe.is_some() self.last_keyframe.is_some()
|| self.last_signal_sync.is_some() || self.last_signal_sync.is_some()
|| !self.pending_signal_diffs.is_empty() || !self.pending_signal_diffs.is_empty()
|| !self.replay_buffer.is_empty()
}
/// Get the replay buffer frames for time-travel playback.
/// Returns frames from `start_index` onwards.
pub fn replay_frames(&self, start_index: usize) -> &[Vec<u8>] {
if start_index < self.replay_buffer.len() {
&self.replay_buffer[start_index..]
} else {
&[]
}
}
/// Total number of frames in the replay buffer.
pub fn replay_len(&self) -> usize {
self.replay_buffer.len()
} }
} }
@ -298,7 +338,7 @@ struct ChannelState {
} }
impl ChannelState { impl ChannelState {
fn new(frame_buffer_size: usize, max_receivers: usize) -> Self { fn new(frame_buffer_size: usize, max_receivers: usize, replay_depth: usize) -> Self {
let (frame_tx, _) = broadcast::channel(frame_buffer_size); let (frame_tx, _) = broadcast::channel(frame_buffer_size);
let (input_tx, input_rx) = mpsc::channel(256); let (input_tx, input_rx) = mpsc::channel(256);
let (signaling_tx, _) = broadcast::channel(64); let (signaling_tx, _) = broadcast::channel(64);
@ -308,7 +348,10 @@ impl ChannelState {
input_rx: Some(input_rx), input_rx: Some(input_rx),
signaling_tx, signaling_tx,
stats: RelayStats::default(), stats: RelayStats::default(),
cache: StateCache::default(), cache: StateCache {
replay_depth,
..StateCache::default()
},
source_disconnect_time: None, source_disconnect_time: None,
max_receivers, max_receivers,
schema: None, schema: None,
@ -344,16 +387,22 @@ struct RelayState {
max_channels: usize, max_channels: usize,
/// Server start time /// Server start time
start_time: Instant, start_time: Instant,
/// Replay depth for new channels
replay_depth: usize,
/// Recording directory (None = disabled)
recording_dir: Option<String>,
} }
impl RelayState { impl RelayState {
fn new(frame_buffer_size: usize, max_receivers: usize, max_channels: usize) -> Self { fn new(frame_buffer_size: usize, max_receivers: usize, max_channels: usize, replay_depth: usize, recording_dir: Option<String>) -> Self {
Self { Self {
channels: HashMap::new(), channels: HashMap::new(),
frame_buffer_size, frame_buffer_size,
max_receivers, max_receivers,
max_channels, max_channels,
start_time: Instant::now(), start_time: Instant::now(),
replay_depth,
recording_dir,
} }
} }
@ -369,6 +418,7 @@ impl RelayState {
let channel = Arc::new(RwLock::new(ChannelState::new( let channel = Arc::new(RwLock::new(ChannelState::new(
self.frame_buffer_size, self.frame_buffer_size,
self.max_receivers, self.max_receivers,
self.replay_depth,
))); )));
self.channels.insert(name.to_string(), channel.clone()); self.channels.insert(name.to_string(), channel.clone());
Some(channel) Some(channel)
@ -403,6 +453,29 @@ fn parse_path(path: &str) -> ConnectionRole {
} }
} }
/// Check if a channel name matches a wildcard pattern (channel groups).
/// Supports `*` at the end for prefix matching: `games/*` matches `games/chess`.
///
/// Matching rules:
/// - exact pattern == channel
/// - `*` matches every channel
/// - `prefix/*` matches `prefix/<non-empty segment>` only; the `/` boundary
///   is required, so `games/*` matches neither `games` nor `gamesfoo`.
pub fn channel_matches(pattern: &str, channel: &str) -> bool {
    if pattern == channel || pattern == "*" {
        return true;
    }
    if let Some(prefix) = pattern.strip_suffix("/*") {
        // Fix: previously only `starts_with(prefix)` was checked (with the
        // `/` stripped along with the `*`), so `games/*` wrongly matched
        // `gamesfoo`. Require the literal `/` separator followed by a
        // non-empty remainder.
        return channel
            .strip_prefix(prefix)
            .and_then(|rest| rest.strip_prefix('/'))
            .map_or(false, |segment| !segment.is_empty());
    }
    false
}
/// Find all channels matching a wildcard pattern.
/// Returns owned channel names in map-iteration order (unsorted).
pub(crate) fn find_matching_channels(state: &RelayState, pattern: &str) -> Vec<String> {
    let mut matched = Vec::new();
    for name in state.channels.keys() {
        if channel_matches(pattern, name) {
            matched.push(name.clone());
        }
    }
    matched
}
/// Run the WebSocket relay server. /// Run the WebSocket relay server.
pub async fn run_relay(config: RelayConfig) -> Result<(), Box<dyn std::error::Error>> { pub async fn run_relay(config: RelayConfig) -> Result<(), Box<dyn std::error::Error>> {
let listener = TcpListener::bind(&config.addr).await?; let listener = TcpListener::bind(&config.addr).await?;
@ -419,12 +492,32 @@ pub async fn run_relay(config: RelayConfig) -> Result<(), Box<dyn std::error::Er
eprintln!("╚══════════════════════════════════════════════════╝"); eprintln!("╚══════════════════════════════════════════════════╝");
let grace_secs = config.source_reconnect_grace_secs; let grace_secs = config.source_reconnect_grace_secs;
let replay_depth = config.replay_depth;
let recording_dir = config.recording_dir.clone();
let federation_upstreams = config.federation_upstreams.clone();
let state = Arc::new(RwLock::new(RelayState::new( let state = Arc::new(RwLock::new(RelayState::new(
config.frame_buffer_size, config.frame_buffer_size,
config.max_receivers, config.max_receivers,
config.max_channels, config.max_channels,
config.replay_depth,
config.recording_dir.clone(),
))); )));
// Log the feature status
if replay_depth > 0 {
eprintln!("║ Replay depth: {:>4} frames ║", replay_depth);
}
if let Some(ref dir) = recording_dir {
eprintln!("║ Recording to: {}", dir);
// Ensure recording directory exists
std::fs::create_dir_all(dir).unwrap_or_else(|e| {
eprintln!("[relay] Warning: could not create recording dir {}: {}", dir, e);
});
}
if !federation_upstreams.is_empty() {
eprintln!("║ Federation: {} upstream(s) ║", federation_upstreams.len());
}
// Background: periodic stats + channel GC // Background: periodic stats + channel GC
{ {
let state = state.clone(); let state = state.clone();
@ -488,6 +581,52 @@ pub async fn run_relay(config: RelayConfig) -> Result<(), Box<dyn std::error::Er
}); });
} }
// Background: federation forwarding to upstream relays
for upstream_url in &federation_upstreams {
let url = upstream_url.clone();
let state = state.clone();
tokio::spawn(async move {
let mut backoff = Duration::from_secs(1);
loop {
eprintln!("[relay:federation] Connecting to upstream: {url}");
match tokio_tungstenite::connect_async(&url).await {
Ok((mut ws, _)) => {
eprintln!("[relay:federation] Connected to {url}");
backoff = Duration::from_secs(1);
// Subscribe to the default channel and forward frames
let frame_rx = {
let s = state.read().await;
if let Some(ch) = s.channels.get("default") {
let cs = ch.read().await;
Some(cs.frame_tx.subscribe())
} else {
None
}
};
if let Some(mut rx) = frame_rx {
while let Ok(frame) = rx.recv().await {
use futures_util::SinkExt;
if ws.send(Message::Binary(frame.into())).await.is_err() {
break;
}
}
}
eprintln!("[relay:federation] Disconnected from {url}");
}
Err(e) => {
eprintln!("[relay:federation] Failed to connect to {url}: {e}");
}
}
// Exponential backoff on reconnect
tokio::time::sleep(backoff).await;
backoff = (backoff * 2).min(Duration::from_secs(30));
}
});
}
while let Ok((stream, addr)) = listener.accept().await { while let Ok((stream, addr)) = listener.accept().await {
let state = state.clone(); let state = state.clone();
let keepalive_interval = config.keepalive_interval_secs; let keepalive_interval = config.keepalive_interval_secs;
@ -717,7 +856,12 @@ async fn handle_connection(
match role { match role {
ConnectionRole::Source(ref _name) => { ConnectionRole::Source(ref _name) => {
eprintln!("[relay:{channel_name}] Source connected: {addr}"); eprintln!("[relay:{channel_name}] Source connected: {addr}");
handle_source(ws_stream, addr, channel, &channel_name, keepalive_interval).await; // Get recording_dir from relay state
let recording_dir = {
let s = state.read().await;
s.recording_dir.clone()
};
handle_source(ws_stream, addr, channel, &channel_name, keepalive_interval, recording_dir).await;
} }
ConnectionRole::Receiver(ref _name) => { ConnectionRole::Receiver(ref _name) => {
eprintln!("[relay:{channel_name}] Receiver connected: {addr}"); eprintln!("[relay:{channel_name}] Receiver connected: {addr}");
@ -743,6 +887,7 @@ async fn handle_source(
channel: Arc<RwLock<ChannelState>>, channel: Arc<RwLock<ChannelState>>,
channel_name: &str, channel_name: &str,
keepalive_interval: u64, keepalive_interval: u64,
recording_dir: Option<String>,
) { ) {
let (mut ws_sink, mut ws_source) = ws_stream.split(); let (mut ws_sink, mut ws_source) = ws_stream.split();
@ -798,6 +943,28 @@ async fn handle_source(
} }
}); });
// Open recording file if configured
let mut recording_file = if let Some(ref dir) = recording_dir {
let path = format!("{}/{}.dsrec", dir, channel_name.replace('/', "_"));
match tokio::fs::OpenOptions::new()
.create(true)
.append(true)
.open(&path)
.await
{
Ok(f) => {
eprintln!("[relay:{channel_name}] Recording to {path}");
Some(f)
}
Err(e) => {
eprintln!("[relay:{channel_name}] Warning: could not open recording file {path}: {e}");
None
}
}
} else {
None
};
// Receive frames from source → broadcast to receivers // Receive frames from source → broadcast to receivers
let channel_name_owned = channel_name.to_string(); let channel_name_owned = channel_name.to_string();
while let Some(Ok(msg)) = ws_source.next().await { while let Some(Ok(msg)) = ws_source.next().await {
@ -825,6 +992,15 @@ async fn handle_source(
cs.stats.last_frame_timestamp = ts; cs.stats.last_frame_timestamp = ts;
} }
} }
// Record frame to disk (length-delimited: [u32 len][frame bytes])
if let Some(ref mut file) = recording_file {
use tokio::io::AsyncWriteExt;
let len_bytes = (data_vec.len() as u32).to_le_bytes();
let _ = file.write_all(&len_bytes).await;
let _ = file.write_all(&data_vec).await;
}
// Broadcast to all receivers on this channel // Broadcast to all receivers on this channel
let _ = frame_tx.send(data_vec); let _ = frame_tx.send(data_vec);
} }
@ -1330,7 +1506,7 @@ mod tests {
#[test] #[test]
fn channel_state_creation() { fn channel_state_creation() {
let mut state = RelayState::new(16, 64, 256); let mut state = RelayState::new(16, 64, 256, 0, None);
let ch1 = state.get_or_create_channel("main").unwrap(); let ch1 = state.get_or_create_channel("main").unwrap();
let ch2 = state.get_or_create_channel("player1").unwrap(); let ch2 = state.get_or_create_channel("player1").unwrap();
let ch1_again = state.get_or_create_channel("main").unwrap(); let ch1_again = state.get_or_create_channel("main").unwrap();
@ -1342,7 +1518,7 @@ mod tests {
#[test] #[test]
fn channel_max_limit() { fn channel_max_limit() {
let mut state = RelayState::new(16, 64, 2); let mut state = RelayState::new(16, 64, 2, 0, None);
assert!(state.get_or_create_channel("a").is_some()); assert!(state.get_or_create_channel("a").is_some());
assert!(state.get_or_create_channel("b").is_some()); assert!(state.get_or_create_channel("b").is_some());
assert!(state.get_or_create_channel("c").is_none()); // max reached assert!(state.get_or_create_channel("c").is_none()); // max reached
@ -1351,13 +1527,13 @@ mod tests {
#[test] #[test]
fn channel_idle_detection() { fn channel_idle_detection() {
let cs = ChannelState::new(16, 64); let cs = ChannelState::new(16, 64, 0);
assert!(cs.is_idle()); // no source, no receivers, no cache assert!(cs.is_idle()); // no source, no receivers, no cache
} }
#[test] #[test]
fn channel_not_idle_with_cache() { fn channel_not_idle_with_cache() {
let mut cs = ChannelState::new(16, 64); let mut cs = ChannelState::new(16, 64, 0);
let sync = crate::codec::signal_sync_frame(0, 0, b"{}"); let sync = crate::codec::signal_sync_frame(0, 0, b"{}");
cs.cache.process_frame(&sync); cs.cache.process_frame(&sync);
assert!(!cs.is_idle()); // has cached state assert!(!cs.is_idle()); // has cached state
@ -1365,27 +1541,27 @@ mod tests {
#[test] #[test]
fn channel_not_idle_with_source() { fn channel_not_idle_with_source() {
let mut cs = ChannelState::new(16, 64); let mut cs = ChannelState::new(16, 64, 0);
cs.stats.source_connected = true; cs.stats.source_connected = true;
assert!(!cs.is_idle()); assert!(!cs.is_idle());
} }
#[test] #[test]
fn channel_not_idle_with_receivers() { fn channel_not_idle_with_receivers() {
let mut cs = ChannelState::new(16, 64); let mut cs = ChannelState::new(16, 64, 0);
cs.stats.connected_receivers = 1; cs.stats.connected_receivers = 1;
assert!(!cs.is_idle()); assert!(!cs.is_idle());
} }
#[test] #[test]
fn grace_period_not_expired_initially() { fn grace_period_not_expired_initially() {
let cs = ChannelState::new(16, 64); let cs = ChannelState::new(16, 64, 0);
assert!(!cs.grace_period_expired(30)); assert!(!cs.grace_period_expired(30));
} }
#[test] #[test]
fn grace_period_expired_after_disconnect() { fn grace_period_expired_after_disconnect() {
let mut cs = ChannelState::new(16, 64); let mut cs = ChannelState::new(16, 64, 0);
cs.source_disconnect_time = Some(Instant::now() - Duration::from_secs(60)); cs.source_disconnect_time = Some(Instant::now() - Duration::from_secs(60));
assert!(cs.grace_period_expired(30)); assert!(cs.grace_period_expired(30));
} }
@ -1463,4 +1639,86 @@ mod tests {
assert_eq!(merged["count"], 5); assert_eq!(merged["count"], 5);
assert_eq!(merged["_v"]["count"], 3); // version preserved from diff assert_eq!(merged["_v"]["count"], 3); // version preserved from diff
} }
// ─── Channel Wildcard Tests ───
#[test]
fn channel_matches_exact() {
assert!(channel_matches("games/chess", "games/chess"));
assert!(!channel_matches("games/chess", "games/go"));
}
#[test]
fn channel_matches_wildcard() {
assert!(channel_matches("games/*", "games/chess"));
assert!(channel_matches("games/*", "games/go"));
assert!(!channel_matches("games/*", "other/chess"));
assert!(!channel_matches("games/*", "games")); // no trailing segment
}
#[test]
fn channel_matches_star_all() {
assert!(channel_matches("*", "anything"));
assert!(channel_matches("*", "games/chess"));
}
#[test]
fn find_matching_channels_works() {
let mut state = RelayState::new(16, 64, 256, 0, None);
state.get_or_create_channel("games/chess").unwrap();
state.get_or_create_channel("games/go").unwrap();
state.get_or_create_channel("chat/main").unwrap();
let mut matches = find_matching_channels(&state, "games/*");
matches.sort();
assert_eq!(matches, vec!["games/chess", "games/go"]);
}
// ─── Replay Buffer Tests ───
#[test]
fn replay_buffer_stores_frames() {
let mut cache = StateCache { replay_depth: 3, ..Default::default() };
let f1 = crate::codec::signal_diff_frame(0, 0, b"{\"a\":1}");
let f2 = crate::codec::signal_diff_frame(1, 100, b"{\"a\":2}");
let f3 = crate::codec::signal_diff_frame(2, 200, b"{\"a\":3}");
cache.process_frame(&f1);
cache.process_frame(&f2);
cache.process_frame(&f3);
assert_eq!(cache.replay_len(), 3);
assert_eq!(cache.replay_frames(0).len(), 3);
assert_eq!(cache.replay_frames(1).len(), 2);
assert_eq!(cache.replay_frames(3).len(), 0);
}
#[test]
fn replay_buffer_evicts_oldest() {
let mut cache = StateCache { replay_depth: 2, ..Default::default() };
let f1 = crate::codec::signal_diff_frame(0, 0, b"{\"a\":1}");
let f2 = crate::codec::signal_diff_frame(1, 100, b"{\"a\":2}");
let f3 = crate::codec::signal_diff_frame(2, 200, b"{\"a\":3}");
cache.process_frame(&f1);
cache.process_frame(&f2);
cache.process_frame(&f3);
assert_eq!(cache.replay_len(), 2);
// First frame should be evicted
assert_eq!(cache.replay_buffer[0], f2);
assert_eq!(cache.replay_buffer[1], f3);
}
#[test]
fn replay_depth_propagates_to_channel() {
let mut state = RelayState::new(16, 64, 256, 100, None);
let ch = state.get_or_create_channel("test").unwrap();
let cs = ch.try_read().unwrap();
assert_eq!(cs.cache.replay_depth, 100);
}
#[test]
fn replay_disabled_when_zero() {
let mut cache = StateCache::default();
assert_eq!(cache.replay_depth, 0);
let f1 = crate::codec::signal_diff_frame(0, 0, b"{\"a\":1}");
cache.process_frame(&f1);
assert_eq!(cache.replay_len(), 0); // Nothing stored when depth=0
}
} }

213
examples/mission-control.ds Normal file
View file

@ -0,0 +1,213 @@
-- ═══════════════════════════════════════════════════════════
-- ⚡ DreamStack Playground
-- ═══════════════════════════════════════════════════════════
-- One page. Everything visible. Click and watch data flow.
--
-- dreamstack build examples/mission-control.ds -o /tmp/mission-control
import { Card } from "../registry/components/card"
import { Badge } from "../registry/components/badge"
-- ════════════════════════════════
-- SIGNALS — the core of DreamStack
-- ════════════════════════════════
let score = 0
let player = "Player 1"
let status = "idle"
let hp = 100
let xp = 0
let level = 1
let inventory = ["Sword", "Shield", "Potion"]
let log = ["Game started"]
-- Derived: auto-computed from source signals
let damage = score * 3
let shield = hp > 50
let rank = level * 10 + xp
-- Spring: physics-animated value
let energy = spring(100)
-- Root view: single-page playground. Every card below reads and/or
-- writes the signals declared above.
view main = column [
text "⚡ DreamStack Playground" { variant: "title" }
-- ════════════════════════════════
-- SOURCE: You control signals here
-- ════════════════════════════════
row [
-- SIGNAL CONTROL PANEL
Card { title: "🎮 Source Signals", subtitle: "change these → everything reacts" } [
text "score" { variant: "subtitle" }
text "{score}" { variant: "title" }
row [
button "+1" { click: score += 1, variant: "primary" }
button "+10" { click: score += 10, variant: "primary" }
button "0" { click: score = 0, variant: "ghost" }
]
text "hp" { variant: "subtitle" }
text "{hp}" { variant: "title" }
row [
button "Hit -20" { click: hp -= 20, variant: "destructive" }
button "Heal +30" { click: hp += 30, variant: "primary" }
button "Full" { click: hp = 100, variant: "ghost" }
]
text "status" { variant: "subtitle" }
row [
button "idle" { click: status = "idle", variant: "ghost" }
button "fighting" { click: status = "fighting", variant: "primary" }
button "dead" { click: status = "dead", variant: "destructive" }
]
text "level / xp" { variant: "subtitle" }
row [
button "XP +5" { click: xp += 5, variant: "primary" }
button "Level Up" { click: level += 1, variant: "primary" }
]
]
-- DERIVED SIGNALS — auto-updated
Card { title: "⚙️ Derived Signals", subtitle: "auto-computed, zero code" } [
text "damage = score × 3" { variant: "subtitle" }
text "{damage}" { variant: "title" }
text "shield = hp > 50" { variant: "subtitle" }
when shield ->
Badge { label: "SHIELD UP ✓", variant: "success" }
else ->
Badge { label: "SHIELD DOWN ✗", variant: "error" }
text "rank = level × 10 + xp" { variant: "subtitle" }
text "{rank}" { variant: "title" }
text "energy (spring)" { variant: "subtitle" }
text "{energy}" { variant: "title" }
row [
button "Drain" { click: energy = 10, variant: "destructive" }
button "Charge" { click: energy = 100, variant: "primary" }
]
]
]
-- ════════════════════════════════
-- REACTIVE UI — responds to signals
-- ════════════════════════════════
row [
-- MATCH: status drives which badge shows
Card { title: "📊 Status Display", subtitle: "match status" } [
match status
"idle" -> Badge { label: "💤 IDLE", variant: "info" }
"fighting" -> Badge { label: "⚔️ FIGHTING", variant: "warning" }
"dead" -> Badge { label: "💀 DEAD", variant: "error" }
_ -> Badge { label: "???", variant: "info" }
-- WHEN/ELSE: conditional rendering
when hp > 80 ->
text "💚 Healthy"
when hp > 30 ->
text "💛 Wounded"
else ->
text "❤️ Critical!"
when score > 20 ->
Badge { label: "🔥 ON FIRE", variant: "warning" }
]
-- EACH: dynamic list
Card { title: "🎒 Inventory", subtitle: "each + array methods" } [
each item in inventory ->
row [
Badge { label: item, variant: "info" }
-- NOTE(review): _idx is presumably the implicit index of the current
-- 'each' item — confirm against the compiler's 'each' semantics.
button "Drop" { click: inventory.remove(_idx), variant: "ghost" }
]
row [
button "+Bow" { click: inventory.push("Bow"), variant: "primary" }
button "+Ring" { click: inventory.push("Ring"), variant: "primary" }
button "Sort" { click: inventory.sort(), variant: "ghost" }
button "Clear" { click: inventory.clear(), variant: "destructive" }
]
]
-- INPUT BINDING + LOG
Card { title: "📝 Event Log", subtitle: "input bind + push" } [
input { bind: player, placeholder: "Player name..." }
text "Playing as: {player}" { variant: "title" }
button "Log Score" { click: log.push(player), variant: "primary" }
each entry in log ->
text "→ {entry}"
]
]
-- ════════════════════════════════
-- EMBED: what a page needs to receive this
-- ════════════════════════════════
Card { title: "📡 Receiver — Live Signal Values", subtitle: "these signals stream to any receiver" } [
row [
column [
text "score = {score}"
text "damage = {damage}"
text "hp = {hp}"
]
column [
text "shield = {shield}"
text "level = {level}"
text "rank = {rank}"
]
column [
text "status = {status}"
text "player = {player}"
text "xp = {xp}"
]
]
]
-- The three cards below are documentation-as-UI: they show the literal
-- embed snippets a receiving page would use.
Card { title: "🔌 Embed Option 1 — iframe", subtitle: "one line, any website" } [
text "Paste into any HTML page:" { variant: "subtitle" }
text ""
text "[iframe src=your-relay/view/game /]"
text ""
text "The compiled .ds app runs inside the iframe."
text "Self-contained, no dependencies."
Badge { label: "ZERO CONFIG", variant: "success" }
]
Card { title: "🔌 Embed Option 2 — JS API", subtitle: "vanilla JavaScript, full control" } [
text "Add to any web page:" { variant: "subtitle" }
text ""
text "[script src=dreamstack-runtime.js]"
text ""
text " const game = DS._connectStream(RELAY_URL)"
text ""
text " game.on(score, (v) => scoreEl.textContent = v)"
text ""
text " game.on(hp, (v) => healthBar.style.width = v)"
text ""
text "[/script]"
text ""
Badge { label: "3 LINES OF JS", variant: "success" }
text "Works with React, Vue, Svelte, vanilla — anything."
]
Card { title: "🔌 Embed Option 3 — DreamStack .ds", subtitle: "full reactive receiver" } [
text "Write a .ds file — compile to self-contained HTML:" { variant: "subtitle" }
text ""
text "let game = stream from RELAY_URL"
text ""
text "view main = column ["
text " text Score: (game.score)"
text " when game.hp is low ->"
text " text DANGER!"
text "]"
text ""
Badge { label: "FULL REACTIVITY", variant: "info" }
text "Remote signals become local reactive proxies."
text "Match, when/else, each — everything works on remote data."
]
]

View file

@ -0,0 +1,81 @@
-- ═══════════════════════════════════════════════════════
-- DreamStack Mission Control — Source
-- ═══════════════════════════════════════════════════════
-- Simulates a spacecraft telemetry source that streams
-- signals to remote Mission Control dashboards.
--
-- Run:
-- Tab 1: cargo run -p ds-stream (relay)
-- Tab 2: dreamstack stream examples/mission-source.ds --port 3000
-- Tab 3: dreamstack dev examples/mission-control.ds (dashboard)
-- ── Telemetry Signals ──
let altitude = 408
let velocity = 7660
let fuel = 87
let heartRate = 72
let oxygen = 98
let temp = 21
let status = "nominal"
let crew = ["Amir", "Nova", "Atlas"]
let logCount = 0
let timestamp = 0
-- Computed signals
-- NOTE(review): both are initialized to constants here and no recomputation
-- is visible in this file — confirm they are updated elsewhere.
let orbitalPeriod = 90
let fuelStatus = "normal"
-- Stream telemetry via relay
-- Every signal named in 'output:' is broadcast to receivers on the
-- 'mission' channel of the local relay.
stream telemetry on "ws://localhost:9100/peer/mission" { mode: signal, output: altitude, velocity, fuel, heartRate, oxygen, temp, status, crew, logCount, timestamp, orbitalPeriod, fuelStatus }
-- Operator UI: buttons mutate the signals above; the stream declaration
-- pushes the changes out.
view source = column [
text "🛰️ Mission Source — ISS Telemetry" { variant: "title" }
text "Broadcasting to ws://localhost:9100/peer/mission" { variant: "subtitle" }
row [
column [
text "── Orbital ──"
text "Altitude: {altitude} km"
text "Velocity: {velocity} m/s"
row [
button "Boost" { click: velocity += 100, variant: "primary" }
button "Brake" { click: velocity -= 100, variant: "secondary" }
button "Raise" { click: altitude += 10, variant: "primary" }
button "Lower" { click: altitude -= 10, variant: "ghost" }
]
]
column [
text "── Life Support ──"
text "❤️ {heartRate} bpm"
text "🫁 {oxygen}%"
text "🌡️ {temp}°C"
row [
button "Exercise" { click: heartRate += 15, variant: "primary" }
button "Rest" { click: heartRate -= 10, variant: "ghost" }
]
]
]
row [
column [
text "── Fuel ──"
text "⛽ {fuel}%"
row [
button "Burn" { click: fuel -= 5, variant: "destructive" }
button "Refuel" { click: fuel = 100, variant: "primary" }
]
]
column [
text "── Status ──"
row [
button "Nominal" { click: status = "nominal", variant: "primary" }
button "Warning" { click: status = "warning", variant: "secondary" }
button "Critical" { click: status = "critical", variant: "destructive" }
]
]
]
text "── Crew EVA Log ──"
text "Log entries: {logCount}"
button "Log EVA" { click: logCount += 1, variant: "primary" }
]

135
sdk/dreamstack-embed.js Normal file
View file

@ -0,0 +1,135 @@
/**
 * DreamStack Embed SDK — a ~3KB standalone script.
* Enables embedding DreamStack apps in any website.
*
* Usage:
* <script src="dreamstack-embed.js"></script>
* <ds-stream src="https://yourapp.com"></ds-stream>
*
* Or via JS API:
* DreamStack.connect('https://yourapp.com', '#container');
*/
(function (root, factory) {
if (typeof module !== 'undefined' && module.exports) module.exports = factory();
else root.DreamStack = factory();
})(typeof globalThis !== 'undefined' ? globalThis : this, function () {
'use strict';
// ── Iframe Embed ──
/**
 * Mount a DreamStack app in an <iframe> inside `container`.
 *
 * @param {string} src - URL of the DreamStack app to embed.
 * @param {string|Element} container - CSS selector or DOM element to mount into.
 * @param {object} [options] - width / height / borderRadius / className overrides.
 * @returns {{iframe: HTMLIFrameElement, destroy: Function, resize: Function}}
 * @throws {Error} If the container cannot be found.
 */
function embed(src, container, options) {
  var opts = options || {};
  var host = typeof container === 'string' ? document.querySelector(container) : container;
  if (!host) throw new Error('[DreamStack] Container not found: ' + container);

  var frame = document.createElement('iframe');
  frame.src = src;

  // Visual defaults; each can be overridden through options.
  var styles = {
    border: 'none',
    width: opts.width || '100%',
    height: opts.height || '400px',
    borderRadius: opts.borderRadius || '12px',
    overflow: 'hidden'
  };
  Object.keys(styles).forEach(function (key) {
    frame.style[key] = styles[key];
  });

  // Sandbox the embedded app while still letting it run scripts.
  frame.setAttribute('sandbox', 'allow-scripts allow-same-origin');
  frame.setAttribute('loading', 'lazy');
  if (opts.className) frame.className = opts.className;

  host.appendChild(frame);

  return {
    iframe: frame,
    destroy: function () { host.removeChild(frame); },
    resize: function (w, h) {
      frame.style.width = typeof w === 'number' ? w + 'px' : w;
      frame.style.height = typeof h === 'number' ? h + 'px' : h;
    }
  };
}
// ── Signal Bridge (bidirectional) ──
function connect(src, container, options) {
var handle = embed(src, container, options);
var listeners = {};
// Listen for messages from the DreamStack app
window.addEventListener('message', function (e) {
if (e.source !== handle.iframe.contentWindow) return;
var data = e.data;
if (data && data.type === 'ds:signal') {
var name = data.name;
if (listeners[name]) {
listeners[name].forEach(function (fn) { fn(data.value); });
}
if (listeners['*']) {
listeners['*'].forEach(function (fn) { fn(name, data.value); });
}
}
});
return {
iframe: handle.iframe,
destroy: handle.destroy,
resize: handle.resize,
// Send a signal value to the DreamStack app
send: function (name, value) {
handle.iframe.contentWindow.postMessage(
{ type: 'ds:signal', name: name, value: value }, '*'
);
},
// Listen for signal changes from the DreamStack app
on: function (name, fn) {
if (!listeners[name]) listeners[name] = [];
listeners[name].push(fn);
return function () {
listeners[name] = listeners[name].filter(function (f) { return f !== fn; });
};
}
};
}
// ── Web Component: <ds-stream> ──
if (typeof customElements !== 'undefined') {
customElements.define('ds-stream', class extends HTMLElement {
constructor() {
super();
this._handle = null;
}
connectedCallback() {
var src = this.getAttribute('src');
if (!src) return;
var shadow = this.attachShadow({ mode: 'open' });
var wrapper = document.createElement('div');
wrapper.style.width = '100%';
wrapper.style.height = this.getAttribute('height') || '400px';
shadow.appendChild(wrapper);
this._handle = embed(src, wrapper, {
width: '100%',
height: '100%',
borderRadius: this.getAttribute('radius') || '12px'
});
}
disconnectedCallback() {
if (this._handle) this._handle.destroy();
}
static get observedAttributes() { return ['src', 'height']; }
attributeChangedCallback(name, old, val) {
if (name === 'src' && this._handle) {
this._handle.iframe.src = val;
} else if (name === 'height' && this._handle) {
this._handle.resize('100%', val);
}
}
});
}
  // Public API: low-level iframe embed, signal-bridged connect, and the
  // SDK version string (keep in sync with package metadata).
  return {
    embed: embed,
    connect: connect,
    version: '0.1.0'
  };
});