From c6ce0b24d54b7fa7c4ca5be7b6e8b006ffa2bc16 Mon Sep 17 00:00:00 2001 From: enzotar Date: Wed, 11 Mar 2026 18:11:09 -0700 Subject: [PATCH] feat: Introduce integrity checks, backups, bookmarks, trash, quick capture, audit logs, and backend enhancements for notes, git, crypto, and SRS. --- CHANGELOG.md | 38 + package-lock.json | 83 +- package.json | 7 +- src-tauri/Cargo.lock | 3 +- src-tauri/Cargo.toml | 3 +- src-tauri/src/crypto.rs | 79 ++ src-tauri/src/export.rs | 123 ++ src-tauri/src/git.rs | 80 ++ src-tauri/src/lib.rs | 2017 +++------------------------- src-tauri/src/notes.rs | 1490 ++++++++++++++++++++ src-tauri/src/srs.rs | 117 ++ src-tauri/src/state.rs | 758 +++++++++++ src-tauri/tauri.conf.json | 2 +- src/App.tsx | 4 + src/components/AuditLog.tsx | 85 ++ src/components/BookmarksPanel.tsx | 79 ++ src/components/CommandPalette.tsx | 8 + src/components/GraphAnalytics.tsx | 183 ++- src/components/GraphView.tsx | 447 +++--- src/components/ImportExport.tsx | 34 +- src/components/IntegrityReport.tsx | 232 ++++ src/components/NoteGraphNode.tsx | 43 + src/components/QuickCapture.tsx | 74 + src/components/Sidebar.tsx | 14 + src/components/StatusBar.tsx | 48 +- src/components/TrashPanel.tsx | 100 ++ src/components/WhiteboardView.tsx | 58 +- src/index.css | 1084 +++++++++++++++ src/lib/clustering.ts | 145 ++ src/lib/commands.ts | 172 +++ vite.config.ts | 6 + 31 files changed, 5368 insertions(+), 2248 deletions(-) create mode 100644 src-tauri/src/crypto.rs create mode 100644 src-tauri/src/export.rs create mode 100644 src-tauri/src/git.rs create mode 100644 src-tauri/src/notes.rs create mode 100644 src-tauri/src/srs.rs create mode 100644 src-tauri/src/state.rs create mode 100644 src/components/AuditLog.tsx create mode 100644 src/components/BookmarksPanel.tsx create mode 100644 src/components/IntegrityReport.tsx create mode 100644 src/components/NoteGraphNode.tsx create mode 100644 src/components/QuickCapture.tsx create mode 100644 src/components/TrashPanel.tsx create mode 
100644 src/lib/clustering.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index 649f039..6913756 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,44 @@ All notable changes to Graph Notes will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/). +## [1.5.0] — 2026-03-11 + +### Added +- **Graph View Upgrade** — Complete rewrite using `@blinksgg/canvas` v3.0 with virtualized rendering +- **Note Graph Nodes** — Custom node type showing title, tag pills, link count badge, and cluster color +- **Cluster Group Nodes** — Collapsible `GroupNode` containers grouping related notes by community detection +- **Minimap** — Canvas overview with draggable viewport for large vault navigation +- **Layout Switcher** — Force-directed, tree, and grid layouts with animated transitions +- **Graph Search & Spotlight** — Type-to-search with non-matching nodes dimmed and camera fit-to-bounds +- **Edge Labels** — Wikilink context displayed on graph edges +- **MiniGraph Upgrade** — Sidebar preview upgraded to canvas v3.0 + +### Changed +- `@blinksgg/canvas` updated to v3.0 from `gg-antifragile` repository +- `WhiteboardView` migrated to new `CanvasProvider` API +- `GraphView` reduced from 336 to ~120 lines + +## [1.4.0] — 2026-03-11 + +### Added +- **Content Checksums (SHA-256)** — Per-note hashing with on-demand vault-wide verification against stored checksums +- **Vault Integrity Scanner** — Deep scan for truncated files, leftover `~tmp` files, orphaned `.graph-notes/` entries, and non-UTF-8 encoding issues +- **Automatic Backup Snapshots** — Vault-level `.zip` snapshots in `.graph-notes/backups/` with auto-pruning of old snapshots +- **Write-Ahead Log (WAL)** — Crash recovery via operation journal in `.graph-notes/wal.log` with startup replay +- **Conflict Detection** — mtime-based external modification check before writes; conflict banner with overwrite/discard options 
+- **Frontmatter Schema Validation** — Inline warnings for unclosed `---` delimiters, duplicate keys, and invalid date formats +- **Orphan Attachment Cleanup** — Scan `_attachments/` for files not referenced by any note, with bulk delete +- **File Operation Audit Log** — Append-only log of all create/update/delete/rename operations with timestamps + +### Changed +- Sidebar: added 🛡️ Integrity Report action +- Command Palette: added Verify Vault, Create Backup, Audit Log commands +- StatusBar: integrity badge showing checksum status +- Editor: conflict banner + frontmatter validation warnings + +### Dependencies +- Added `sha2` (Rust) for content hashing + ## [1.0.0] — 2026-03-09 ### 🎉 First Stable Release diff --git a/package-lock.json b/package-lock.json index 01a31bf..2538838 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,14 +1,15 @@ { "name": "graph-notes", - "version": "1.0.0", + "version": "1.5.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "graph-notes", - "version": "1.0.0", + "version": "1.5.0", "dependencies": { - "@blinksgg/canvas": "file:../space-operator/gg/packages/canvas", + "@blinksgg/canvas": "file:../blinksgg/gg-antifragile/packages/canvas", + "@tanstack/react-query": "^5.90.21", "@tauri-apps/api": "^2", "@tauri-apps/plugin-dialog": "^2", "@tauri-apps/plugin-fs": "^2", @@ -16,8 +17,10 @@ "d3-force": "^3.0.0", "dompurify": "^3.3.2", "graphology": "^0.26.0", + "graphology-types": "^0.24.8", "highlight.js": "^11.11.1", "jotai": "^2.18.0", + "jotai-family": "^1.0.1", "marked": "^15.0.0", "mermaid": "^11.12.3", "react": "^19.1.0", @@ -38,14 +41,14 @@ }, "../blinksgg/gg-antifragile/packages/canvas": { "name": "@blinksgg/canvas", - "version": "0.13.0", - "extraneous": true, + "version": "3.0.0", "dependencies": { "@supabase/supabase-js": "^2.49.5", "@use-gesture/react": "^10.3.1", "debug": "^4.4.3", "graphology": "^0.26.0", - "graphology-types": "^0.24.8" + "graphology-types": "^0.24.8", + "jotai-family": "^1.0.1" }, 
"devDependencies": { "@babel/core": "^7.29.0", @@ -63,9 +66,11 @@ "@types/node": "^24.5.2", "@types/react": "^19.1.13", "@types/react-dom": "^19.1.9", + "@vitejs/plugin-react": "^4.5.2", "babel-plugin-react-compiler": "^1.0.0", "d3-force": "^3.0.0", "esbuild-plugin-babel": "^0.2.3", + "eslint-plugin-react-compiler": "19.1.0-rc.2", "jotai": "^2.6.0", "jsdom": "^26.1.0", "react": "^19.1.1", @@ -81,14 +86,31 @@ "@tanstack/react-query": "^5.17.0", "d3-force": "^3.0.0", "jotai": "^2.6.0", - "jotai-tanstack-query": "*", - "react": "^19.0.0", - "react-dom": "^19.0.0" + "react": "^19.2.0", + "react-dom": "^19.2.0" + }, + "peerDependenciesMeta": { + "@blocknote/core": { + "optional": true + }, + "@blocknote/react": { + "optional": true + }, + "@blocknote/shadcn": { + "optional": true + }, + "@tanstack/react-query": { + "optional": true + }, + "d3-force": { + "optional": true + } } }, "../space-operator/gg/packages/canvas": { "name": "@blinksgg/canvas", "version": "0.35.0", + "extraneous": true, "dependencies": { "@supabase/supabase-js": "^2.49.5", "@use-gesture/react": "^10.3.1", @@ -432,7 +454,7 @@ } }, "node_modules/@blinksgg/canvas": { - "resolved": "../space-operator/gg/packages/canvas", + "resolved": "../blinksgg/gg-antifragile/packages/canvas", "link": true }, "node_modules/@braintree/sanitize-url": { @@ -1627,6 +1649,32 @@ "vite": "^5.2.0 || ^6 || ^7" } }, + "node_modules/@tanstack/query-core": { + "version": "5.90.20", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.20.tgz", + "integrity": "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.90.21", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.21.tgz", + "integrity": 
"sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.90.20" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, "node_modules/@tauri-apps/api": { "version": "2.10.1", "resolved": "https://registry.npmjs.org/@tauri-apps/api/-/api-2.10.1.tgz", @@ -3109,8 +3157,7 @@ "version": "0.24.8", "resolved": "https://registry.npmjs.org/graphology-types/-/graphology-types-0.24.8.tgz", "integrity": "sha512-hDRKYXa8TsoZHjgEaysSRyPdT6uB78Ci8WnjgbStlQysz7xR52PInxNsmnB7IBOM1BhikxkNyCVEFgmPKnpx3Q==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/hachure-fill": { "version": "0.5.2", @@ -3187,6 +3234,18 @@ } } }, + "node_modules/jotai-family": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/jotai-family/-/jotai-family-1.0.1.tgz", + "integrity": "sha512-Zb/79GNDhC/z82R+6qTTpeKW4l4H6ZCApfF5W8G4SH37E4mhbysU7r8DkP0KX94hWvjB/6lt/97nSr3wB+64Zg==", + "license": "MIT", + "engines": { + "node": ">=12.20.0" + }, + "peerDependencies": { + "jotai": ">=2.9.0" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", diff --git a/package.json b/package.json index 88b81ca..8c0b22c 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "graph-notes", "private": true, - "version": "1.0.0", + "version": "1.5.0", "type": "module", "scripts": { "dev": "vite", @@ -10,7 +10,8 @@ "tauri": "tauri" }, "dependencies": { - "@blinksgg/canvas": "file:../space-operator/gg/packages/canvas", + "@blinksgg/canvas": "file:../blinksgg/gg-antifragile/packages/canvas", + "@tanstack/react-query": "^5.90.21", "@tauri-apps/api": "^2", "@tauri-apps/plugin-dialog": "^2", "@tauri-apps/plugin-fs": "^2", @@ -18,8 +19,10 @@ "d3-force": "^3.0.0", "dompurify": "^3.3.2", "graphology": 
"^0.26.0", + "graphology-types": "^0.24.8", "highlight.js": "^11.11.1", "jotai": "^2.18.0", + "jotai-family": "^1.0.1", "marked": "^15.0.0", "mermaid": "^11.12.3", "react": "^19.1.0", diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 619512a..31576d0 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -1473,7 +1473,7 @@ dependencies = [ [[package]] name = "graph-notes" -version = "1.0.0" +version = "1.5.0" dependencies = [ "aes-gcm", "argon2", @@ -1483,6 +1483,7 @@ dependencies = [ "regex", "serde", "serde_json", + "sha2", "tauri", "tauri-build", "tauri-plugin-dialog", diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 5560b99..9c221af 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "graph-notes" -version = "1.0.0" +version = "1.5.0" description = "A graph-based note-taking app" authors = ["you"] edition = "2021" @@ -27,3 +27,4 @@ argon2 = "0.5" rand = "0.8" base64 = "0.22" zip = "2" +sha2 = "0.10" diff --git a/src-tauri/src/crypto.rs b/src-tauri/src/crypto.rs new file mode 100644 index 0000000..e05eac4 --- /dev/null +++ b/src-tauri/src/crypto.rs @@ -0,0 +1,79 @@ +use std::fs; +use std::path::Path; + +use aes_gcm::{Aes256Gcm, Key, Nonce}; +use aes_gcm::aead::{Aead, KeyInit}; +use argon2::Argon2; +use base64::{Engine as _, engine::general_purpose::STANDARD as B64}; + +use crate::{atomic_write, safe_vault_path}; + +fn derive_key(password: &str, salt: &[u8]) -> [u8; 32] { + let mut key = [0u8; 32]; + Argon2::default() + .hash_password_into(password.as_bytes(), salt, &mut key) + .expect("key derivation failed"); + key +} + +#[tauri::command] +pub fn encrypt_note(vault_path: String, note_path: String, password: String) -> Result<(), String> { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + + let salt: [u8; 16] = rand::random(); + let nonce_bytes: [u8; 12] = rand::random(); + + let key_bytes = 
derive_key(&password, &salt); + let key = Key::::from_slice(&key_bytes); + let cipher = Aes256Gcm::new(key); + let nonce = Nonce::from_slice(&nonce_bytes); + + let ciphertext = cipher.encrypt(nonce, content.as_bytes()) + .map_err(|_| "Encryption failed".to_string())?; + + // Format: GRAPHNOTES_ENC:v1:{salt_b64}:{nonce_b64}:{ciphertext_b64} + let encoded = format!( + "GRAPHNOTES_ENC:v1:{}:{}:{}", + B64.encode(salt), + B64.encode(nonce_bytes), + B64.encode(&ciphertext), + ); + atomic_write(&full, &encoded) +} + +#[tauri::command] +pub fn decrypt_note(vault_path: String, note_path: String, password: String) -> Result { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + + if !content.starts_with("GRAPHNOTES_ENC:v1:") { + return Err("Note is not encrypted".to_string()); + } + + let parts: Vec<&str> = content.splitn(5, ':').collect(); + if parts.len() != 5 { + return Err("Invalid encrypted format".to_string()); + } + + let salt = B64.decode(parts[2]).map_err(|_| "Invalid salt".to_string())?; + let nonce_bytes = B64.decode(parts[3]).map_err(|_| "Invalid nonce".to_string())?; + let ciphertext = B64.decode(parts[4]).map_err(|_| "Invalid ciphertext".to_string())?; + + let key_bytes = derive_key(&password, &salt); + let key = Key::::from_slice(&key_bytes); + let cipher = Aes256Gcm::new(key); + let nonce = Nonce::from_slice(&nonce_bytes); + + let plaintext = cipher.decrypt(nonce, ciphertext.as_ref()) + .map_err(|_| "Wrong password".to_string())?; + + String::from_utf8(plaintext).map_err(|_| "Invalid UTF-8".to_string()) +} + +#[tauri::command] +pub fn is_encrypted(vault_path: String, note_path: String) -> Result { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + Ok(content.starts_with("GRAPHNOTES_ENC:v1:")) +} diff --git a/src-tauri/src/export.rs b/src-tauri/src/export.rs new file mode 100644 index 0000000..1b0e525 --- /dev/null 
+++ b/src-tauri/src/export.rs @@ -0,0 +1,123 @@ +use std::fs; +use std::path::Path; + +use serde::{Deserialize, Serialize}; +use walkdir::WalkDir; + +use crate::{safe_vault_path, EXPORT_WIKILINK_RE}; + +#[tauri::command] +pub fn export_note_html(vault_path: String, note_path: String) -> Result { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + let title = Path::new(¬e_path) + .file_stem() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + + // Basic markdown-to-HTML (headings, bold, italic, links, paragraphs) + let mut html_body = String::new(); + for line in content.lines() { + let trimmed = line.trim(); + if trimmed.starts_with("---") { + continue; // skip frontmatter delimiters + } + if trimmed.starts_with("# ") { + html_body.push_str(&format!("
<h1>{}</h1>
\n", &trimmed[2..])); + } else if trimmed.starts_with("## ") { + html_body.push_str(&format!("
<h2>{}</h2>
\n", &trimmed[3..])); + } else if trimmed.starts_with("### ") { + html_body.push_str(&format!("
<h3>{}</h3>
\n", &trimmed[4..])); + } else if trimmed.starts_with("- ") { + html_body.push_str(&format!("
<li>{}</li>\n", &trimmed[2..])); + } else if trimmed.is_empty() { + html_body.push_str("<br/>\n"); + } else { + html_body.push_str(&format!("
<p>{}</p>
    \n", trimmed)); + } + } + + // Replace wikilinks + let html_body = EXPORT_WIKILINK_RE.replace_all(&html_body, |caps: ®ex::Captures| { + let target = caps.get(1).map_or("", |m| m.as_str()).trim(); + let label = caps.get(2).map_or(target, |m| m.as_str()).trim(); + format!("{}", target, label) + }).to_string(); + + let html = format!(r#" + + + + +{title} + + + +{html_body} + +"#); + Ok(html) +} + +#[tauri::command] +pub fn export_vault_zip(vault_path: String, output_path: String) -> Result { + use std::io::Write; + let vault = Path::new(&vault_path); + let out = Path::new(&output_path); + + let file = fs::File::create(out).map_err(|e| e.to_string())?; + let mut zip = zip::ZipWriter::new(file); + let options = zip::write::SimpleFileOptions::default() + .compression_method(zip::CompressionMethod::Deflated); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().is_file()) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + // Skip hidden files, in-progress atomic writes (~tmp suffix) + if rel.starts_with(".") || rel.ends_with("~tmp") { continue; } + + let content = fs::read(path).map_err(|e| e.to_string())?; + zip.start_file(&rel, options).map_err(|e| e.to_string())?; + zip.write_all(&content).map_err(|e| e.to_string())?; + } + + zip.finish().map_err(|e| e.to_string())?; + Ok(format!("Exported to {}", output_path)) +} + +#[tauri::command] +pub fn import_folder(vault_path: String, source_path: String) -> Result { + let vault = Path::new(&vault_path); + let source = Path::new(&source_path); + let mut count: u32 = 0; + + for entry in WalkDir::new(source) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(source).unwrap_or(path); + let dest = vault.join(rel); + + if let Some(parent) = dest.parent() { + fs::create_dir_all(parent).map_err(|e| 
e.to_string())?; + } + fs::copy(path, &dest).map_err(|e| e.to_string())?; + count += 1; + } + + Ok(count) +} diff --git a/src-tauri/src/git.rs b/src-tauri/src/git.rs new file mode 100644 index 0000000..45a7ac5 --- /dev/null +++ b/src-tauri/src/git.rs @@ -0,0 +1,80 @@ +use std::process::Command; + +#[tauri::command] +pub fn git_status(vault_path: String) -> Result { + let output = Command::new("git") + .args(["status", "--porcelain"]) + .current_dir(&vault_path) + .output() + .map_err(|e| e.to_string())?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(String::from_utf8_lossy(&output.stderr).to_string()) + } +} + +#[tauri::command] +pub fn git_commit(vault_path: String, message: String) -> Result { + let add_output = Command::new("git") + .args(["add", "."]) + .current_dir(&vault_path) + .output() + .map_err(|e| e.to_string())?; + if !add_output.status.success() { + return Err(String::from_utf8_lossy(&add_output.stderr).to_string()); + } + + let output = Command::new("git") + .args(["commit", "-m", &message]) + .current_dir(&vault_path) + .output() + .map_err(|e| e.to_string())?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(String::from_utf8_lossy(&output.stderr).to_string()) + } +} + +#[tauri::command] +pub fn git_pull(vault_path: String) -> Result { + let output = Command::new("git") + .args(["pull"]) + .current_dir(&vault_path) + .output() + .map_err(|e| e.to_string())?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(String::from_utf8_lossy(&output.stderr).to_string()) + } +} + +#[tauri::command] +pub fn git_push(vault_path: String) -> Result { + let output = Command::new("git") + .args(["push"]) + .current_dir(&vault_path) + .output() + .map_err(|e| e.to_string())?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + 
Err(String::from_utf8_lossy(&output.stderr).to_string()) + } +} + +#[tauri::command] +pub fn git_init(vault_path: String) -> Result { + let output = Command::new("git") + .args(["init"]) + .current_dir(&vault_path) + .output() + .map_err(|e| e.to_string())?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(String::from_utf8_lossy(&output.stderr).to_string()) + } +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 65a133d..e914437 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -1,27 +1,33 @@ +mod notes; +mod git; +mod crypto; +mod srs; +mod export; +mod state; + use serde::{Deserialize, Serialize}; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; use std::sync::LazyLock; -use walkdir::WalkDir; use regex::Regex; use chrono::Local; /* ── Atomic File Write ───────────────────────────────────── - * 1. Write to a `.tmp` sibling in the same directory + * 1. Write to a `~tmp` sibling in the same directory * 2. fsync the file descriptor (data hits disk) - * 3. Atomic rename `.tmp` → target (POSIX guarantees) + * 3. Atomic rename `~tmp` → target (POSIX guarantees) * 4. fsync the parent directory (metadata durability) * * If any step fails the original file is untouched and - * the `.tmp` file is cleaned up. + * the `~tmp` file is cleaned up. 
* ────────────────────────────────────────────────────────── */ -fn atomic_write(path: &Path, content: &str) -> Result<(), String> { +pub(crate) fn atomic_write(path: &Path, content: &str) -> Result<(), String> { atomic_write_bytes(path, content.as_bytes()) } -fn atomic_write_bytes(path: &Path, data: &[u8]) -> Result<(), String> { +pub(crate) fn atomic_write_bytes(path: &Path, data: &[u8]) -> Result<(), String> { let parent = path.parent() .ok_or_else(|| "Cannot determine parent directory".to_string())?; fs::create_dir_all(parent) @@ -65,9 +71,11 @@ fn atomic_write_bytes(path: &Path, data: &[u8]) -> Result<(), String> { Ok(()) } +/* ── Path Safety ───────────────────────────────────────── */ + /// Sanitize a name used to construct filenames (workspace names, canvas names, etc.) /// Rejects any path component separators or traversal sequences. -fn safe_name(name: &str) -> Result { +pub(crate) fn safe_name(name: &str) -> Result { let trimmed = name.trim(); if trimmed.is_empty() { return Err("Name cannot be empty".to_string()); @@ -78,137 +86,9 @@ fn safe_name(name: &str) -> Result { Ok(trimmed.to_string()) } -/* ── Static Regexes ──────────────────────────────────────── - * Compiled once, reused across calls. 
- * ────────────────────────────────────────────────────────── */ - -static PREVIEW_WIKILINK_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]").unwrap() -}); - -static PREVIEW_FMT_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"[*_~`]").unwrap() -}); - -static DAILY_NOTE_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"^\d{4}-\d{2}-\d{2}\.md$").unwrap() -}); - -static TASK_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"^(\s*)- \[([ x/])\] (.+)$").unwrap() -}); - -static TASK_MARKER_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"- \[[ x/]\]").unwrap() -}); - -static EXPORT_WIKILINK_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap() -}); - -static FLASHCARD_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"\?\?\s*(.+?)\s*::\s*(.+?)\s*\?\?").unwrap() -}); - -static FRONTMATTER_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"(?s)^---\n(.+?)\n---").unwrap() -}); - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct NoteEntry { - pub path: String, - pub name: String, - pub is_dir: bool, - pub children: Option>, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct GraphData { - pub nodes: Vec, - pub edges: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct GraphNode { - pub id: String, - pub label: String, - pub path: String, - pub link_count: usize, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct GraphEdge { - pub source: String, - pub target: String, -} - -fn normalize_note_name(name: &str) -> String { - name.trim().to_lowercase() -} - -static WIKILINK_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]").unwrap() -}); - -fn extract_wikilinks(content: &str) -> Vec { - WIKILINK_RE.captures_iter(content) - .map(|cap| cap[1].trim().to_string()) - .collect() -} - -#[tauri::command] -fn list_notes(vault_path: String) -> Result, String> { - let vault = Path::new(&vault_path); - if !vault.exists() { - return Err("Vault path does not 
exist".to_string()); - } - - fn build_tree(dir: &Path, base: &Path) -> Vec { - let mut entries: Vec = Vec::new(); - - if let Ok(read_dir) = fs::read_dir(dir) { - let mut items: Vec<_> = read_dir.filter_map(|e| e.ok()).collect(); - items.sort_by_key(|e| e.file_name()); - - for entry in items { - let path = entry.path(); - let file_name = entry.file_name().to_string_lossy().to_string(); - - // Skip hidden files/dirs - if file_name.starts_with('.') { - continue; - } - - if path.is_dir() { - let children = build_tree(&path, base); - let rel = path.strip_prefix(base).unwrap_or(&path); - entries.push(NoteEntry { - path: rel.to_string_lossy().to_string(), - name: file_name, - is_dir: true, - children: Some(children), - }); - } else if path.extension().map_or(false, |ext| ext == "md") { - let rel = path.strip_prefix(base).unwrap_or(&path); - entries.push(NoteEntry { - path: rel.to_string_lossy().to_string(), - name: file_name.trim_end_matches(".md").to_string(), - is_dir: false, - children: None, - }); - } - } - } - - entries - } - - Ok(build_tree(vault, vault)) -} - /// Validate that a relative path stays within the vault root. /// Returns the canonical full path, or an error if it escapes. -fn safe_vault_path(vault_path: &str, relative_path: &str) -> Result { +pub(crate) fn safe_vault_path(vault_path: &str, relative_path: &str) -> Result { let vault = Path::new(vault_path) .canonicalize() .map_err(|e| format!("Invalid vault path: {}", e))?; @@ -230,103 +110,70 @@ fn safe_vault_path(vault_path: &str, relative_path: &str) -> Result Result { - let full_path = safe_vault_path(&vault_path, &relative_path)?; - fs::read_to_string(&full_path).map_err(|e| format!("Failed to read note: {}", e)) +/* ── Static Regexes ──────────────────────────────────────── + * Compiled once, reused across calls. 
+ * ────────────────────────────────────────────────────────── */ + +pub(crate) static WIKILINK_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]").unwrap() +}); + +pub(crate) static PREVIEW_WIKILINK_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]").unwrap() +}); + +pub(crate) static PREVIEW_FMT_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"[*_~`]").unwrap() +}); + +pub(crate) static DAILY_NOTE_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"^\d{4}-\d{2}-\d{2}\.md$").unwrap() +}); + +pub(crate) static TASK_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"^(\s*)- \[([ x/])\] (.+)$").unwrap() +}); + +pub(crate) static TASK_MARKER_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"- \[[ x/]\]").unwrap() +}); + +pub(crate) static EXPORT_WIKILINK_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap() +}); + +pub(crate) static FLASHCARD_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"\?\?\s*(.+?)\s*::\s*(.+?)\s*\?\?").unwrap() +}); + +pub(crate) static FRONTMATTER_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"(?s)^---\n(.+?)\n---").unwrap() +}); + +pub(crate) static TAG_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"(?:^|[\s,;(])(#[a-zA-Z][a-zA-Z0-9_/-]*)").unwrap() +}); + +/* ── Shared Types ──────────────────────────────────────── */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct NoteEntry { + pub path: String, + pub name: String, + pub is_dir: bool, + pub children: Option>, } -#[tauri::command] -fn write_note(vault_path: String, relative_path: String, content: String) -> Result<(), String> { - let full_path = safe_vault_path(&vault_path, &relative_path)?; - atomic_write(&full_path, &content) +/* ── Config ─────────────────────────────────────────────── */ + +pub(crate) fn dirs_config_path() -> PathBuf { + let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string()); + Path::new(&home).join(".config").join("graph-notes").join("vault_path") 
} -#[tauri::command] -fn delete_note(vault_path: String, relative_path: String) -> Result<(), String> { - let full_path = safe_vault_path(&vault_path, &relative_path)?; - if full_path.is_file() { - fs::remove_file(&full_path).map_err(|e| format!("Failed to delete note: {}", e)) - } else { - Err("Note not found".to_string()) - } -} - -#[tauri::command] -fn build_graph(vault_path: String) -> Result { - let vault = Path::new(&vault_path); - if !vault.exists() { - return Err("Vault path does not exist".to_string()); - } - - let mut nodes: Vec = Vec::new(); - let mut edges: Vec = Vec::new(); - - // Collect all notes - let mut note_map: std::collections::HashMap = std::collections::HashMap::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let rel_path = entry.path().strip_prefix(vault).unwrap_or(entry.path()); - let rel_str = rel_path.to_string_lossy().to_string(); - let name = rel_path - .file_stem() - .unwrap_or_default() - .to_string_lossy() - .to_string(); - - note_map.insert(normalize_note_name(&name), rel_str.clone()); - - nodes.push(GraphNode { - id: rel_str.clone(), - label: name, - path: rel_str, - link_count: 0, - }); - } - - // Parse links and build edges - for node in &mut nodes { - let full_path = vault.join(&node.path); - if let Ok(content) = fs::read_to_string(&full_path) { - let links = extract_wikilinks(&content); - node.link_count = links.len(); - - for link in &links { - let normalized = normalize_note_name(link); - if let Some(target_path) = note_map.get(&normalized) { - edges.push(GraphEdge { - source: node.id.clone(), - target: target_path.clone(), - }); - } - } - } - } - - Ok(GraphData { nodes, edges }) -} - -#[tauri::command] -fn get_or_create_daily(vault_path: String) -> Result { - let today = Local::now().format("%Y-%m-%d").to_string(); - let daily_dir = Path::new(&vault_path).join("daily"); - let daily_path = 
daily_dir.join(format!("{}.md", today)); - let relative_path = format!("daily/{}.md", today); - - if !daily_dir.exists() { - fs::create_dir_all(&daily_dir).map_err(|e| format!("Failed to create daily dir: {}", e))?; - } - - if !daily_path.exists() { - let content = format!("# {}\n\n", today); - atomic_write(&daily_path, &content)?; - } - - Ok(relative_path) +pub(crate) fn dirs_config_dir() -> PathBuf { + let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string()); + Path::new(&home).join(".config").join("graph-notes") } #[tauri::command] @@ -351,16 +198,6 @@ fn set_vault_path(path: String) -> Result<(), String> { atomic_write(&config_path, &path) } -fn dirs_config_path() -> PathBuf { - let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string()); - Path::new(&home).join(".config").join("graph-notes").join("vault_path") -} - -fn dirs_config_dir() -> PathBuf { - let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string()); - Path::new(&home).join(".config").join("graph-notes") -} - #[tauri::command] fn ensure_vault(vault_path: String) -> Result<(), String> { let vault = Path::new(&vault_path); @@ -370,204 +207,6 @@ fn ensure_vault(vault_path: String) -> Result<(), String> { Ok(()) } -/* ── Full-Text Search ──────────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize)] -pub struct SearchResult { - pub path: String, - pub name: String, - pub line_number: usize, - pub context: String, - pub score: usize, -} - -#[tauri::command] -fn search_vault(vault_path: String, query: String) -> Result, String> { - let vault = Path::new(&vault_path); - if !vault.exists() { - return Err("Vault path does not exist".to_string()); - } - - let query_lower = query.to_lowercase(); - let mut results: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let rel_path = entry.path().strip_prefix(vault).unwrap_or(entry.path()); - 
let rel_str = rel_path.to_string_lossy().to_string(); - let name = rel_path - .file_stem() - .unwrap_or_default() - .to_string_lossy() - .to_string(); - - if let Ok(content) = fs::read_to_string(entry.path()) { - let content_lower = content.to_lowercase(); - let match_count = content_lower.matches(&query_lower).count(); - - if match_count > 0 { - // Find the first matching line for context - for (i, line) in content.lines().enumerate() { - if line.to_lowercase().contains(&query_lower) { - results.push(SearchResult { - path: rel_str.clone(), - name: name.clone(), - line_number: i + 1, - context: line.trim().chars().take(200).collect(), - score: match_count, - }); - break; - } - } - } - } - } - - // Sort by score (most matches first), then by name - results.sort_by(|a, b| b.score.cmp(&a.score).then_with(|| a.name.cmp(&b.name))); - Ok(results) -} - -/* ── Rename & Relink ───────────────────────────────────────── */ - -#[tauri::command] -fn rename_note(vault_path: String, old_path: String, new_path: String) -> Result<(), String> { - let old_full = safe_vault_path(&vault_path, &old_path)?; - let new_full = safe_vault_path(&vault_path, &new_path)?; - - if !old_full.exists() { - return Err("Source note does not exist".to_string()); - } - if new_full.exists() { - return Err("A note with that name already exists".to_string()); - } - - // Ensure destination directory exists - if let Some(parent) = new_full.parent() { - fs::create_dir_all(parent).map_err(|e| format!("Failed to create directory: {}", e))?; - } - - fs::rename(&old_full, &new_full).map_err(|e| format!("Failed to rename note: {}", e)) -} - -#[tauri::command] -fn update_wikilinks(vault_path: String, old_name: String, new_name: String) -> Result { - let vault = Path::new(&vault_path); - let mut updated_count = 0; - - // Compile regexes once before the loop - let pattern = format!(r"\[\[{}\]\]", regex::escape(&old_name)); - let pattern_with_alias = format!(r"\[\[{}\|", regex::escape(&old_name)); - let re1 = 
Regex::new(&pattern).map_err(|e| format!("Invalid regex: {}", e))?; - let re2 = Regex::new(&pattern_with_alias).map_err(|e| format!("Invalid regex: {}", e))?; - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - if let Ok(content) = fs::read_to_string(entry.path()) { - if re1.is_match(&content) || re2.is_match(&content) { - let new_content = re1.replace_all(&content, format!("[[{}]]", new_name)); - let new_content = re2.replace_all(&new_content, format!("[[{}|", new_name)); - atomic_write(entry.path(), &new_content)?; - updated_count += 1; - } - } - } - - Ok(updated_count) -} - -/* ── Tags ──────────────────────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize)] -pub struct TagInfo { - pub tag: String, - pub count: usize, - pub notes: Vec, -} - -static TAG_RE: LazyLock = LazyLock::new(|| { - Regex::new(r"(?:^|[\s,;(])(#[a-zA-Z][a-zA-Z0-9_/-]*)").unwrap() -}); - -fn extract_tags(content: &str) -> Vec { - let mut tags: Vec = TAG_RE - .captures_iter(content) - .map(|cap| cap[1].to_string()) - .collect(); - tags.sort(); - tags.dedup(); - tags -} - -#[tauri::command] -fn list_tags(vault_path: String) -> Result, String> { - let vault = Path::new(&vault_path); - if !vault.exists() { - return Err("Vault path does not exist".to_string()); - } - - let mut tag_map: std::collections::HashMap> = std::collections::HashMap::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let rel_path = entry.path().strip_prefix(vault).unwrap_or(entry.path()); - let rel_str = rel_path.to_string_lossy().to_string(); - - if let Ok(content) = fs::read_to_string(entry.path()) { - for tag in extract_tags(&content) { - tag_map.entry(tag).or_default().push(rel_str.clone()); - } - } - } - - let mut tags: Vec = tag_map - .into_iter() - .map(|(tag, notes)| TagInfo { - tag, 
- count: notes.len(), - notes, - }) - .collect(); - - tags.sort_by(|a, b| b.count.cmp(&a.count).then_with(|| a.tag.cmp(&b.tag))); - Ok(tags) -} - -/* ── Note Preview ────────────────────────────────────────── */ - -#[tauri::command] -fn read_note_preview(vault_path: String, note_path: String, max_chars: Option) -> Result { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| format!("Read failed: {}", e))?; - let limit = max_chars.unwrap_or(200); - - // Strip markdown formatting for clean preview - let cleaned: String = content - .lines() - .filter(|l| !l.trim().starts_with('#')) // remove headings - .collect::>() - .join(" "); - - let cleaned = PREVIEW_WIKILINK_RE.replace_all(&cleaned, "$1").to_string(); - let cleaned = PREVIEW_FMT_RE.replace_all(&cleaned, "").to_string(); - let cleaned = cleaned.trim().to_string(); - - if cleaned.len() > limit { - Ok(format!("{}...", &cleaned[..limit])) - } else { - Ok(cleaned) - } -} - /* ── Vault Management ────────────────────────────────────── */ #[tauri::command] @@ -600,1353 +239,7 @@ fn add_vault(vault_path: String) -> Result<(), String> { atomic_write(&config_path, &json) } -/* ── Templates ───────────────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize)] -pub struct TemplateInfo { - pub name: String, - pub path: String, -} - -#[tauri::command] -fn list_templates(vault_path: String) -> Result, String> { - let templates_dir = Path::new(&vault_path).join("_templates"); - if !templates_dir.exists() { - return Ok(vec![]); - } - - let mut templates: Vec = Vec::new(); - for entry in fs::read_dir(&templates_dir).map_err(|e| e.to_string())? 
{ - let entry = entry.map_err(|e| e.to_string())?; - let path = entry.path(); - if path.extension().map_or(false, |ext| ext == "md") { - let name = path.file_stem() - .unwrap_or_default() - .to_string_lossy() - .to_string(); - let rel = format!("_templates/{}", path.file_name().unwrap_or_default().to_string_lossy()); - templates.push(TemplateInfo { name, path: rel }); - } - } - templates.sort_by(|a, b| a.name.cmp(&b.name)); - Ok(templates) -} - -#[tauri::command] -fn create_from_template( - vault_path: String, - template_path: String, - note_name: String, -) -> Result { - let template_full = safe_vault_path(&vault_path, &template_path)?; - let template_content = fs::read_to_string(&template_full) - .map_err(|e| format!("Failed to read template: {}", e))?; - - let today = Local::now().format("%Y-%m-%d").to_string(); - let content = template_content - .replace("{{title}}", ¬e_name) - .replace("{{date}}", &today); - - let note_path = format!("{}.md", note_name); - let full_path = safe_vault_path(&vault_path, ¬e_path)?; - atomic_write(&full_path, &content)?; - Ok(note_path) -} - -/* ── Favorites ──────────────────────────────────────────── */ - -#[tauri::command] -fn get_favorites(vault_path: String) -> Result, String> { - let path = Path::new(&vault_path).join(".graph-notes").join("favorites.json"); - if !path.exists() { - return Ok(vec![]); - } - let content = fs::read_to_string(&path).map_err(|e| e.to_string())?; - let favs: Vec = serde_json::from_str(&content).unwrap_or_default(); - Ok(favs) -} - -#[tauri::command] -fn set_favorites(vault_path: String, favorites: Vec) -> Result<(), String> { - let dir = Path::new(&vault_path).join(".graph-notes"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - let json = serde_json::to_string_pretty(&favorites).map_err(|e| e.to_string())?; - atomic_write(&dir.join("favorites.json"), &json) -} - -/* ── Frontmatter ────────────────────────────────────────── */ - -#[tauri::command] -fn parse_frontmatter(vault_path: String, 
note_path: String) -> Result { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - - if !content.starts_with("---\n") { - return Ok(serde_json::json!({})); - } - - let end = content[4..].find("\n---"); - match end { - Some(pos) => { - let yaml_str = &content[4..4 + pos]; - // Parse simple key: value pairs - let mut map = serde_json::Map::new(); - for line in yaml_str.lines() { - if let Some(colon_pos) = line.find(':') { - let key = line[..colon_pos].trim().to_string(); - let val = line[colon_pos + 1..].trim().to_string(); - map.insert(key, serde_json::Value::String(val)); - } - } - Ok(serde_json::Value::Object(map)) - } - None => Ok(serde_json::json!({})), - } -} - -#[tauri::command] -fn write_frontmatter( - vault_path: String, - note_path: String, - frontmatter: serde_json::Map, -) -> Result<(), String> { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - - // Strip existing frontmatter - let body = if content.starts_with("---\n") { - if let Some(end) = content[4..].find("\n---") { - content[4 + end + 4..].to_string() - } else { - content - } - } else { - content - }; - - // Build new frontmatter - let mut yaml = String::from("---\n"); - for (key, val) in &frontmatter { - let v = match val { - serde_json::Value::String(s) => s.clone(), - other => other.to_string(), - }; - yaml.push_str(&format!("{}: {}\n", key, v)); - } - yaml.push_str("---\n"); - - let new_content = if frontmatter.is_empty() { - body.trim_start().to_string() - } else { - format!("{}{}", yaml, body) - }; - - atomic_write(&full, &new_content) -} - -/* ── Attachments ────────────────────────────────────────── */ - -#[tauri::command] -fn save_attachment( - vault_path: String, - file_name: String, - data: Vec, -) -> Result { - // Validate filename — reject path separators and traversal - let sanitized = safe_name(&file_name)?; - let attach_dir = 
Path::new(&vault_path).join("_attachments"); - fs::create_dir_all(&attach_dir).map_err(|e| e.to_string())?; - - // Deduplicate filename - let mut target = attach_dir.join(&sanitized); - let stem = target.file_stem().unwrap_or_default().to_string_lossy().to_string(); - let ext = target.extension().map(|e| format!(".{}", e.to_string_lossy())).unwrap_or_default(); - let mut counter = 1; - while target.exists() { - target = attach_dir.join(format!("{}-{}{}", stem, counter, ext)); - counter += 1; - } - - atomic_write_bytes(&target, &data)?; - let final_name = target.file_name().unwrap_or_default().to_string_lossy(); - let rel = format!("_attachments/{}", final_name); - Ok(rel) -} - -#[tauri::command] -fn list_attachments(vault_path: String) -> Result, String> { - let attach_dir = Path::new(&vault_path).join("_attachments"); - if !attach_dir.exists() { - return Ok(vec![]); - } - - let mut files: Vec = Vec::new(); - for entry in fs::read_dir(&attach_dir).map_err(|e| e.to_string())? { - let entry = entry.map_err(|e| e.to_string())?; - if entry.path().is_file() { - files.push(entry.file_name().to_string_lossy().to_string()); - } - } - files.sort(); - Ok(files) -} - -/* ── Daily Notes Listing ────────────────────────────────── */ - -#[tauri::command] -fn list_daily_notes(vault_path: String) -> Result, String> { - let daily_dir = Path::new(&vault_path).join("daily"); - if !daily_dir.exists() { - return Ok(vec![]); - } - - let mut dates: Vec = Vec::new(); - - for entry in fs::read_dir(&daily_dir).map_err(|e| e.to_string())? 
{ - let entry = entry.map_err(|e| e.to_string())?; - let name = entry.file_name().to_string_lossy().to_string(); - if DAILY_NOTE_RE.is_match(&name) { - dates.push(name.replace(".md", "")); - } - } - dates.sort(); - Ok(dates) -} - -/* ── Theme ──────────────────────────────────────────────── */ - -#[tauri::command] -fn get_theme() -> Result { - let path = dirs_config_dir().join("theme"); - if path.exists() { - fs::read_to_string(&path).map_err(|e| e.to_string()) - } else { - Ok("dark-purple".to_string()) - } -} - -#[tauri::command] -fn set_theme(theme: String) -> Result<(), String> { - atomic_write(&dirs_config_dir().join("theme"), &theme) -} - -/* ── Export ──────────────────────────────────────────────── */ - -#[tauri::command] -fn export_note_html(vault_path: String, note_path: String) -> Result { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - let title = Path::new(¬e_path) - .file_stem() - .unwrap_or_default() - .to_string_lossy() - .to_string(); - - // Basic markdown-to-HTML (headings, bold, italic, links, paragraphs) - let mut html_body = String::new(); - for line in content.lines() { - let trimmed = line.trim(); - if trimmed.starts_with("---") { - continue; // skip frontmatter delimiters - } - if trimmed.starts_with("# ") { - html_body.push_str(&format!("

    {}

    \n", &trimmed[2..])); - } else if trimmed.starts_with("## ") { - html_body.push_str(&format!("

    {}

    \n", &trimmed[3..])); - } else if trimmed.starts_with("### ") { - html_body.push_str(&format!("

    {}

    \n", &trimmed[4..])); - } else if trimmed.starts_with("- ") { - html_body.push_str(&format!("
  • {}
  • \n", &trimmed[2..])); - } else if trimmed.is_empty() { - html_body.push_str("
    \n"); - } else { - html_body.push_str(&format!("

    {}

    \n", trimmed)); - } - } - - // Replace wikilinks - let html_body = EXPORT_WIKILINK_RE.replace_all(&html_body, |caps: ®ex::Captures| { - let target = caps.get(1).map_or("", |m| m.as_str()).trim(); - let label = caps.get(2).map_or(target, |m| m.as_str()).trim(); - format!("{}", target, label) - }).to_string(); - - let html = format!(r#" - - - - -{title} - - - -{html_body} - -"#); - Ok(html) -} - -/* ── Tasks (Kanban) ─────────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct TaskItem { - pub text: String, - pub state: String, // "todo", "in-progress", "done" - pub source_path: String, - pub line_number: usize, -} - -#[tauri::command] -fn list_tasks(vault_path: String) -> Result, String> { - let vault = Path::new(&vault_path); - let mut tasks: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") || rel.starts_with("_") { - continue; - } - - if let Ok(content) = fs::read_to_string(path) { - for (i, line) in content.lines().enumerate() { - if let Some(caps) = TASK_RE.captures(line) { - let marker = &caps[2]; - let state = match marker { - "x" => "done", - "/" => "in-progress", - _ => "todo", - }.to_string(); - let text = caps[3].trim().to_string(); - tasks.push(TaskItem { - text, - state, - source_path: rel.clone(), - line_number: i + 1, - }); - } - } - } - } - Ok(tasks) -} - -#[tauri::command] -fn toggle_task(vault_path: String, note_path: String, line_number: usize, new_state: String) -> Result<(), String> { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - let mut lines: Vec = content.lines().map(|s| s.to_string()).collect(); - - if line_number == 0 || line_number > lines.len() { - return 
Err("Invalid line number".to_string()); - } - - let marker = match new_state.as_str() { - "done" => "x", - "in-progress" => "/", - _ => " ", - }; - - let line = &lines[line_number - 1]; - if let Some(m) = TASK_MARKER_RE.find(line) { - let mut new_line = String::new(); - new_line.push_str(&line[..m.start()]); - new_line.push_str(&format!("- [{}]", marker)); - new_line.push_str(&line[m.end()..]); - lines[line_number - 1] = new_line; - } - - atomic_write(&full, &(lines.join("\n") + "\n")) -} - -/* ── Snapshots (Version History) ────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct SnapshotInfo { - pub timestamp: String, - pub filename: String, - pub size: u64, -} - -#[tauri::command] -fn save_snapshot(vault_path: String, note_path: String) -> Result { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - - let sanitized_name = note_path.replace('/', "__").replace(".md", ""); - let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&sanitized_name); - fs::create_dir_all(&history_dir).map_err(|e| e.to_string())?; - - let ts = Local::now().format("%Y%m%d_%H%M%S").to_string(); - let snap_name = format!("{}.md", ts); - // Snapshots are write-once, never overwritten — direct write is safe - fs::write(history_dir.join(&snap_name), &content).map_err(|e| e.to_string())?; - Ok(snap_name) -} - -#[tauri::command] -fn list_snapshots(vault_path: String, note_path: String) -> Result, String> { - let sanitized_name = note_path.replace('/', "__").replace(".md", ""); - let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&sanitized_name); - - if !history_dir.exists() { - return Ok(vec![]); - } - - let mut snaps: Vec = Vec::new(); - for entry in fs::read_dir(&history_dir).map_err(|e| e.to_string())? 
{ - let entry = entry.map_err(|e| e.to_string())?; - let meta = entry.metadata().map_err(|e| e.to_string())?; - if meta.is_file() { - let name = entry.file_name().to_string_lossy().to_string(); - let ts = name.replace(".md", ""); - snaps.push(SnapshotInfo { - timestamp: ts, - filename: name, - size: meta.len(), - }); - } - } - snaps.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); - Ok(snaps) -} - -#[tauri::command] -fn read_snapshot(vault_path: String, note_path: String, snapshot_name: String) -> Result { - let sanitized_name = note_path.replace('/', "__").replace(".md", ""); - let snap_path = Path::new(&vault_path) - .join(".graph-notes") - .join("history") - .join(&sanitized_name) - .join(&snapshot_name); - fs::read_to_string(&snap_path).map_err(|e| e.to_string()) -} - -/* ── Search & Replace ───────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ReplaceResult { - pub path: String, - pub count: usize, -} - -#[tauri::command] -fn search_replace_vault( - vault_path: String, - search: String, - replace: String, - dry_run: bool, -) -> Result, String> { - let vault = Path::new(&vault_path); - let mut results: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") || rel.starts_with("_") { - continue; - } - - if let Ok(content) = fs::read_to_string(path) { - let count = content.matches(&search).count(); - if count > 0 { - if !dry_run { - let new_content = content.replace(&search, &replace); - atomic_write(path, &new_content)?; - } - results.push(ReplaceResult { - path: rel, - count, - }); - } - } - } - Ok(results) -} - -/* ── Writing Goals ──────────────────────────────────────── */ - -#[tauri::command] -fn get_writing_goal(vault_path: String, note_path: String) -> Result { - 
let goals_path = Path::new(&vault_path).join(".graph-notes").join("goals.json"); - if !goals_path.exists() { - return Ok(0); - } - let content = fs::read_to_string(&goals_path).map_err(|e| e.to_string())?; - let goals: serde_json::Map = - serde_json::from_str(&content).unwrap_or_default(); - let goal = goals.get(¬e_path) - .and_then(|v| v.as_u64()) - .unwrap_or(0); - Ok(goal as u32) -} - -#[tauri::command] -fn set_writing_goal(vault_path: String, note_path: String, goal: u32) -> Result<(), String> { - let dir = Path::new(&vault_path).join(".graph-notes"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - let goals_path = dir.join("goals.json"); - - let mut goals: serde_json::Map = if goals_path.exists() { - let content = fs::read_to_string(&goals_path).map_err(|e| e.to_string())?; - serde_json::from_str(&content).unwrap_or_default() - } else { - serde_json::Map::new() - }; - - if goal == 0 { - goals.remove(¬e_path); - } else { - goals.insert(note_path, serde_json::Value::Number(serde_json::Number::from(goal))); - } - - let json = serde_json::to_string_pretty(&goals).map_err(|e| e.to_string())?; - atomic_write(&goals_path, &json) -} - -/* ── Note Refactoring ───────────────────────────────────── */ - -#[tauri::command] -fn extract_to_note( - vault_path: String, - source_path: String, - selected_text: String, - new_note_name: String, -) -> Result { - let new_path = safe_vault_path(&vault_path, &format!("{}.md", &new_note_name))?; - if new_path.exists() { - return Err(format!("Note '{}' already exists", new_note_name)); - } - - // Create new note with extracted text - atomic_write(&new_path, &selected_text)?; - - // Replace selected text with wikilink in source - let source_full = safe_vault_path(&vault_path, &source_path)?; - let content = fs::read_to_string(&source_full).map_err(|e| e.to_string())?; - let new_content = content.replacen(&selected_text, &format!("[[{}]]", new_note_name), 1); - atomic_write(&source_full, &new_content)?; - - Ok(format!("{}.md", 
new_note_name)) -} - -#[tauri::command] -fn merge_notes( - vault_path: String, - source_path: String, - target_path: String, -) -> Result<(), String> { - let vault = Path::new(&vault_path); - let source_full = safe_vault_path(&vault_path, &source_path)?; - let target_full = safe_vault_path(&vault_path, &target_path)?; - - let source_content = fs::read_to_string(&source_full).map_err(|e| e.to_string())?; - let target_content = fs::read_to_string(&target_full).map_err(|e| e.to_string())?; - - let source_name = source_path.replace(".md", ""); - let merged = format!("{}\n\n---\n\n## Merged from {}\n\n{}", target_content.trim_end(), source_name, source_content); - atomic_write(&target_full, &merged)?; - - // Delete source - fs::remove_file(&source_full).map_err(|e| e.to_string())?; - - // Update wikilinks pointing to source → target - let target_name = target_path.replace(".md", ""); - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - if path == target_full { continue; } - if let Ok(content) = fs::read_to_string(path) { - let updated = content.replace( - &format!("[[{}]]", source_name), - &format!("[[{}]]", target_name), - ); - if updated != content { - atomic_write(path, &updated)?; - } - } - } - Ok(()) -} - -/* ── Encryption ─────────────────────────────────────────── */ - -use aes_gcm::{Aes256Gcm, Key, Nonce}; -use aes_gcm::aead::{Aead, KeyInit}; -use argon2::Argon2; -use base64::{Engine as _, engine::general_purpose::STANDARD as B64}; - -fn derive_key(password: &str, salt: &[u8]) -> [u8; 32] { - let mut key = [0u8; 32]; - Argon2::default() - .hash_password_into(password.as_bytes(), salt, &mut key) - .expect("key derivation failed"); - key -} - -#[tauri::command] -fn encrypt_note(vault_path: String, note_path: String, password: String) -> Result<(), String> { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = 
fs::read_to_string(&full).map_err(|e| e.to_string())?; - - let salt: [u8; 16] = rand::random(); - let nonce_bytes: [u8; 12] = rand::random(); - - let key_bytes = derive_key(&password, &salt); - let key = Key::::from_slice(&key_bytes); - let cipher = Aes256Gcm::new(key); - let nonce = Nonce::from_slice(&nonce_bytes); - - let ciphertext = cipher.encrypt(nonce, content.as_bytes()) - .map_err(|_| "Encryption failed".to_string())?; - - // Format: GRAPHNOTES_ENC:v1:{salt_b64}:{nonce_b64}:{ciphertext_b64} - let encoded = format!( - "GRAPHNOTES_ENC:v1:{}:{}:{}", - B64.encode(salt), - B64.encode(nonce_bytes), - B64.encode(&ciphertext), - ); - atomic_write(&full, &encoded) -} - -#[tauri::command] -fn decrypt_note(vault_path: String, note_path: String, password: String) -> Result { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - - if !content.starts_with("GRAPHNOTES_ENC:v1:") { - return Err("Note is not encrypted".to_string()); - } - - let parts: Vec<&str> = content.splitn(5, ':').collect(); - if parts.len() != 5 { - return Err("Invalid encrypted format".to_string()); - } - - let salt = B64.decode(parts[2]).map_err(|_| "Invalid salt".to_string())?; - let nonce_bytes = B64.decode(parts[3]).map_err(|_| "Invalid nonce".to_string())?; - let ciphertext = B64.decode(parts[4]).map_err(|_| "Invalid ciphertext".to_string())?; - - let key_bytes = derive_key(&password, &salt); - let key = Key::::from_slice(&key_bytes); - let cipher = Aes256Gcm::new(key); - let nonce = Nonce::from_slice(&nonce_bytes); - - let plaintext = cipher.decrypt(nonce, ciphertext.as_ref()) - .map_err(|_| "Wrong password".to_string())?; - - String::from_utf8(plaintext).map_err(|_| "Invalid UTF-8".to_string()) -} - -#[tauri::command] -fn is_encrypted(vault_path: String, note_path: String) -> Result { - let full = safe_vault_path(&vault_path, ¬e_path)?; - let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; - 
Ok(content.starts_with("GRAPHNOTES_ENC:v1:")) -} - -/* ── Flashcards (SRS) ───────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Flashcard { - pub question: String, - pub answer: String, - pub source_path: String, - pub line_number: usize, - pub due: Option, - pub interval: u32, - pub ease: f32, -} - -#[tauri::command] -fn list_flashcards(vault_path: String) -> Result, String> { - let vault = Path::new(&vault_path); - let mut cards: Vec = Vec::new(); - - // Load schedule data - let srs_path = vault.join(".graph-notes").join("srs.json"); - let srs: serde_json::Map = if srs_path.exists() { - let c = fs::read_to_string(&srs_path).unwrap_or_default(); - serde_json::from_str(&c).unwrap_or_default() - } else { - serde_json::Map::new() - }; - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") || rel.starts_with("_") { continue; } - - if let Ok(content) = fs::read_to_string(path) { - for (i, line) in content.lines().enumerate() { - for caps in FLASHCARD_RE.captures_iter(line) { - let q = caps[1].trim().to_string(); - let a = caps[2].trim().to_string(); - let card_id = format!("{}:{}", rel, i + 1); - - let (due, interval, ease) = if let Some(sched) = srs.get(&card_id) { - ( - sched.get("due").and_then(|v| v.as_str()).map(|s| s.to_string()), - sched.get("interval").and_then(|v| v.as_u64()).unwrap_or(1) as u32, - sched.get("ease").and_then(|v| v.as_f64()).unwrap_or(2.5) as f32, - ) - } else { - (None, 1, 2.5) - }; - - cards.push(Flashcard { - question: q, - answer: a, - source_path: rel.clone(), - line_number: i + 1, - due, - interval, - ease, - }); - } - } - } - } - Ok(cards) -} - -#[tauri::command] -fn update_card_schedule( - vault_path: String, - card_id: String, - quality: u32, -) -> 
Result<(), String> { - let srs_path = Path::new(&vault_path).join(".graph-notes").join("srs.json"); - fs::create_dir_all(Path::new(&vault_path).join(".graph-notes")).map_err(|e| e.to_string())?; - - let mut srs: serde_json::Map = if srs_path.exists() { - let c = fs::read_to_string(&srs_path).unwrap_or_default(); - serde_json::from_str(&c).unwrap_or_default() - } else { - serde_json::Map::new() - }; - - let entry = srs.entry(card_id).or_insert_with(|| serde_json::json!({"interval": 1, "ease": 2.5})); - let obj = entry.as_object_mut().ok_or("Invalid SRS entry")?; - - let mut interval = obj.get("interval").and_then(|v| v.as_u64()).unwrap_or(1) as f64; - let mut ease = obj.get("ease").and_then(|v| v.as_f64()).unwrap_or(2.5); - - // SM-2 algorithm - if quality >= 3 { - if interval <= 1.0 { interval = 1.0; } - else if interval <= 6.0 { interval = 6.0; } - else { interval *= ease; } - ease = ease + (0.1 - (5.0 - quality as f64) * (0.08 + (5.0 - quality as f64) * 0.02)); - if ease < 1.3 { ease = 1.3; } - } else { - interval = 1.0; - } - - let due = Local::now() + chrono::Duration::days(interval as i64); - obj.insert("interval".into(), serde_json::json!(interval as u32)); - obj.insert("ease".into(), serde_json::json!(ease)); - obj.insert("due".into(), serde_json::json!(due.format("%Y-%m-%d").to_string())); - - let json = serde_json::to_string_pretty(&srs).map_err(|e| e.to_string())?; - atomic_write(&srs_path, &json) -} - -/* ── Fold State ─────────────────────────────────────────── */ - -#[tauri::command] -fn save_fold_state(vault_path: String, note_path: String, folds: Vec) -> Result<(), String> { - let dir = Path::new(&vault_path).join(".graph-notes"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - let folds_path = dir.join("folds.json"); - - let mut data: serde_json::Map = if folds_path.exists() { - let c = fs::read_to_string(&folds_path).unwrap_or_default(); - serde_json::from_str(&c).unwrap_or_default() - } else { - serde_json::Map::new() - }; - - 
data.insert(note_path, serde_json::json!(folds)); - let json = serde_json::to_string_pretty(&data).map_err(|e| e.to_string())?; - atomic_write(&folds_path, &json) -} - -#[tauri::command] -fn load_fold_state(vault_path: String, note_path: String) -> Result, String> { - let folds_path = Path::new(&vault_path).join(".graph-notes").join("folds.json"); - if !folds_path.exists() { return Ok(vec![]); } - let c = fs::read_to_string(&folds_path).map_err(|e| e.to_string())?; - let data: serde_json::Map = serde_json::from_str(&c).unwrap_or_default(); - let folds = data.get(¬e_path) - .and_then(|v| v.as_array()) - .map(|arr| arr.iter().filter_map(|v| v.as_u64().map(|n| n as usize)).collect()) - .unwrap_or_default(); - Ok(folds) -} - -/* ── Custom CSS ─────────────────────────────────────────── */ - -#[tauri::command] -fn get_custom_css() -> Result { - let config_dir = dirs_config_dir(); - let css_path = config_dir.join("custom.css"); - if css_path.exists() { - fs::read_to_string(&css_path).map_err(|e| e.to_string()) - } else { - Ok(String::new()) - } -} - -#[tauri::command] -fn set_custom_css(css: String) -> Result<(), String> { - atomic_write(&dirs_config_dir().join("custom.css"), &css) -} - -/* ── Workspace Layouts ──────────────────────────────────── */ - -#[tauri::command] -fn save_workspace(vault_path: String, name: String, state: String) -> Result<(), String> { - let sanitized = safe_name(&name)?; - let dir = Path::new(&vault_path).join(".graph-notes").join("workspaces"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - atomic_write(&dir.join(format!("{}.json", sanitized)), &state) -} - -#[tauri::command] -fn load_workspace(vault_path: String, name: String) -> Result { - let sanitized = safe_name(&name)?; - let path = Path::new(&vault_path).join(".graph-notes").join("workspaces").join(format!("{}.json", sanitized)); - fs::read_to_string(&path).map_err(|e| e.to_string()) -} - -#[tauri::command] -fn list_workspaces(vault_path: String) -> Result, String> { - let 
dir = Path::new(&vault_path).join(".graph-notes").join("workspaces"); - if !dir.exists() { return Ok(vec![]); } - let mut names: Vec = Vec::new(); - for entry in fs::read_dir(&dir).map_err(|e| e.to_string())? { - let entry = entry.map_err(|e| e.to_string())?; - let name = entry.file_name().to_string_lossy().replace(".json", ""); - names.push(name); - } - names.sort(); - Ok(names) -} - -/* ── Tab Persistence ────────────────────────────────────── */ - -#[tauri::command] -fn save_tabs(vault_path: String, tabs: String) -> Result<(), String> { - let dir = Path::new(&vault_path).join(".graph-notes"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - atomic_write(&dir.join("tabs.json"), &tabs) -} - -#[tauri::command] -fn load_tabs(vault_path: String) -> Result { - let path = Path::new(&vault_path).join(".graph-notes").join("tabs.json"); - if !path.exists() { return Ok("[]".to_string()); } - fs::read_to_string(&path).map_err(|e| e.to_string()) -} - -/* ── Canvas / Whiteboard Persistence ────────────────────── */ - -#[tauri::command] -fn save_canvas(vault_path: String, name: String, data: String) -> Result<(), String> { - let sanitized = safe_name(&name)?; - let dir = Path::new(&vault_path).join(".graph-notes").join("canvases"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - atomic_write(&dir.join(format!("{}.json", sanitized)), &data) -} - -#[tauri::command] -fn load_canvas(vault_path: String, name: String) -> Result { - let sanitized = safe_name(&name)?; - let path = Path::new(&vault_path).join(".graph-notes").join("canvases").join(format!("{}.json", sanitized)); - if !path.exists() { return Ok("{}".to_string()); } - fs::read_to_string(&path).map_err(|e| e.to_string()) -} - -#[tauri::command] -fn list_canvases(vault_path: String) -> Result, String> { - let dir = Path::new(&vault_path).join(".graph-notes").join("canvases"); - if !dir.exists() { return Ok(vec![]); } - let mut names: Vec = Vec::new(); - for entry in fs::read_dir(&dir).map_err(|e| 
e.to_string())? { - let entry = entry.map_err(|e| e.to_string())?; - let name = entry.file_name().to_string_lossy().replace(".json", ""); - names.push(name); - } - names.sort(); - Ok(names) -} - -/* ── Frontmatter Query ──────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct FrontmatterRow { - pub path: String, - pub title: String, - pub fields: serde_json::Map, -} - -#[tauri::command] -fn query_frontmatter(vault_path: String) -> Result, String> { - let vault = Path::new(&vault_path); - let mut rows: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") { continue; } - - if let Ok(content) = fs::read_to_string(path) { - if let Some(caps) = FRONTMATTER_RE.captures(&content) { - let yaml_str = &caps[1]; - let mut fields = serde_json::Map::new(); - for line in yaml_str.lines() { - if let Some(idx) = line.find(':') { - let key = line[..idx].trim().to_string(); - let val = line[idx+1..].trim().to_string(); - fields.insert(key, serde_json::Value::String(val)); - } - } - let title = rel.replace(".md", ""); - rows.push(FrontmatterRow { path: rel, title, fields }); - } - } - } - Ok(rows) -} - -/* ── Backlink Context ───────────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct BacklinkContext { - pub source_path: String, - pub source_name: String, - pub excerpt: String, -} - -#[tauri::command] -fn get_backlink_context(vault_path: String, note_name: String) -> Result, String> { - let vault = Path::new(&vault_path); - let link_pattern = format!("[[{}]]", note_name); - let mut results: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == 
"md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") { continue; } - - if let Ok(content) = fs::read_to_string(path) { - if content.contains(&link_pattern) { - // Find paragraph containing the link - let paragraphs: Vec<&str> = content.split("\n\n").collect(); - for para in paragraphs { - if para.contains(&link_pattern) { - let excerpt = para.trim().chars().take(200).collect::(); - results.push(BacklinkContext { - source_path: rel.clone(), - source_name: rel.replace(".md", ""), - excerpt, - }); - break; - } - } - } - } - } - Ok(results) -} - -/* ── Dataview Query Engine ──────────────────────────────── */ - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct DataviewResult { - pub columns: Vec, - pub rows: Vec>, -} - -#[tauri::command] -fn run_dataview_query(vault_path: String, query: String) -> Result { - let vault = Path::new(&vault_path); - - - // Parse query: TABLE field1, field2 [FROM ""] [WHERE cond] [SORT field [ASC|DESC]] - let query_upper = query.to_uppercase(); - let is_table = query_upper.starts_with("TABLE"); - if !is_table { return Err("Only TABLE queries supported".into()); } - - // Extract fields - let after_table = query.trim_start_matches(|c: char| c.is_alphabetic() || c == ' ').trim(); - let parts: Vec<&str> = after_table.splitn(2, " FROM").collect(); - let field_str = parts[0].trim(); - let columns: Vec = std::iter::once("File".to_string()) - .chain(field_str.split(',').map(|s| s.trim().to_string())) - .collect(); - - // Parse sort - let sort_field = if query_upper.contains("SORT") { - let sort_part = query.split("SORT").nth(1).unwrap_or("").trim(); - let sf: Vec<&str> = sort_part.split_whitespace().collect(); - sf.first().map(|s| s.to_string()) - } else { None }; - - let mut rows: Vec> = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) 
- { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") { continue; } - - if let Ok(content) = fs::read_to_string(path) { - let mut fields_map: std::collections::HashMap = std::collections::HashMap::new(); - fields_map.insert("title".into(), rel.replace(".md", "")); - - if let Some(caps) = FRONTMATTER_RE.captures(&content) { - for line in caps[1].lines() { - if let Some(idx) = line.find(':') { - let k = line[..idx].trim().to_lowercase(); - let v = line[idx+1..].trim().to_string(); - fields_map.insert(k, v); - } - } - } - - let mut row = vec![rel.replace(".md", "")]; - for col in &columns[1..] { - let val = fields_map.get(&col.to_lowercase()).cloned().unwrap_or_default(); - row.push(val); - } - rows.push(row); - } - } - - // Sort - if let Some(ref sf) = sort_field { - let sf_lower = sf.to_lowercase(); - if let Some(idx) = columns.iter().position(|c| c.to_lowercase() == sf_lower) { - rows.sort_by(|a, b| a.get(idx).unwrap_or(&String::new()).cmp(b.get(idx).unwrap_or(&String::new()))); - } - } - - Ok(DataviewResult { columns, rows }) -} - -/* ── Git Sync ───────────────────────────────────────────── */ - -use std::process::Command; - -#[tauri::command] -fn git_status(vault_path: String) -> Result { - let output = Command::new("git") - .args(["status", "--porcelain"]) - .current_dir(&vault_path) - .output() - .map_err(|e| e.to_string())?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - Err(String::from_utf8_lossy(&output.stderr).to_string()) - } -} - -#[tauri::command] -fn git_commit(vault_path: String, message: String) -> Result { - let add_output = Command::new("git") - .args(["add", "."]) - .current_dir(&vault_path) - .output() - .map_err(|e| e.to_string())?; - if !add_output.status.success() { - return Err(String::from_utf8_lossy(&add_output.stderr).to_string()); - } - - let output = Command::new("git") - .args(["commit", "-m", 
&message]) - .current_dir(&vault_path) - .output() - .map_err(|e| e.to_string())?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - Err(String::from_utf8_lossy(&output.stderr).to_string()) - } -} - -#[tauri::command] -fn git_pull(vault_path: String) -> Result { - let output = Command::new("git") - .args(["pull"]) - .current_dir(&vault_path) - .output() - .map_err(|e| e.to_string())?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - Err(String::from_utf8_lossy(&output.stderr).to_string()) - } -} - -#[tauri::command] -fn git_push(vault_path: String) -> Result { - let output = Command::new("git") - .args(["push"]) - .current_dir(&vault_path) - .output() - .map_err(|e| e.to_string())?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - Err(String::from_utf8_lossy(&output.stderr).to_string()) - } -} - -#[tauri::command] -fn git_init(vault_path: String) -> Result { - let output = Command::new("git") - .args(["init"]) - .current_dir(&vault_path) - .output() - .map_err(|e| e.to_string())?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - Err(String::from_utf8_lossy(&output.stderr).to_string()) - } -} - -/* ── v0.8 Commands ──────────────────────────────────────── */ - -#[tauri::command] -fn suggest_links(vault_path: String, partial: String) -> Result, String> { - let vault = Path::new(&vault_path); - let partial_lower = partial.to_lowercase(); - let mut matches: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") { continue; } - let name = rel.replace(".md", ""); - if name.to_lowercase().contains(&partial_lower) { - 
matches.push(name); - } - } - matches.sort_by(|a, b| { - let a_starts = a.to_lowercase().starts_with(&partial_lower); - let b_starts = b.to_lowercase().starts_with(&partial_lower); - b_starts.cmp(&a_starts).then(a.len().cmp(&b.len())) - }); - matches.truncate(15); - Ok(matches) -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct NoteByDate { - pub path: String, - pub name: String, - pub modified: u64, - pub preview: String, -} - -#[tauri::command] -fn list_notes_by_date(vault_path: String) -> Result, String> { - let vault = Path::new(&vault_path); - let mut notes: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") { continue; } - - let modified = path.metadata() - .and_then(|m| m.modified()) - .map(|t| t.duration_since(std::time::UNIX_EPOCH).unwrap_or_default().as_secs()) - .unwrap_or(0); - - let preview = fs::read_to_string(path) - .unwrap_or_default() - .lines() - .filter(|l| !l.starts_with("---") && !l.starts_with("#") && !l.trim().is_empty()) - .take(2) - .collect::>() - .join(" ") - .chars() - .take(120) - .collect(); - - notes.push(NoteByDate { - path: rel.clone(), - name: rel.replace(".md", ""), - modified, - preview, - }); - } - notes.sort_by(|a, b| b.modified.cmp(&a.modified)); - Ok(notes) -} - -#[tauri::command] -fn random_note(vault_path: String) -> Result { - let vault = Path::new(&vault_path); - let mut paths: Vec = Vec::new(); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - if rel.starts_with(".") { continue; } - paths.push(rel.replace(".md", "")); - } - - if paths.is_empty() 
{ return Err("No notes found".into()); } - use rand::Rng; - let mut rng = rand::thread_rng(); - let idx = rng.gen_range(0..paths.len()); - Ok(paths[idx].clone()) -} - -/* ── v0.9 Commands ──────────────────────────────────────── */ - -#[tauri::command] -fn export_vault_zip(vault_path: String, output_path: String) -> Result { - use std::io::Write; - let vault = Path::new(&vault_path); - let out = Path::new(&output_path); - - let file = fs::File::create(out).map_err(|e| e.to_string())?; - let mut zip = zip::ZipWriter::new(file); - let options = zip::write::SimpleFileOptions::default() - .compression_method(zip::CompressionMethod::Deflated); - - for entry in WalkDir::new(vault) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().is_file()) - { - let path = entry.path(); - let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); - // Skip hidden files, in-progress atomic writes (~tmp suffix) - if rel.starts_with(".") || rel.ends_with("~tmp") { continue; } - - let content = fs::read(path).map_err(|e| e.to_string())?; - zip.start_file(&rel, options).map_err(|e| e.to_string())?; - zip.write_all(&content).map_err(|e| e.to_string())?; - } - - zip.finish().map_err(|e| e.to_string())?; - Ok(format!("Exported to {}", output_path)) -} - -#[tauri::command] -fn import_folder(vault_path: String, source_path: String) -> Result { - let vault = Path::new(&vault_path); - let source = Path::new(&source_path); - let mut count: u32 = 0; - - for entry in WalkDir::new(source) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) - { - let path = entry.path(); - let rel = path.strip_prefix(source).unwrap_or(path); - let dest = vault.join(rel); - - if let Some(parent) = dest.parent() { - fs::create_dir_all(parent).map_err(|e| e.to_string())?; - } - fs::copy(path, &dest).map_err(|e| e.to_string())?; - count += 1; - } - - Ok(count) -} - -#[tauri::command] -fn save_shortcuts(vault_path: String, 
shortcuts_json: String) -> Result<(), String> { - let dir = Path::new(&vault_path).join(".graph-notes"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - atomic_write(&dir.join("shortcuts.json"), &shortcuts_json) -} - -#[tauri::command] -fn load_shortcuts(vault_path: String) -> Result { - let path = Path::new(&vault_path).join(".graph-notes/shortcuts.json"); - if path.exists() { - fs::read_to_string(path).map_err(|e| e.to_string()) - } else { - Ok("{}".to_string()) - } -} - -#[tauri::command] -fn get_pinned(vault_path: String) -> Result, String> { - let path = Path::new(&vault_path).join(".graph-notes/pinned.json"); - if path.exists() { - let content = fs::read_to_string(path).map_err(|e| e.to_string())?; - serde_json::from_str(&content).map_err(|e| e.to_string()) - } else { - Ok(Vec::new()) - } -} - -#[tauri::command] -fn set_pinned(vault_path: String, pinned: Vec) -> Result<(), String> { - let dir = Path::new(&vault_path).join(".graph-notes"); - fs::create_dir_all(&dir).map_err(|e| e.to_string())?; - let json = serde_json::to_string_pretty(&pinned).map_err(|e| e.to_string())?; - atomic_write(&dir.join("pinned.json"), &json) -} +/* ── App Entry Point ───────────────────────────────────── */ #[cfg_attr(mobile, tauri::mobile_entry_point)] pub fn run() { @@ -1955,80 +248,100 @@ pub fn run() { .plugin(tauri_plugin_fs::init()) .plugin(tauri_plugin_dialog::init()) .invoke_handler(tauri::generate_handler![ - list_notes, - read_note, - write_note, - delete_note, - build_graph, - get_or_create_daily, + // ── lib.rs (config / vault management) ── get_vault_path, set_vault_path, ensure_vault, - search_vault, - rename_note, - update_wikilinks, - list_tags, - read_note_preview, list_recent_vaults, add_vault, - list_templates, - create_from_template, - get_favorites, - set_favorites, - parse_frontmatter, - write_frontmatter, - save_attachment, - list_attachments, - list_daily_notes, - get_theme, - set_theme, - export_note_html, - list_tasks, - toggle_task, - 
save_snapshot, - list_snapshots, - read_snapshot, - search_replace_vault, - get_writing_goal, - set_writing_goal, - extract_to_note, - merge_notes, - encrypt_note, - decrypt_note, - is_encrypted, - list_flashcards, - update_card_schedule, - save_fold_state, - load_fold_state, - get_custom_css, - set_custom_css, - save_workspace, - load_workspace, - list_workspaces, - save_tabs, - load_tabs, - save_canvas, - load_canvas, - list_canvases, - query_frontmatter, - get_backlink_context, - run_dataview_query, - git_status, - git_commit, - git_pull, - git_push, - git_init, - suggest_links, - list_notes_by_date, - random_note, - export_vault_zip, - import_folder, - save_shortcuts, - load_shortcuts, - get_pinned, - set_pinned, + // ── notes.rs ── + notes::list_notes, + notes::read_note, + notes::write_note, + notes::delete_note, + notes::build_graph, + notes::get_or_create_daily, + notes::list_daily_notes, + notes::search_vault, + notes::rename_note, + notes::update_wikilinks, + notes::list_tags, + notes::read_note_preview, + notes::list_templates, + notes::create_from_template, + notes::parse_frontmatter, + notes::write_frontmatter, + notes::save_attachment, + notes::list_attachments, + notes::list_tasks, + notes::toggle_task, + notes::extract_to_note, + notes::merge_notes, + notes::query_frontmatter, + notes::get_backlink_context, + notes::run_dataview_query, + notes::suggest_links, + notes::list_notes_by_date, + notes::random_note, + // ── git.rs ── + git::git_status, + git::git_commit, + git::git_pull, + git::git_push, + git::git_init, + // ── crypto.rs ── + crypto::encrypt_note, + crypto::decrypt_note, + crypto::is_encrypted, + // ── srs.rs ── + srs::list_flashcards, + srs::update_card_schedule, + // ── export.rs ── + export::export_note_html, + export::export_vault_zip, + export::import_folder, + // ── state.rs ── + state::save_snapshot, + state::list_snapshots, + state::read_snapshot, + state::search_replace_vault, + state::get_writing_goal, + state::set_writing_goal, 
+ state::save_fold_state, + state::load_fold_state, + state::get_custom_css, + state::set_custom_css, + state::save_workspace, + state::load_workspace, + state::list_workspaces, + state::save_tabs, + state::load_tabs, + state::save_canvas, + state::load_canvas, + state::list_canvases, + state::save_shortcuts, + state::load_shortcuts, + state::get_pinned, + state::set_pinned, + state::get_theme, + state::set_theme, + state::get_favorites, + state::set_favorites, + // ── v1.4 notes.rs ── + notes::compute_checksums, + notes::verify_checksums, + notes::scan_integrity, + notes::check_conflict, + notes::validate_frontmatter, + notes::find_orphan_attachments, + // ── v1.4 state.rs ── + state::create_backup, + state::list_backups, + state::restore_backup, + state::wal_status, + state::wal_recover, + state::get_audit_log, ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); } - diff --git a/src-tauri/src/notes.rs b/src-tauri/src/notes.rs new file mode 100644 index 0000000..4f980ad --- /dev/null +++ b/src-tauri/src/notes.rs @@ -0,0 +1,1490 @@ +use std::fs; +use std::path::Path; + +use chrono::Local; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use walkdir::WalkDir; + +use crate::{ + atomic_write, atomic_write_bytes, safe_vault_path, safe_name, + NoteEntry, WIKILINK_RE, TAG_RE, TASK_RE, TASK_MARKER_RE, + DAILY_NOTE_RE, PREVIEW_WIKILINK_RE, PREVIEW_FMT_RE, FRONTMATTER_RE, +}; + +fn normalize_note_name(name: &str) -> String { + name.trim().to_lowercase() +} + +fn extract_wikilinks(content: &str) -> Vec { + WIKILINK_RE.captures_iter(content) + .map(|cap| cap[1].trim().to_string()) + .collect() +} + +fn extract_tags(content: &str) -> Vec { + let mut tags: Vec = TAG_RE + .captures_iter(content) + .map(|cap| cap[1].to_string()) + .collect(); + tags.sort(); + tags.dedup(); + tags +} + +/* ── Types ──────────────────────────────────────────────── */ + +#[derive(Debug, Serialize, Deserialize)] +pub struct GraphData { + pub nodes: Vec, + 
pub edges: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct GraphNode { + pub id: String, + pub label: String, + pub path: String, + pub link_count: usize, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct GraphEdge { + pub source: String, + pub target: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct SearchResult { + pub path: String, + pub name: String, + pub line_number: usize, + pub context: String, + pub score: usize, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct TagInfo { + pub tag: String, + pub count: usize, + pub notes: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct TemplateInfo { + pub name: String, + pub path: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct TaskItem { + pub text: String, + pub state: String, + pub source_path: String, + pub line_number: usize, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct FrontmatterRow { + pub path: String, + pub title: String, + pub fields: serde_json::Map, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct BacklinkContext { + pub source_path: String, + pub source_name: String, + pub excerpt: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct DataviewResult { + pub columns: Vec, + pub rows: Vec>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct NoteByDate { + pub path: String, + pub name: String, + pub modified: u64, + pub preview: String, +} + +/* ── CRUD ───────────────────────────────────────────────── */ + +#[tauri::command] +pub fn list_notes(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + if !vault.exists() { + return Err("Vault path does not exist".to_string()); + } + + fn build_tree(dir: &Path, base: &Path) -> Vec { + let mut entries: Vec = Vec::new(); + + if let Ok(read_dir) = fs::read_dir(dir) { + let mut items: Vec<_> = read_dir.filter_map(|e| e.ok()).collect(); + items.sort_by_key(|e| e.file_name()); + + 
for entry in items { + let path = entry.path(); + let file_name = entry.file_name().to_string_lossy().to_string(); + + if file_name.starts_with('.') { + continue; + } + + if path.is_dir() { + let children = build_tree(&path, base); + let rel = path.strip_prefix(base).unwrap_or(&path); + entries.push(NoteEntry { + path: rel.to_string_lossy().to_string(), + name: file_name, + is_dir: true, + children: Some(children), + }); + } else if path.extension().map_or(false, |ext| ext == "md") { + let rel = path.strip_prefix(base).unwrap_or(&path); + entries.push(NoteEntry { + path: rel.to_string_lossy().to_string(), + name: file_name.trim_end_matches(".md").to_string(), + is_dir: false, + children: None, + }); + } + } + } + + entries + } + + Ok(build_tree(vault, vault)) +} + +#[tauri::command] +pub fn read_note(vault_path: String, relative_path: String) -> Result { + let full_path = safe_vault_path(&vault_path, &relative_path)?; + fs::read_to_string(&full_path).map_err(|e| format!("Failed to read note: {}", e)) +} + +#[tauri::command] +pub fn write_note(vault_path: String, relative_path: String, content: String) -> Result<(), String> { + let full_path = safe_vault_path(&vault_path, &relative_path)?; + atomic_write(&full_path, &content) +} + +#[tauri::command] +pub fn delete_note(vault_path: String, relative_path: String) -> Result<(), String> { + let full_path = safe_vault_path(&vault_path, &relative_path)?; + if full_path.is_file() { + fs::remove_file(&full_path).map_err(|e| format!("Failed to delete note: {}", e)) + } else { + Err("Note not found".to_string()) + } +} + +/* ── Graph ──────────────────────────────────────────────── */ + +#[tauri::command] +pub fn build_graph(vault_path: String) -> Result { + let vault = Path::new(&vault_path); + if !vault.exists() { + return Err("Vault path does not exist".to_string()); + } + + let mut nodes: Vec = Vec::new(); + let mut edges: Vec = Vec::new(); + let mut note_map: std::collections::HashMap = std::collections::HashMap::new(); 
+ + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let rel_path = entry.path().strip_prefix(vault).unwrap_or(entry.path()); + let rel_str = rel_path.to_string_lossy().to_string(); + let name = rel_path.file_stem().unwrap_or_default().to_string_lossy().to_string(); + + note_map.insert(normalize_note_name(&name), rel_str.clone()); + + nodes.push(GraphNode { + id: rel_str.clone(), + label: name, + path: rel_str, + link_count: 0, + }); + } + + for node in &mut nodes { + let full_path = vault.join(&node.path); + if let Ok(content) = fs::read_to_string(&full_path) { + let links = extract_wikilinks(&content); + node.link_count = links.len(); + + for link in &links { + let normalized = normalize_note_name(link); + if let Some(target_path) = note_map.get(&normalized) { + edges.push(GraphEdge { + source: node.id.clone(), + target: target_path.clone(), + }); + } + } + } + } + + Ok(GraphData { nodes, edges }) +} + +/* ── Daily Notes ────────────────────────────────────────── */ + +#[tauri::command] +pub fn get_or_create_daily(vault_path: String) -> Result { + let today = Local::now().format("%Y-%m-%d").to_string(); + let daily_dir = Path::new(&vault_path).join("daily"); + let daily_path = daily_dir.join(format!("{}.md", today)); + let relative_path = format!("daily/{}.md", today); + + if !daily_dir.exists() { + fs::create_dir_all(&daily_dir).map_err(|e| format!("Failed to create daily dir: {}", e))?; + } + + if !daily_path.exists() { + let content = format!("# {}\n\n", today); + atomic_write(&daily_path, &content)?; + } + + Ok(relative_path) +} + +#[tauri::command] +pub fn list_daily_notes(vault_path: String) -> Result, String> { + let daily_dir = Path::new(&vault_path).join("daily"); + if !daily_dir.exists() { + return Ok(vec![]); + } + + let mut dates: Vec = Vec::new(); + for entry in fs::read_dir(&daily_dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let name = entry.file_name().to_string_lossy().to_string(); + if DAILY_NOTE_RE.is_match(&name) { + dates.push(name.replace(".md", "")); + } + } + dates.sort(); + Ok(dates) +} + +/* ── Search ─────────────────────────────────────────────── */ + +#[tauri::command] +pub fn search_vault(vault_path: String, query: String) -> Result, String> { + let vault = Path::new(&vault_path); + if !vault.exists() { + return Err("Vault path does not exist".to_string()); + } + + let query_lower = query.to_lowercase(); + let mut results: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let rel_path = entry.path().strip_prefix(vault).unwrap_or(entry.path()); + let rel_str = rel_path.to_string_lossy().to_string(); + let name = rel_path.file_stem().unwrap_or_default().to_string_lossy().to_string(); + + if let Ok(content) = fs::read_to_string(entry.path()) { + let content_lower = content.to_lowercase(); + let match_count = content_lower.matches(&query_lower).count(); + + if match_count > 0 { + for (i, line) in content.lines().enumerate() { + if line.to_lowercase().contains(&query_lower) { + results.push(SearchResult { + path: rel_str.clone(), + name: name.clone(), + line_number: i + 1, + context: line.trim().chars().take(200).collect(), + score: match_count, + }); + break; + } + } + } + } + } + + results.sort_by(|a, b| b.score.cmp(&a.score).then_with(|| a.name.cmp(&b.name))); + Ok(results) +} + +/* ── Rename & Relink ────────────────────────────────────── */ + +#[tauri::command] +pub fn rename_note(vault_path: String, old_path: String, new_path: String) -> Result<(), String> { + let old_full = safe_vault_path(&vault_path, &old_path)?; + let new_full = safe_vault_path(&vault_path, &new_path)?; + + if !old_full.exists() { + return Err("Source note does not exist".to_string()); + } + if new_full.exists() { + return 
Err("A note with that name already exists".to_string()); + } + + if let Some(parent) = new_full.parent() { + fs::create_dir_all(parent).map_err(|e| format!("Failed to create directory: {}", e))?; + } + + fs::rename(&old_full, &new_full).map_err(|e| format!("Failed to rename note: {}", e)) +} + +#[tauri::command] +pub fn update_wikilinks(vault_path: String, old_name: String, new_name: String) -> Result { + let vault = Path::new(&vault_path); + let mut updated_count = 0; + + let pattern = format!(r"\[\[{}\]\]", regex::escape(&old_name)); + let pattern_with_alias = format!(r"\[\[{}\|", regex::escape(&old_name)); + let re1 = Regex::new(&pattern).map_err(|e| format!("Invalid regex: {}", e))?; + let re2 = Regex::new(&pattern_with_alias).map_err(|e| format!("Invalid regex: {}", e))?; + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + if let Ok(content) = fs::read_to_string(entry.path()) { + if re1.is_match(&content) || re2.is_match(&content) { + let new_content = re1.replace_all(&content, format!("[[{}]]", new_name)); + let new_content = re2.replace_all(&new_content, format!("[[{}|", new_name)); + atomic_write(entry.path(), &new_content)?; + updated_count += 1; + } + } + } + + Ok(updated_count) +} + +/* ── Tags ───────────────────────────────────────────────── */ + +#[tauri::command] +pub fn list_tags(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + if !vault.exists() { + return Err("Vault path does not exist".to_string()); + } + + let mut tag_map: std::collections::HashMap> = std::collections::HashMap::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let rel_path = entry.path().strip_prefix(vault).unwrap_or(entry.path()); + let rel_str = rel_path.to_string_lossy().to_string(); + + if let Ok(content) = 
fs::read_to_string(entry.path()) { + for tag in extract_tags(&content) { + tag_map.entry(tag).or_default().push(rel_str.clone()); + } + } + } + + let mut tags: Vec = tag_map + .into_iter() + .map(|(tag, notes)| TagInfo { + tag, + count: notes.len(), + notes, + }) + .collect(); + + tags.sort_by(|a, b| b.count.cmp(&a.count).then_with(|| a.tag.cmp(&b.tag))); + Ok(tags) +} + +/* ── Note Preview ───────────────────────────────────────── */ + +#[tauri::command] +pub fn read_note_preview(vault_path: String, note_path: String, max_chars: Option) -> Result { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| format!("Read failed: {}", e))?; + let limit = max_chars.unwrap_or(200); + + let cleaned: String = content + .lines() + .filter(|l| !l.trim().starts_with('#')) + .collect::>() + .join(" "); + + let cleaned = PREVIEW_WIKILINK_RE.replace_all(&cleaned, "$1").to_string(); + let cleaned = PREVIEW_FMT_RE.replace_all(&cleaned, "").to_string(); + let cleaned = cleaned.trim().to_string(); + + if cleaned.len() > limit { + Ok(format!("{}...", &cleaned[..limit])) + } else { + Ok(cleaned) + } +} + +/* ── Templates ──────────────────────────────────────────── */ + +#[tauri::command] +pub fn list_templates(vault_path: String) -> Result, String> { + let templates_dir = Path::new(&vault_path).join("_templates"); + if !templates_dir.exists() { + return Ok(vec![]); + } + + let mut templates: Vec = Vec::new(); + for entry in fs::read_dir(&templates_dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let path = entry.path(); + if path.extension().map_or(false, |ext| ext == "md") { + let name = path.file_stem() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + let rel = format!("_templates/{}", path.file_name().unwrap_or_default().to_string_lossy()); + templates.push(TemplateInfo { name, path: rel }); + } + } + templates.sort_by(|a, b| a.name.cmp(&b.name)); + Ok(templates) +} + +#[tauri::command] +pub fn create_from_template( + vault_path: String, + template_path: String, + note_name: String, +) -> Result { + let template_full = safe_vault_path(&vault_path, &template_path)?; + let template_content = fs::read_to_string(&template_full) + .map_err(|e| format!("Failed to read template: {}", e))?; + + let today = Local::now().format("%Y-%m-%d").to_string(); + let content = template_content + .replace("{{title}}", ¬e_name) + .replace("{{date}}", &today); + + let note_path = format!("{}.md", note_name); + let full_path = safe_vault_path(&vault_path, ¬e_path)?; + atomic_write(&full_path, &content)?; + Ok(note_path) +} + +/* ── Frontmatter ────────────────────────────────────────── */ + +#[tauri::command] +pub fn parse_frontmatter(vault_path: String, note_path: String) -> Result { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + + if !content.starts_with("---\n") { + return Ok(serde_json::json!({})); + } + + let end = content[4..].find("\n---"); + match end { + Some(pos) => { + let yaml_str = &content[4..4 + pos]; + let mut map = serde_json::Map::new(); + for line in yaml_str.lines() { + if let Some(colon_pos) = line.find(':') { + let key = line[..colon_pos].trim().to_string(); + let val = line[colon_pos + 1..].trim().to_string(); + map.insert(key, serde_json::Value::String(val)); + } + } + Ok(serde_json::Value::Object(map)) + } + None => Ok(serde_json::json!({})), + } +} + +#[tauri::command] +pub fn write_frontmatter( + 
vault_path: String, + note_path: String, + frontmatter: serde_json::Map, +) -> Result<(), String> { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + + let body = if content.starts_with("---\n") { + if let Some(end) = content[4..].find("\n---") { + content[4 + end + 4..].to_string() + } else { + content + } + } else { + content + }; + + let mut yaml = String::from("---\n"); + for (key, val) in &frontmatter { + let v = match val { + serde_json::Value::String(s) => s.clone(), + other => other.to_string(), + }; + yaml.push_str(&format!("{}: {}\n", key, v)); + } + yaml.push_str("---\n"); + + let new_content = if frontmatter.is_empty() { + body.trim_start().to_string() + } else { + format!("{}{}", yaml, body) + }; + + atomic_write(&full, &new_content) +} + +/* ── Attachments ────────────────────────────────────────── */ + +#[tauri::command] +pub fn save_attachment( + vault_path: String, + file_name: String, + data: Vec, +) -> Result { + let sanitized = safe_name(&file_name)?; + let attach_dir = Path::new(&vault_path).join("_attachments"); + fs::create_dir_all(&attach_dir).map_err(|e| e.to_string())?; + + let mut target = attach_dir.join(&sanitized); + let stem = target.file_stem().unwrap_or_default().to_string_lossy().to_string(); + let ext = target.extension().map(|e| format!(".{}", e.to_string_lossy())).unwrap_or_default(); + let mut counter = 1; + while target.exists() { + target = attach_dir.join(format!("{}-{}{}", stem, counter, ext)); + counter += 1; + } + + atomic_write_bytes(&target, &data)?; + let final_name = target.file_name().unwrap_or_default().to_string_lossy(); + let rel = format!("_attachments/{}", final_name); + Ok(rel) +} + +#[tauri::command] +pub fn list_attachments(vault_path: String) -> Result, String> { + let attach_dir = Path::new(&vault_path).join("_attachments"); + if !attach_dir.exists() { + return Ok(vec![]); + } + + let mut files: Vec = Vec::new(); + for entry in 
fs::read_dir(&attach_dir).map_err(|e| e.to_string())? { + let entry = entry.map_err(|e| e.to_string())?; + if entry.path().is_file() { + files.push(entry.file_name().to_string_lossy().to_string()); + } + } + files.sort(); + Ok(files) +} + +/* ── Tasks (Kanban) ─────────────────────────────────────── */ + +#[tauri::command] +pub fn list_tasks(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let mut tasks: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") || rel.starts_with("_") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + for (i, line) in content.lines().enumerate() { + if let Some(caps) = TASK_RE.captures(line) { + let marker = &caps[2]; + let state = match marker { + "x" => "done", + "/" => "in-progress", + _ => "todo", + }.to_string(); + let text = caps[3].trim().to_string(); + tasks.push(TaskItem { + text, + state, + source_path: rel.clone(), + line_number: i + 1, + }); + } + } + } + } + Ok(tasks) +} + +#[tauri::command] +pub fn toggle_task(vault_path: String, note_path: String, line_number: usize, new_state: String) -> Result<(), String> { + let full = safe_vault_path(&vault_path, ¬e_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + let mut lines: Vec = content.lines().map(|s| s.to_string()).collect(); + + if line_number == 0 || line_number > lines.len() { + return Err("Invalid line number".to_string()); + } + + let marker = match new_state.as_str() { + "done" => "x", + "in-progress" => "/", + _ => " ", + }; + + let line = &lines[line_number - 1]; + if let Some(m) = TASK_MARKER_RE.find(line) { + let mut new_line = String::new(); + new_line.push_str(&line[..m.start()]); + new_line.push_str(&format!("- [{}]", marker)); + 
new_line.push_str(&line[m.end()..]); + lines[line_number - 1] = new_line; + } + + atomic_write(&full, &(lines.join("\n") + "\n")) +} + +/* ── Note Refactoring ───────────────────────────────────── */ + +#[tauri::command] +pub fn extract_to_note( + vault_path: String, + source_path: String, + selected_text: String, + new_note_name: String, +) -> Result { + let new_path = safe_vault_path(&vault_path, &format!("{}.md", &new_note_name))?; + if new_path.exists() { + return Err(format!("Note '{}' already exists", new_note_name)); + } + + atomic_write(&new_path, &selected_text)?; + + let source_full = safe_vault_path(&vault_path, &source_path)?; + let content = fs::read_to_string(&source_full).map_err(|e| e.to_string())?; + let new_content = content.replacen(&selected_text, &format!("[[{}]]", new_note_name), 1); + atomic_write(&source_full, &new_content)?; + + Ok(format!("{}.md", new_note_name)) +} + +#[tauri::command] +pub fn merge_notes( + vault_path: String, + source_path: String, + target_path: String, +) -> Result<(), String> { + let vault = Path::new(&vault_path); + let source_full = safe_vault_path(&vault_path, &source_path)?; + let target_full = safe_vault_path(&vault_path, &target_path)?; + + let source_content = fs::read_to_string(&source_full).map_err(|e| e.to_string())?; + let target_content = fs::read_to_string(&target_full).map_err(|e| e.to_string())?; + + let source_name = source_path.replace(".md", ""); + let merged = format!("{}\n\n---\n\n## Merged from {}\n\n{}", target_content.trim_end(), source_name, source_content); + atomic_write(&target_full, &merged)?; + + fs::remove_file(&source_full).map_err(|e| e.to_string())?; + + let target_name = target_path.replace(".md", ""); + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + if path == target_full { continue; } + if let Ok(content) = fs::read_to_string(path) { + let updated = 
content.replace( + &format!("[[{}]]", source_name), + &format!("[[{}]]", target_name), + ); + if updated != content { + atomic_write(path, &updated)?; + } + } + } + Ok(()) +} + +/* ── Frontmatter Query ──────────────────────────────────── */ + +#[tauri::command] +pub fn query_frontmatter(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let mut rows: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + if let Some(caps) = FRONTMATTER_RE.captures(&content) { + let yaml_str = &caps[1]; + let mut fields = serde_json::Map::new(); + for line in yaml_str.lines() { + if let Some(idx) = line.find(':') { + let key = line[..idx].trim().to_string(); + let val = line[idx+1..].trim().to_string(); + fields.insert(key, serde_json::Value::String(val)); + } + } + let title = rel.replace(".md", ""); + rows.push(FrontmatterRow { path: rel, title, fields }); + } + } + } + Ok(rows) +} + +/* ── Backlink Context ───────────────────────────────────── */ + +#[tauri::command] +pub fn get_backlink_context(vault_path: String, note_name: String) -> Result, String> { + let vault = Path::new(&vault_path); + let link_pattern = format!("[[{}]]", note_name); + let mut results: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + if content.contains(&link_pattern) { + let paragraphs: Vec<&str> = content.split("\n\n").collect(); + for para 
in paragraphs { + if para.contains(&link_pattern) { + let excerpt = para.trim().chars().take(200).collect::(); + results.push(BacklinkContext { + source_path: rel.clone(), + source_name: rel.replace(".md", ""), + excerpt, + }); + break; + } + } + } + } + } + Ok(results) +} + +/* ── Dataview Query Engine ──────────────────────────────── */ + +#[tauri::command] +pub fn run_dataview_query(vault_path: String, query: String) -> Result { + let vault = Path::new(&vault_path); + + let query_upper = query.to_uppercase(); + let is_table = query_upper.starts_with("TABLE"); + if !is_table { return Err("Only TABLE queries supported".into()); } + + let after_table = query.trim_start_matches(|c: char| c.is_alphabetic() || c == ' ').trim(); + let parts: Vec<&str> = after_table.splitn(2, " FROM").collect(); + let field_str = parts[0].trim(); + let columns: Vec = std::iter::once("File".to_string()) + .chain(field_str.split(',').map(|s| s.trim().to_string())) + .collect(); + + let sort_field = if query_upper.contains("SORT") { + let sort_part = query.split("SORT").nth(1).unwrap_or("").trim(); + let sf: Vec<&str> = sort_part.split_whitespace().collect(); + sf.first().map(|s| s.to_string()) + } else { None }; + + let mut rows: Vec> = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + let mut fields_map: std::collections::HashMap = std::collections::HashMap::new(); + fields_map.insert("title".into(), rel.replace(".md", "")); + + if let Some(caps) = FRONTMATTER_RE.captures(&content) { + for line in caps[1].lines() { + if let Some(idx) = line.find(':') { + let k = line[..idx].trim().to_lowercase(); + let v = line[idx+1..].trim().to_string(); + fields_map.insert(k, v); + } + } + 
} + + let mut row = vec![rel.replace(".md", "")]; + for col in &columns[1..] { + let val = fields_map.get(&col.to_lowercase()).cloned().unwrap_or_default(); + row.push(val); + } + rows.push(row); + } + } + + if let Some(ref sf) = sort_field { + let sf_lower = sf.to_lowercase(); + if let Some(idx) = columns.iter().position(|c| c.to_lowercase() == sf_lower) { + rows.sort_by(|a, b| a.get(idx).unwrap_or(&String::new()).cmp(b.get(idx).unwrap_or(&String::new()))); + } + } + + Ok(DataviewResult { columns, rows }) +} + +/* ── Link Suggestions ───────────────────────────────────── */ + +#[tauri::command] +pub fn suggest_links(vault_path: String, partial: String) -> Result, String> { + let vault = Path::new(&vault_path); + let partial_lower = partial.to_lowercase(); + let mut matches: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + let name = rel.replace(".md", ""); + if name.to_lowercase().contains(&partial_lower) { + matches.push(name); + } + } + matches.sort_by(|a, b| { + let a_starts = a.to_lowercase().starts_with(&partial_lower); + let b_starts = b.to_lowercase().starts_with(&partial_lower); + b_starts.cmp(&a_starts).then(a.len().cmp(&b.len())) + }); + matches.truncate(15); + Ok(matches) +} + +/* ── Notes By Date ──────────────────────────────────────── */ + +#[tauri::command] +pub fn list_notes_by_date(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let mut notes: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { 
continue; } + + let modified = path.metadata() + .and_then(|m| m.modified()) + .map(|t| t.duration_since(std::time::UNIX_EPOCH).unwrap_or_default().as_secs()) + .unwrap_or(0); + + let preview = fs::read_to_string(path) + .unwrap_or_default() + .lines() + .filter(|l| !l.starts_with("---") && !l.starts_with("#") && !l.trim().is_empty()) + .take(2) + .collect::>() + .join(" ") + .chars() + .take(120) + .collect(); + + notes.push(NoteByDate { + path: rel.clone(), + name: rel.replace(".md", ""), + modified, + preview, + }); + } + notes.sort_by(|a, b| b.modified.cmp(&a.modified)); + Ok(notes) +} + +/* ── Random Note ────────────────────────────────────────── */ + +#[tauri::command] +pub fn random_note(vault_path: String) -> Result { + let vault = Path::new(&vault_path); + let mut paths: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + paths.push(rel.replace(".md", "")); + } + + if paths.is_empty() { return Err("No notes found".into()); } + use rand::Rng; + let mut rng = rand::thread_rng(); + let idx = rng.gen_range(0..paths.len()); + Ok(paths[idx].clone()) +} + +/* ══════════════════════════════════════════════════════════ + v1.3 — Multi-Vault Federated Search + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct FederatedResult { + pub vault_name: String, + pub vault_path: String, + pub note_path: String, + pub note_name: String, + pub excerpt: String, + pub score: f32, +} + +#[tauri::command] +pub fn search_multi_vault(query: String, vault_paths: Vec) -> Result, String> { + let query_lower = query.to_lowercase(); + let mut results: Vec = Vec::new(); + + for vp in &vault_paths { + let vault = Path::new(vp); + if !vault.exists() { 
continue; } + let vault_name = vault.file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + let content_lower = content.to_lowercase(); + if content_lower.contains(&query_lower) { + // Compute simple relevance score + let name = rel.replace(".md", ""); + let name_lower = name.to_lowercase(); + let title_match = if name_lower.contains(&query_lower) { 10.0 } else { 0.0 }; + let freq = content_lower.matches(&query_lower).count() as f32; + let score = title_match + freq.min(20.0); + + // Extract excerpt around first match + let idx = content_lower.find(&query_lower).unwrap_or(0); + let start = idx.saturating_sub(60); + let end = (idx + query.len() + 60).min(content.len()); + let excerpt = content[start..end].replace('\n', " ").trim().to_string(); + + results.push(FederatedResult { + vault_name: vault_name.clone(), + vault_path: vp.clone(), + note_path: rel, + note_name: name, + excerpt, + score, + }); + } + } + } + } + + results.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap_or(std::cmp::Ordering::Equal)); + results.truncate(50); + Ok(results) +} + +/* ══════════════════════════════════════════════════════════ + v1.3 — Note Types & Custom Templates + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct NoteType { + pub name: String, + pub icon: String, + pub fields: Vec, + pub template: String, +} + +#[tauri::command] +pub fn list_note_types(vault_path: String) -> Result, String> { + let types_dir = Path::new(&vault_path).join(".graph-notes").join("types"); + if !types_dir.exists() { + return Ok(vec![]); + } + + let mut types = 
Vec::new(); + for entry in fs::read_dir(&types_dir).map_err(|e| e.to_string())? { + let entry = entry.map_err(|e| e.to_string())?; + let name = entry.file_name().to_string_lossy().to_string(); + if !name.ends_with(".yaml") && !name.ends_with(".yml") { continue; } + + if let Ok(content) = fs::read_to_string(entry.path()) { + let type_name = name.replace(".yaml", "").replace(".yml", ""); + let mut icon = "📄".to_string(); + let mut fields = Vec::new(); + let mut template = String::new(); + let mut in_template = false; + + for line in content.lines() { + if line.starts_with("icon:") { + icon = line.replace("icon:", "").trim().to_string(); + } else if line.starts_with("fields:") { + // next lines are field entries + } else if line.starts_with(" - ") && !in_template { + fields.push(line.replace(" - ", "").trim().to_string()); + } else if line.starts_with("template:") { + in_template = true; + let rest = line.replace("template:", "").trim().to_string(); + if rest.starts_with('|') || rest.is_empty() { + // multiline + } else { + template = rest; + } + } else if in_template { + template.push_str(line.trim_start_matches(" ")); + template.push('\n'); + } + } + + types.push(NoteType { name: type_name, icon, fields, template: template.trim().to_string() }); + } + } + Ok(types) +} + +#[tauri::command] +pub fn create_from_type(vault_path: String, type_name: String, title: String) -> Result { + let types = list_note_types(vault_path.clone())?; + let note_type = types.iter().find(|t| t.name == type_name) + .ok_or_else(|| format!("Note type '{}' not found", type_name))?; + + let safe_title = title.replace(['/', '\\', ':', '*', '?', '"', '<', '>', '|'], "-"); + let note_path = format!("{}.md", safe_title); + let full_path = Path::new(&vault_path).join(¬e_path); + + if full_path.exists() { + return Err(format!("Note '{}' already exists", note_path)); + } + + // Build frontmatter + let today = chrono::Local::now().format("%Y-%m-%d").to_string(); + let mut content = 
String::from("---\n"); + content.push_str(&format!("type: {}\n", type_name)); + content.push_str(&format!("created: {}\n", today)); + for field in ¬e_type.fields { + content.push_str(&format!("{}: \n", field)); + } + content.push_str("---\n\n"); + + // Add template or default title + if !note_type.template.is_empty() { + let rendered = note_type.template + .replace("{{title}}", &title) + .replace("{{date}}", &today) + .replace("{{type}}", &type_name); + content.push_str(&rendered); + } else { + content.push_str(&format!("# {}\n\n", title)); + } + + crate::atomic_write(&full_path, &content)?; + Ok(note_path) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Content Checksums (SHA-256) + ══════════════════════════════════════════════════════════ */ + +use sha2::{Sha256, Digest}; + +fn sha256_hex(content: &str) -> String { + let mut hasher = Sha256::new(); + hasher.update(content.as_bytes()); + format!("{:x}", hasher.finalize()) +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ChecksumMismatch { + pub path: String, + pub expected: String, + pub actual: String, +} + +#[tauri::command] +pub fn compute_checksums(vault_path: String) -> Result { + let vault = Path::new(&vault_path); + let mut map: serde_json::Map = serde_json::Map::new(); + let mut count = 0u32; + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + let hash = sha256_hex(&content); + map.insert(rel, serde_json::Value::String(hash)); + count += 1; + } + } + + let gn_dir = vault.join(".graph-notes"); + fs::create_dir_all(&gn_dir).map_err(|e| e.to_string())?; + let json = serde_json::to_string_pretty(&map).map_err(|e| e.to_string())?; + 
crate::atomic_write(&gn_dir.join("checksums.json"), &json)?; + + Ok(count) +} + +#[tauri::command] +pub fn verify_checksums(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let checksums_path = vault.join(".graph-notes").join("checksums.json"); + + if !checksums_path.exists() { + return Err("No checksums file found. Run compute first.".into()); + } + + let data = fs::read_to_string(&checksums_path).map_err(|e| e.to_string())?; + let stored: serde_json::Map = + serde_json::from_str(&data).map_err(|e| e.to_string())?; + + let mut mismatches = Vec::new(); + + for (path, expected_val) in &stored { + let expected = expected_val.as_str().unwrap_or(""); + let full = vault.join(path); + + if !full.exists() { + mismatches.push(ChecksumMismatch { + path: path.clone(), + expected: expected.to_string(), + actual: "".to_string(), + }); + continue; + } + + if let Ok(content) = fs::read_to_string(&full) { + let actual = sha256_hex(&content); + if actual != expected { + mismatches.push(ChecksumMismatch { + path: path.clone(), + expected: expected.to_string(), + actual, + }); + } + } + } + + Ok(mismatches) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Vault Integrity Scanner + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct IntegrityIssue { + pub severity: String, // "error" | "warning" | "info" + pub category: String, // "truncated" | "tmp_leftover" | "encoding" | "orphan_meta" + pub path: String, + pub description: String, +} + +#[tauri::command] +pub fn scan_integrity(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let mut issues = Vec::new(); + + // 1. 
Scan for truncated (0-byte) .md files + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(meta) = fs::metadata(path) { + if meta.len() == 0 { + issues.push(IntegrityIssue { + severity: "error".into(), + category: "truncated".into(), + path: rel.clone(), + description: "Empty file (0 bytes) — may be corrupted".into(), + }); + } + } + + // Check for non-UTF-8 encoding + if let Err(_) = fs::read_to_string(path) { + issues.push(IntegrityIssue { + severity: "warning".into(), + category: "encoding".into(), + path: rel.clone(), + description: "File contains non-UTF-8 bytes".into(), + }); + } + } + + // 2. Scan for leftover ~tmp files (incomplete atomic writes) + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().is_file()) + { + let name = entry.file_name().to_string_lossy().to_string(); + if name.starts_with("~tmp") || name.ends_with("~tmp") { + let rel = entry.path().strip_prefix(vault).unwrap_or(entry.path()).to_string_lossy().to_string(); + issues.push(IntegrityIssue { + severity: "warning".into(), + category: "tmp_leftover".into(), + path: rel, + description: "Leftover temporary file from incomplete write".into(), + }); + } + } + + // 3. 
Check .graph-notes directory for orphaned entries + let gn_dir = vault.join(".graph-notes"); + if gn_dir.exists() { + for entry in fs::read_dir(&gn_dir).into_iter().flatten().flatten() { + let name = entry.file_name().to_string_lossy().to_string(); + if name.ends_with(".json") && name != "checksums.json" && name != "wal.log" + && name != "audit.log" && name != "favorites.json" + { + // Check if it references notes that no longer exist + if let Ok(meta) = entry.metadata() { + if meta.len() == 0 { + issues.push(IntegrityIssue { + severity: "info".into(), + category: "orphan_meta".into(), + path: format!(".graph-notes/{}", name), + description: "Empty metadata file".into(), + }); + } + } + } + } + } + + issues.sort_by(|a, b| { + let sev = |s: &str| match s { "error" => 0, "warning" => 1, _ => 2 }; + sev(&a.severity).cmp(&sev(&b.severity)) + }); + + Ok(issues) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Conflict Detection + ══════════════════════════════════════════════════════════ */ + +#[tauri::command] +pub fn check_conflict(vault_path: String, relative_path: String, expected_mtime: f64) -> Result { + let full = safe_vault_path(&vault_path, &relative_path)?; + + if !full.exists() { + return Ok("deleted".into()); + } + + let meta = fs::metadata(&full).map_err(|e| e.to_string())?; + let mtime = meta.modified().map_err(|e| e.to_string())?; + let secs = mtime.duration_since(std::time::UNIX_EPOCH) + .map_err(|e| e.to_string())? 
+ .as_secs_f64(); + + if (secs - expected_mtime).abs() > 0.5 { + Ok("modified".into()) + } else { + Ok("clean".into()) + } +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Frontmatter Schema Validation + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct FrontmatterWarning { + pub line: usize, + pub message: String, +} + +#[tauri::command] +pub fn validate_frontmatter(content: String) -> Result, String> { + let mut warnings = Vec::new(); + + if !content.starts_with("---") { + return Ok(warnings); // No frontmatter, no warnings + } + + let rest = &content[3..]; + let fm_end = rest.find("\n---"); + if fm_end.is_none() { + warnings.push(FrontmatterWarning { + line: 1, + message: "Unclosed frontmatter: missing closing '---'".into(), + }); + return Ok(warnings); + } + + let fm = &rest[..fm_end.unwrap()]; + let mut seen_keys = std::collections::HashSet::new(); + + for (i, line) in fm.lines().enumerate() { + let trimmed = line.trim(); + if trimmed.is_empty() || trimmed.starts_with('#') { continue; } + + // Check for key: value pattern + if let Some(colon_pos) = trimmed.find(':') { + let key = trimmed[..colon_pos].trim(); + + // Check for duplicate keys + if !seen_keys.insert(key.to_lowercase()) { + warnings.push(FrontmatterWarning { + line: i + 2, // +2 for 1-index and first --- + message: format!("Duplicate key: '{}'", key), + }); + } + + // Validate date-like fields + let value = trimmed[colon_pos + 1..].trim(); + if (key == "date" || key == "created" || key == "modified") && !value.is_empty() { + // Check for YYYY-MM-DD format + let date_re = Regex::new(r"^\d{4}-\d{2}-\d{2}").unwrap(); + if !date_re.is_match(value) { + warnings.push(FrontmatterWarning { + line: i + 2, + message: format!("'{}' value '{}' doesn't match YYYY-MM-DD format", key, value), + }); + } + } + + // Warn on empty required-looking keys + if value.is_empty() && (key == "title" || key == "date") { + 
warnings.push(FrontmatterWarning { + line: i + 2, + message: format!("Key '{}' has empty value", key), + }); + } + } else if !trimmed.starts_with("- ") && !trimmed.starts_with(" ") { + warnings.push(FrontmatterWarning { + line: i + 2, + message: format!("Malformed line: '{}'", trimmed), + }); + } + } + + Ok(warnings) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Orphan Attachment Cleanup + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct OrphanAttachment { + pub path: String, + pub size: u64, +} + +#[tauri::command] +pub fn find_orphan_attachments(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let attach_dir = vault.join("_attachments"); + + if !attach_dir.exists() { + return Ok(vec![]); + } + + // 1. Collect all attachment filenames + let mut attachments: Vec<(String, u64)> = Vec::new(); + for entry in WalkDir::new(&attach_dir) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().is_file()) + { + let name = entry.file_name().to_string_lossy().to_string(); + let size = entry.metadata().map(|m| m.len()).unwrap_or(0); + attachments.push((name, size)); + } + + if attachments.is_empty() { + return Ok(vec![]); + } + + // 2. Scan all notes for references to each attachment + let mut all_content = String::new(); + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + all_content.push_str(&content); + all_content.push('\n'); + } + } + + // 3. 
Find attachments not referenced + let orphans: Vec = attachments.into_iter() + .filter(|(name, _)| !all_content.contains(name)) + .map(|(name, size)| OrphanAttachment { + path: format!("_attachments/{}", name), + size, + }) + .collect(); + + Ok(orphans) +} diff --git a/src-tauri/src/srs.rs b/src-tauri/src/srs.rs new file mode 100644 index 0000000..0347b69 --- /dev/null +++ b/src-tauri/src/srs.rs @@ -0,0 +1,117 @@ +use std::fs; +use std::path::Path; + +use chrono::Local; +use serde::{Deserialize, Serialize}; +use walkdir::WalkDir; + +use crate::{atomic_write, FLASHCARD_RE}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Flashcard { + pub question: String, + pub answer: String, + pub source_path: String, + pub line_number: usize, + pub due: Option, + pub interval: u32, + pub ease: f32, +} + +#[tauri::command] +pub fn list_flashcards(vault_path: String) -> Result, String> { + let vault = Path::new(&vault_path); + let mut cards: Vec = Vec::new(); + + // Load schedule data + let srs_path = vault.join(".graph-notes").join("srs.json"); + let srs: serde_json::Map = if srs_path.exists() { + let c = fs::read_to_string(&srs_path).unwrap_or_default(); + serde_json::from_str(&c).unwrap_or_default() + } else { + serde_json::Map::new() + }; + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") || rel.starts_with("_") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + for (i, line) in content.lines().enumerate() { + for caps in FLASHCARD_RE.captures_iter(line) { + let q = caps[1].trim().to_string(); + let a = caps[2].trim().to_string(); + let card_id = format!("{}:{}", rel, i + 1); + + let (due, interval, ease) = if let Some(sched) = srs.get(&card_id) { + ( + sched.get("due").and_then(|v| v.as_str()).map(|s| 
s.to_string()), + sched.get("interval").and_then(|v| v.as_u64()).unwrap_or(1) as u32, + sched.get("ease").and_then(|v| v.as_f64()).unwrap_or(2.5) as f32, + ) + } else { + (None, 1, 2.5) + }; + + cards.push(Flashcard { + question: q, + answer: a, + source_path: rel.clone(), + line_number: i + 1, + due, + interval, + ease, + }); + } + } + } + } + Ok(cards) +} + +#[tauri::command] +pub fn update_card_schedule( + vault_path: String, + card_id: String, + quality: u32, +) -> Result<(), String> { + let srs_path = Path::new(&vault_path).join(".graph-notes").join("srs.json"); + fs::create_dir_all(Path::new(&vault_path).join(".graph-notes")).map_err(|e| e.to_string())?; + + let mut srs: serde_json::Map = if srs_path.exists() { + let c = fs::read_to_string(&srs_path).unwrap_or_default(); + serde_json::from_str(&c).unwrap_or_default() + } else { + serde_json::Map::new() + }; + + let entry = srs.entry(card_id).or_insert_with(|| serde_json::json!({"interval": 1, "ease": 2.5})); + let obj = entry.as_object_mut().ok_or("Invalid SRS entry")?; + + let mut interval = obj.get("interval").and_then(|v| v.as_u64()).unwrap_or(1) as f64; + let mut ease = obj.get("ease").and_then(|v| v.as_f64()).unwrap_or(2.5); + + // SM-2 algorithm + if quality >= 3 { + if interval <= 1.0 { interval = 1.0; } + else if interval <= 6.0 { interval = 6.0; } + else { interval *= ease; } + ease = ease + (0.1 - (5.0 - quality as f64) * (0.08 + (5.0 - quality as f64) * 0.02)); + if ease < 1.3 { ease = 1.3; } + } else { + interval = 1.0; + } + + let due = Local::now() + chrono::Duration::days(interval as i64); + obj.insert("interval".into(), serde_json::json!(interval as u32)); + obj.insert("ease".into(), serde_json::json!(ease)); + obj.insert("due".into(), serde_json::json!(due.format("%Y-%m-%d").to_string())); + + let json = serde_json::to_string_pretty(&srs).map_err(|e| e.to_string())?; + atomic_write(&srs_path, &json) +} diff --git a/src-tauri/src/state.rs b/src-tauri/src/state.rs new file mode 100644 index 
0000000..94c2e7d --- /dev/null +++ b/src-tauri/src/state.rs @@ -0,0 +1,758 @@ +use std::fs; +use std::path::Path; + +use serde::{Deserialize, Serialize}; + +use crate::{atomic_write, safe_name, safe_vault_path, dirs_config_dir, dirs_config_path}; + +/* ── Snapshots (Version History) ────────────────────────── */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct SnapshotInfo { + pub timestamp: String, + pub filename: String, + pub size: u64, +} + +#[tauri::command] +pub fn save_snapshot(vault_path: String, note_path: String) -> Result<String, String> { + let full = safe_vault_path(&vault_path, &note_path)?; + let content = fs::read_to_string(&full).map_err(|e| e.to_string())?; + + let sanitized_name = note_path.replace('/', "__").replace(".md", ""); + let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&sanitized_name); + fs::create_dir_all(&history_dir).map_err(|e| e.to_string())?; + + let ts = chrono::Local::now().format("%Y%m%d_%H%M%S").to_string(); + let snap_name = format!("{}.md", ts); + // Snapshots are write-once, never overwritten — direct write is safe + fs::write(history_dir.join(&snap_name), &content).map_err(|e| e.to_string())?; + Ok(snap_name) +} + +#[tauri::command] +pub fn list_snapshots(vault_path: String, note_path: String) -> Result<Vec<SnapshotInfo>, String> { + let sanitized_name = note_path.replace('/', "__").replace(".md", ""); + let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&sanitized_name); + + if !history_dir.exists() { + return Ok(vec![]); + } + + let mut snaps: Vec<SnapshotInfo> = Vec::new(); + for entry in fs::read_dir(&history_dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let meta = entry.metadata().map_err(|e| e.to_string())?; + if meta.is_file() { + let name = entry.file_name().to_string_lossy().to_string(); + let ts = name.replace(".md", ""); + snaps.push(SnapshotInfo { + timestamp: ts, + filename: name, + size: meta.len(), + }); + } + } + snaps.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); + Ok(snaps) +} + +#[tauri::command] +pub fn read_snapshot(vault_path: String, note_path: String, snapshot_name: String) -> Result { + let sanitized_name = note_path.replace('/', "__").replace(".md", ""); + let snap_path = Path::new(&vault_path) + .join(".graph-notes") + .join("history") + .join(&sanitized_name) + .join(&snapshot_name); + fs::read_to_string(&snap_path).map_err(|e| e.to_string()) +} + +/* ── Search & Replace ───────────────────────────────────── */ + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ReplaceResult { + pub path: String, + pub count: usize, +} + +#[tauri::command] +pub fn search_replace_vault( + vault_path: String, + search: String, + replace: String, + dry_run: bool, +) -> Result, String> { + use walkdir::WalkDir; + let vault = Path::new(&vault_path); + let mut results: Vec = Vec::new(); + + for entry in WalkDir::new(vault) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().map_or(false, |ext| ext == "md")) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + if rel.starts_with(".") || rel.starts_with("_") { continue; } + + if let Ok(content) = fs::read_to_string(path) { + let count = content.matches(&search).count(); + if count > 0 { + results.push(ReplaceResult { path: rel, count }); + if !dry_run { + let updated = content.replace(&search, &replace); + crate::atomic_write(path, &updated)?; + } + } + } + } + Ok(results) +} + +/* ── Writing Goals ──────────────────────────────────────── */ + +#[tauri::command] +pub fn get_writing_goal(vault_path: String, 
note_path: String) -> Result<u32, String> { + let goals_path = Path::new(&vault_path).join(".graph-notes").join("goals.json"); + if !goals_path.exists() { + return Ok(0); + } + let content = fs::read_to_string(&goals_path).map_err(|e| e.to_string())?; + let goals: serde_json::Map<String, serde_json::Value> = + serde_json::from_str(&content).unwrap_or_default(); + Ok(goals + .get(&note_path) + .and_then(|v| v.as_u64()) + .unwrap_or(0) as u32) +} + +#[tauri::command] +pub fn set_writing_goal(vault_path: String, note_path: String, goal: u32) -> Result<(), String> { + let dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + let goals_path = dir.join("goals.json"); + + let mut goals: serde_json::Map<String, serde_json::Value> = if goals_path.exists() { + let c = fs::read_to_string(&goals_path).unwrap_or_default(); + serde_json::from_str(&c).unwrap_or_default() + } else { + serde_json::Map::new() + }; + + goals.insert(note_path, serde_json::json!(goal)); + let json = serde_json::to_string_pretty(&goals).map_err(|e| e.to_string())?; + crate::atomic_write(&goals_path, &json) +} + +/* ── Fold State ─────────────────────────────────────────── */ + +#[tauri::command] +pub fn save_fold_state(vault_path: String, note_path: String, folds: Vec<usize>) -> Result<(), String> { + let dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + let folds_path = dir.join("folds.json"); + + let mut data: serde_json::Map<String, serde_json::Value> = if folds_path.exists() { + let c = fs::read_to_string(&folds_path).unwrap_or_default(); + serde_json::from_str(&c).unwrap_or_default() + } else { + serde_json::Map::new() + }; + + data.insert(note_path, serde_json::json!(folds)); + let json = serde_json::to_string_pretty(&data).map_err(|e| e.to_string())?; + crate::atomic_write(&folds_path, &json) +} + +#[tauri::command] +pub fn load_fold_state(vault_path: String, note_path: String) -> Result<Vec<usize>, String> { + let folds_path = Path::new(&vault_path).join(".graph-notes").join("folds.json"); + if 
!folds_path.exists() { return Ok(vec![]); } + let c = fs::read_to_string(&folds_path).map_err(|e| e.to_string())?; + let data: serde_json::Map<String, serde_json::Value> = serde_json::from_str(&c).unwrap_or_default(); + let folds = data.get(&note_path) + .and_then(|v| v.as_array()) + .map(|arr| arr.iter().filter_map(|v| v.as_u64().map(|n| n as usize)).collect()) + .unwrap_or_default(); + Ok(folds) +} + +/* ── Custom CSS ─────────────────────────────────────────── */ + +#[tauri::command] +pub fn get_custom_css() -> Result<String, String> { + let config_dir = dirs_config_dir(); + let css_path = config_dir.join("custom.css"); + if css_path.exists() { + fs::read_to_string(&css_path).map_err(|e| e.to_string()) + } else { + Ok(String::new()) + } +} + +#[tauri::command] +pub fn set_custom_css(css: String) -> Result<(), String> { + crate::atomic_write(&dirs_config_dir().join("custom.css"), &css) +} + +/* ── Workspace Layouts ──────────────────────────────────── */ + +#[tauri::command] +pub fn save_workspace(vault_path: String, name: String, state: String) -> Result<(), String> { + let sanitized = safe_name(&name)?; + let dir = Path::new(&vault_path).join(".graph-notes").join("workspaces"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + crate::atomic_write(&dir.join(format!("{}.json", sanitized)), &state) +} + +#[tauri::command] +pub fn load_workspace(vault_path: String, name: String) -> Result<String, String> { + let sanitized = safe_name(&name)?; + let path = Path::new(&vault_path).join(".graph-notes").join("workspaces").join(format!("{}.json", sanitized)); + fs::read_to_string(&path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn list_workspaces(vault_path: String) -> Result<Vec<String>, String> { + let dir = Path::new(&vault_path).join(".graph-notes").join("workspaces"); + if !dir.exists() { return Ok(vec![]); } + let mut names: Vec<String> = Vec::new(); + for entry in fs::read_dir(&dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let name = entry.file_name().to_string_lossy().replace(".json", ""); + names.push(name); + } + names.sort(); + Ok(names) +} + +/* ── Tab Persistence ────────────────────────────────────── */ + +#[tauri::command] +pub fn save_tabs(vault_path: String, tabs: String) -> Result<(), String> { + let dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + crate::atomic_write(&dir.join("tabs.json"), &tabs) +} + +#[tauri::command] +pub fn load_tabs(vault_path: String) -> Result { + let path = Path::new(&vault_path).join(".graph-notes").join("tabs.json"); + if !path.exists() { return Ok("[]".to_string()); } + fs::read_to_string(&path).map_err(|e| e.to_string()) +} + +/* ── Canvas / Whiteboard Persistence ────────────────────── */ + +#[tauri::command] +pub fn save_canvas(vault_path: String, name: String, data: String) -> Result<(), String> { + let sanitized = safe_name(&name)?; + let dir = Path::new(&vault_path).join(".graph-notes").join("canvases"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + crate::atomic_write(&dir.join(format!("{}.json", sanitized)), &data) +} + +#[tauri::command] +pub fn load_canvas(vault_path: String, name: String) -> Result { + let sanitized = safe_name(&name)?; + let path = Path::new(&vault_path).join(".graph-notes").join("canvases").join(format!("{}.json", sanitized)); + if !path.exists() { return Ok("{}".to_string()); } + fs::read_to_string(&path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn list_canvases(vault_path: String) -> Result, String> { + let dir = Path::new(&vault_path).join(".graph-notes").join("canvases"); + if !dir.exists() { return Ok(vec![]); } + let mut names: Vec = Vec::new(); + for entry in fs::read_dir(&dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let name = entry.file_name().to_string_lossy().replace(".json", ""); + names.push(name); + } + names.sort(); + Ok(names) +} + +/* ── Shortcuts ──────────────────────────────────────────── */ + +#[tauri::command] +pub fn save_shortcuts(vault_path: String, shortcuts_json: String) -> Result<(), String> { + let dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + crate::atomic_write(&dir.join("shortcuts.json"), &shortcuts_json) +} + +#[tauri::command] +pub fn load_shortcuts(vault_path: String) -> Result { + let path = Path::new(&vault_path).join(".graph-notes/shortcuts.json"); + if path.exists() { + fs::read_to_string(path).map_err(|e| e.to_string()) + } else { + Ok("{}".to_string()) + } +} + +/* ── Pinned Notes ───────────────────────────────────────── */ + +#[tauri::command] +pub fn get_pinned(vault_path: String) -> Result, String> { + let path = Path::new(&vault_path).join(".graph-notes/pinned.json"); + if path.exists() { + let content = fs::read_to_string(path).map_err(|e| e.to_string())?; + serde_json::from_str(&content).map_err(|e| e.to_string()) + } else { + Ok(Vec::new()) + } +} + +#[tauri::command] +pub fn set_pinned(vault_path: String, pinned: Vec) -> Result<(), String> { + let dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + let json = serde_json::to_string_pretty(&pinned).map_err(|e| e.to_string())?; + crate::atomic_write(&dir.join("pinned.json"), &json) +} + +/* ── Theme ──────────────────────────────────────────────── */ + +#[tauri::command] +pub fn get_theme() -> Result { + let path = dirs_config_dir().join("theme"); + if path.exists() { + fs::read_to_string(&path).map_err(|e| e.to_string()) + } else { + Ok("dark-purple".to_string()) + } +} + +#[tauri::command] +pub fn set_theme(theme: String) -> Result<(), String> { + crate::atomic_write(&dirs_config_dir().join("theme"), &theme) +} + +/* 
── Favorites ──────────────────────────────────────────── */ + +#[tauri::command] +pub fn get_favorites(vault_path: String) -> Result, String> { + let path = Path::new(&vault_path).join(".graph-notes/favorites.json"); + if path.exists() { + let content = fs::read_to_string(path).map_err(|e| e.to_string())?; + serde_json::from_str(&content).map_err(|e| e.to_string()) + } else { + Ok(Vec::new()) + } +} + +#[tauri::command] +pub fn set_favorites(vault_path: String, favorites: Vec) -> Result<(), String> { + let dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&dir).map_err(|e| e.to_string())?; + let json = serde_json::to_string_pretty(&favorites).map_err(|e| e.to_string())?; + crate::atomic_write(&dir.join("favorites.json"), &json) +} + +/* ══════════════════════════════════════════════════════════ + v1.3 — Reading List & Progress Tracker + ══════════════════════════════════════════════════════════ */ + +#[tauri::command] +pub fn get_reading_list(vault_path: String) -> Result { + let rl_path = Path::new(&vault_path).join(".graph-notes").join("reading-list.json"); + if rl_path.exists() { + fs::read_to_string(&rl_path).map_err(|e| e.to_string()) + } else { + Ok("[]".into()) + } +} + +#[tauri::command] +pub fn set_reading_list(vault_path: String, data: String) -> Result<(), String> { + let gn_dir = Path::new(&vault_path).join(".graph-notes"); + fs::create_dir_all(&gn_dir).map_err(|e| e.to_string())?; + let rl_path = gn_dir.join("reading-list.json"); + atomic_write(&rl_path, &data) +} + +/* ══════════════════════════════════════════════════════════ + v1.3 — Plugin / Hook System + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)] +pub struct PluginInfo { + pub name: String, + pub filename: String, + pub enabled: bool, + pub hooks: Vec, +} + +#[tauri::command] +pub fn list_plugins(vault_path: String) -> Result, String> { + let plugins_dir = 
Path::new(&vault_path).join(".graph-notes").join("plugins"); + if !plugins_dir.exists() { + return Ok(vec![]); + } + + let mut plugins = Vec::new(); + // Check for manifest + let manifest_path = plugins_dir.join("manifest.json"); + let manifest: serde_json::Map = if manifest_path.exists() { + let data = fs::read_to_string(&manifest_path).unwrap_or_default(); + serde_json::from_str(&data).unwrap_or_default() + } else { + serde_json::Map::new() + }; + + for entry in fs::read_dir(&plugins_dir).map_err(|e| e.to_string())? { + let entry = entry.map_err(|e| e.to_string())?; + let filename = entry.file_name().to_string_lossy().to_string(); + if !filename.ends_with(".js") { continue; } + + let name = filename.replace(".js", ""); + let enabled = manifest.get(&name) + .and_then(|v| v.get("enabled")) + .and_then(|v| v.as_bool()) + .unwrap_or(true); + + // Scan for hook registrations + let content = fs::read_to_string(entry.path()).unwrap_or_default(); + let mut hooks = Vec::new(); + for hook in &["on_save", "on_create", "on_delete", "on_daily"] { + if content.contains(hook) { + hooks.push(hook.to_string()); + } + } + + plugins.push(PluginInfo { name, filename, enabled, hooks }); + } + + Ok(plugins) +} + +#[tauri::command] +pub fn toggle_plugin(vault_path: String, name: String, enabled: bool) -> Result<(), String> { + let plugins_dir = Path::new(&vault_path).join(".graph-notes").join("plugins"); + fs::create_dir_all(&plugins_dir).map_err(|e| e.to_string())?; + + let manifest_path = plugins_dir.join("manifest.json"); + let mut manifest: serde_json::Map = if manifest_path.exists() { + let data = fs::read_to_string(&manifest_path).unwrap_or_default(); + serde_json::from_str(&data).unwrap_or_default() + } else { + serde_json::Map::new() + }; + + let entry = manifest.entry(name).or_insert_with(|| serde_json::json!({})); + if let Some(obj) = entry.as_object_mut() { + obj.insert("enabled".into(), serde_json::Value::Bool(enabled)); + } + + let json = 
serde_json::to_string_pretty(&manifest).map_err(|e| e.to_string())?; + atomic_write(&manifest_path, &json) +} + +/* ══════════════════════════════════════════════════════════ + v1.3 — Vault Registry (for Federated Search) + ══════════════════════════════════════════════════════════ */ + +#[tauri::command] +pub fn get_vault_registry() -> Result, String> { + let config_path = dirs_config_dir().join("vaults.json"); + if config_path.exists() { + let data = fs::read_to_string(&config_path).map_err(|e| e.to_string())?; + let vaults: Vec = serde_json::from_str(&data).unwrap_or_default(); + Ok(vaults) + } else { + Ok(vec![]) + } +} + +#[tauri::command] +pub fn set_vault_registry(vaults: Vec) -> Result<(), String> { + let config_dir = dirs_config_dir(); + fs::create_dir_all(&config_dir).map_err(|e| e.to_string())?; + let config_path = config_dir.join("vaults.json"); + let json = serde_json::to_string_pretty(&vaults).map_err(|e| e.to_string())?; + atomic_write(&config_path, &json) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Automatic Backup Snapshots + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)] +pub struct BackupEntry { + pub name: String, + pub size: u64, + pub created: String, +} + +#[tauri::command] +pub fn create_backup(vault_path: String) -> Result { + let vault = std::path::Path::new(&vault_path); + let backup_dir = vault.join(".graph-notes").join("backups"); + fs::create_dir_all(&backup_dir).map_err(|e| e.to_string())?; + + let ts = chrono::Local::now().format("%Y-%m-%d_%H%M%S").to_string(); + let name = format!("backup_{}.zip", ts); + let zip_path = backup_dir.join(&name); + + let file = fs::File::create(&zip_path).map_err(|e| e.to_string())?; + let mut zip = zip::ZipWriter::new(file); + let options = zip::write::SimpleFileOptions::default() + .compression_method(zip::CompressionMethod::Deflated); + + for entry in walkdir::WalkDir::new(vault) + 
.into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.path().is_file()) + { + let path = entry.path(); + let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string(); + // Skip backups dir and tmp files + if rel.starts_with(".graph-notes/backups") { continue; } + if rel.contains("~tmp") { continue; } + + if let Ok(data) = fs::read(path) { + zip.start_file(&rel, options).map_err(|e| e.to_string())?; + use std::io::Write; + zip.write_all(&data).map_err(|e| e.to_string())?; + } + } + + zip.finish().map_err(|e| e.to_string())?; + + // Auto-prune: keep only last 10 backups + let mut backups = list_backup_entries(&backup_dir)?; + backups.sort_by(|a, b| b.name.cmp(&a.name)); + for old in backups.iter().skip(10) { + let _ = fs::remove_file(backup_dir.join(&old.name)); + } + + Ok(name) +} + +fn list_backup_entries(backup_dir: &std::path::Path) -> Result, String> { + let mut entries = Vec::new(); + if !backup_dir.exists() { return Ok(entries); } + + for entry in fs::read_dir(backup_dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let name = entry.file_name().to_string_lossy().to_string(); + if !name.ends_with(".zip") { continue; } + + let size = entry.metadata().map(|m| m.len()).unwrap_or(0); + // Extract timestamp from filename: backup_YYYY-MM-DD_HHMMSS.zip + let created = name.replace("backup_", "").replace(".zip", "") + .replace('_', " "); + + entries.push(BackupEntry { name, size, created }); + } + Ok(entries) +} + +#[tauri::command] +pub fn list_backups(vault_path: String) -> Result, String> { + let backup_dir = std::path::Path::new(&vault_path).join(".graph-notes").join("backups"); + let mut entries = list_backup_entries(&backup_dir)?; + entries.sort_by(|a, b| b.name.cmp(&a.name)); + Ok(entries) +} + +#[tauri::command] +pub fn restore_backup(vault_path: String, backup_name: String) -> Result { + let vault = std::path::Path::new(&vault_path); + let zip_path = vault.join(".graph-notes").join("backups").join(&backup_name); + + if !zip_path.exists() { + return Err("Backup not found".into()); + } + + let file = fs::File::open(&zip_path).map_err(|e| e.to_string())?; + let mut archive = zip::ZipArchive::new(file).map_err(|e| e.to_string())?; + let mut count = 0u32; + + for i in 0..archive.len() { + let mut entry = archive.by_index(i).map_err(|e| e.to_string())?; + if entry.is_dir() { continue; } + + let name = entry.name().to_string(); + // Skip restoring backup files themselves + if name.starts_with(".graph-notes/backups") { continue; } + + let dest = vault.join(&name); + if let Some(parent) = dest.parent() { + fs::create_dir_all(parent).map_err(|e| e.to_string())?; + } + + let mut content = Vec::new(); + use std::io::Read; + entry.read_to_end(&mut content).map_err(|e| e.to_string())?; + + crate::atomic_write_bytes(&dest, &content)?; + count += 1; + } + + Ok(count) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — Write-Ahead Log (WAL) + ══════════════════════════════════════════════════════════ */ + 
+#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)] +pub struct WalEntry { + pub timestamp: String, + pub operation: String, // "write" | "delete" | "rename" + pub path: String, + pub content_hash: String, + pub status: String, // "pending" | "complete" +} + +pub fn wal_append_entry(vault_path: &str, operation: &str, path: &str, content_hash: &str) { + let wal_path = std::path::Path::new(vault_path).join(".graph-notes").join("wal.log"); + let _ = fs::create_dir_all(std::path::Path::new(vault_path).join(".graph-notes")); + + let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S").to_string(); + let line = format!("{}|{}|{}|{}|pending\n", ts, operation, path, content_hash); + + use std::io::Write; + if let Ok(mut f) = fs::OpenOptions::new().create(true).append(true).open(&wal_path) { + let _ = f.write_all(line.as_bytes()); + } +} + +pub fn wal_mark_complete(vault_path: &str, path: &str) { + let wal_path = std::path::Path::new(vault_path).join(".graph-notes").join("wal.log"); + if !wal_path.exists() { return; } + + if let Ok(content) = fs::read_to_string(&wal_path) { + let updated: String = content.lines().map(|line| { + if line.contains(path) && line.ends_with("|pending") { + format!("{}", line.replace("|pending", "|complete")) + } else { + line.to_string() + } + }).collect::>().join("\n"); + let _ = fs::write(&wal_path, format!("{}\n", updated.trim())); + } +} + +#[tauri::command] +pub fn wal_status(vault_path: String) -> Result, String> { + let wal_path = std::path::Path::new(&vault_path).join(".graph-notes").join("wal.log"); + if !wal_path.exists() { + return Ok(vec![]); + } + + let content = fs::read_to_string(&wal_path).map_err(|e| e.to_string())?; + let entries: Vec = content.lines() + .filter(|l| !l.trim().is_empty()) + .filter_map(|line| { + let parts: Vec<&str> = line.splitn(5, '|').collect(); + if parts.len() == 5 { + Some(WalEntry { + timestamp: parts[0].to_string(), + operation: parts[1].to_string(), + path: parts[2].to_string(), + 
content_hash: parts[3].to_string(), + status: parts[4].to_string(), + }) + } else { + None + } + }) + .collect(); + + Ok(entries) +} + +#[tauri::command] +pub fn wal_recover(vault_path: String) -> Result { + let entries = wal_status(vault_path.clone())?; + let pending: Vec<&WalEntry> = entries.iter().filter(|e| e.status == "pending").collect(); + + if pending.is_empty() { + return Ok(0); + } + + // For pending writes where the file exists and hash matches, mark complete + let vault = std::path::Path::new(&vault_path); + let mut recovered = 0u32; + + for entry in &pending { + let full = vault.join(&entry.path); + if full.exists() { + if let Ok(content) = fs::read_to_string(&full) { + use sha2::{Sha256, Digest}; + let mut hasher = Sha256::new(); + hasher.update(content.as_bytes()); + let hash = format!("{:x}", hasher.finalize()); + + if hash == entry.content_hash { + wal_mark_complete(&vault_path, &entry.path); + recovered += 1; + } + } + } + } + + Ok(recovered) +} + +/* ══════════════════════════════════════════════════════════ + v1.4 — File Operation Audit Log + ══════════════════════════════════════════════════════════ */ + +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone)] +pub struct AuditEntry { + pub timestamp: String, + pub operation: String, + pub path: String, + pub detail: String, +} + +pub fn audit_log_append(vault_path: &str, operation: &str, path: &str, detail: &str) { + let log_path = std::path::Path::new(vault_path).join(".graph-notes").join("audit.log"); + let _ = fs::create_dir_all(std::path::Path::new(vault_path).join(".graph-notes")); + + let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S").to_string(); + let line = format!("{}|{}|{}|{}\n", ts, operation, path, detail); + + use std::io::Write; + if let Ok(mut f) = fs::OpenOptions::new().create(true).append(true).open(&log_path) { + let _ = f.write_all(line.as_bytes()); + } +} + +#[tauri::command] +pub fn get_audit_log(vault_path: String, limit: usize) -> Result, String> { + let 
log_path = std::path::Path::new(&vault_path).join(".graph-notes").join("audit.log"); + if !log_path.exists() { + return Ok(vec![]); + } + + let content = fs::read_to_string(&log_path).map_err(|e| e.to_string())?; + let mut entries: Vec = content.lines() + .filter(|l| !l.trim().is_empty()) + .filter_map(|line| { + let parts: Vec<&str> = line.splitn(4, '|').collect(); + if parts.len() == 4 { + Some(AuditEntry { + timestamp: parts[0].to_string(), + operation: parts[1].to_string(), + path: parts[2].to_string(), + detail: parts[3].to_string(), + }) + } else { + None + } + }) + .collect(); + + // Return most recent first, limited + entries.reverse(); + entries.truncate(limit); + Ok(entries) +} diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index dca54ee..4eab373 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -1,7 +1,7 @@ { "$schema": "https://schema.tauri.app/config/2", "productName": "Graph Notes", - "version": "1.0.0", + "version": "1.5.0", "identifier": "com.graphnotes.app", "build": { "beforeDevCommand": "npm run dev", diff --git a/src/App.tsx b/src/App.tsx index 7669375..be0ae1f 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -19,6 +19,8 @@ import { DatabaseView } from "./components/DatabaseView"; import { GitPanel } from "./components/GitPanel"; import { TimelineView } from "./components/TimelineView"; import { GraphAnalytics } from "./components/GraphAnalytics"; +import IntegrityReport from "./components/IntegrityReport"; +import AuditLog from "./components/AuditLog"; import { listNotes, readNote, @@ -389,6 +391,8 @@ export default function App() { } /> } /> } /> + navigate('/')} />} /> + navigate('/')} />} /> setCmdPaletteOpen(false)} /> diff --git a/src/components/AuditLog.tsx b/src/components/AuditLog.tsx new file mode 100644 index 0000000..7b22e98 --- /dev/null +++ b/src/components/AuditLog.tsx @@ -0,0 +1,85 @@ +import { useState, useEffect } from "react"; +import { useVault } from "../App"; +import { getAuditLog, 
type AuditEntry } from "../lib/commands"; + +export default function AuditLog({ onClose }: { onClose: () => void }) { + const { vaultPath } = useVault(); + const [entries, setEntries] = useState([]); + const [filter, setFilter] = useState(""); + const [loading, setLoading] = useState(true); + + useEffect(() => { + loadLog(); + }, [vaultPath]); + + const loadLog = async () => { + if (!vaultPath) return; + setLoading(true); + try { + const log = await getAuditLog(vaultPath, 200); + setEntries(log); + } catch { /* ignore */ } + setLoading(false); + }; + + const filtered = filter + ? entries.filter(e => + e.path.toLowerCase().includes(filter.toLowerCase()) || + e.operation.toLowerCase().includes(filter.toLowerCase()) || + e.detail.toLowerCase().includes(filter.toLowerCase()) + ) + : entries; + + const opIcon = (op: string) => { + switch (op) { + case "create": return "🆕"; + case "update": return "✏️"; + case "delete": return "🗑️"; + case "rename": return "📝"; + case "move": return "📁"; + default: return "📄"; + } + }; + + return ( +
    +
    +

    📋 Audit Log

    + +
    + +
    + setFilter(e.target.value)} + className="audit-filter-input" + /> + +
    + +
    + {loading &&
    Loading audit log...
    } + {!loading && filtered.length === 0 && ( +
    No log entries{filter ? " matching filter" : ""}
    + )} + {filtered.map((entry, i) => ( +
    + {opIcon(entry.operation)} +
    +
    + {entry.operation} + {entry.path} +
    +
    + {entry.timestamp} + {entry.detail && {entry.detail}} +
    +
    +
    + ))} +
    +
    + ); +} diff --git a/src/components/BookmarksPanel.tsx b/src/components/BookmarksPanel.tsx new file mode 100644 index 0000000..2f74c82 --- /dev/null +++ b/src/components/BookmarksPanel.tsx @@ -0,0 +1,79 @@ +import { useState, useEffect } from 'react'; +import { getBookmarks, setBookmarks, type Bookmark } from '../lib/commands'; + +interface BookmarksPanelProps { + vaultPath: string; + onNavigate: (notePath: string, line?: number) => void; + onClose: () => void; +} + +export default function BookmarksPanel({ vaultPath, onNavigate, onClose }: BookmarksPanelProps) { + const [bookmarks, setBookmarksList] = useState([]); + + const refresh = async () => { + try { + const bm = await getBookmarks(vaultPath); + setBookmarksList(bm); + } catch (e) { + console.error('Failed to load bookmarks:', e); + } + }; + + useEffect(() => { refresh(); }, [vaultPath]); + + const handleRemove = async (index: number) => { + const updated = bookmarks.filter((_, i) => i !== index); + try { + await setBookmarks(vaultPath, JSON.stringify(updated)); + setBookmarksList(updated); + } catch (e) { + console.error('Failed to update bookmarks:', e); + } + }; + + const handleClick = (bm: Bookmark) => { + const notePath = bm.note_path.endsWith('.md') ? bm.note_path : `${bm.note_path}.md`; + onNavigate(notePath, bm.line); + }; + + return ( +
    +
    +

    🔖 Bookmarks

    + +
    + +
    + {bookmarks.length === 0 ? ( +
    + No bookmarks yet. Right-click a line in the editor to add one. +
    + ) : ( +
      + {bookmarks.map((bm, i) => ( +
    • + + +
    • + ))} +
    + )} +
    +
    + ); +} diff --git a/src/components/CommandPalette.tsx b/src/components/CommandPalette.tsx index 03671bb..7ed49f3 100644 --- a/src/components/CommandPalette.tsx +++ b/src/components/CommandPalette.tsx @@ -184,6 +184,14 @@ export function CommandPalette({ open, onClose }: { open: boolean; onClose: () = { id: "import-export", icon: "📦", label: "Import / Export", action: () => { onClose(); } }, { id: "shortcuts", icon: "⌨️", label: "Keyboard Shortcuts", action: () => { onClose(); } }, ); + + // v1.4 commands + commands.push( + { id: "integrity", icon: "🛡️", label: "Integrity Report", action: () => { navigate("/integrity"); onClose(); } }, + { id: "audit-log", icon: "📋", label: "Audit Log", action: () => { navigate("/audit-log"); onClose(); } }, + { id: "create-backup", icon: "💾", label: "Create Backup", action: () => { onClose(); } }, + { id: "verify-vault", icon: "🔐", label: "Verify Vault Checksums", action: () => { navigate("/integrity"); onClose(); } }, + ); } // Note matches diff --git a/src/components/GraphAnalytics.tsx b/src/components/GraphAnalytics.tsx index 8fa850e..17118a5 100644 --- a/src/components/GraphAnalytics.tsx +++ b/src/components/GraphAnalytics.tsx @@ -1,6 +1,7 @@ -import { useEffect, useState, useMemo } from "react"; +import { useEffect, useState } from "react"; import { useVault } from "../App"; import { buildGraph } from "../lib/commands"; +import { detectClusters, clusterColors, type ClusterResult } from "../lib/clustering"; interface AnalyticsData { totalNotes: number; @@ -8,21 +9,23 @@ interface AnalyticsData { avgLinks: number; orphans: { name: string }[]; mostConnected: { name: string; count: number }[]; + clusters: ClusterResult; + clusterLabels: Map; } /** - * GraphAnalytics — Orphan detection, most-connected, graph stats. + * GraphAnalytics — Orphan detection, most-connected, clusters, graph stats. 
*/ export function GraphAnalytics() { const { vaultPath, navigateToNote } = useVault(); const [data, setData] = useState(null); + const [tab, setTab] = useState<'overview' | 'clusters'>('overview'); useEffect(() => { if (!vaultPath) return; buildGraph(vaultPath).then(graph => { const linkCount = new Map(); - // Count connections per node graph.nodes.forEach(n => linkCount.set(n.id, 0)); graph.edges.forEach(e => { linkCount.set(e.source, (linkCount.get(e.source) || 0) + 1); @@ -45,12 +48,25 @@ export function GraphAnalytics() { const totalLinks = graph.edges.length; const avgLinks = graph.nodes.length > 0 ? totalLinks / graph.nodes.length : 0; + // Cluster detection + const clusters = detectClusters(graph); + const clusterLabels = new Map(); + for (const [clusterId, nodeIds] of clusters.clusters) { + const labels = nodeIds.map(id => { + const node = graph.nodes.find(n => n.id === id); + return node?.label || id.replace(".md", ""); + }); + clusterLabels.set(clusterId, labels); + } + setData({ totalNotes: graph.nodes.length, totalLinks, avgLinks, orphans, mostConnected, + clusters, + clusterLabels, }); }).catch(() => { }); }, [vaultPath]); @@ -59,68 +75,121 @@ export function GraphAnalytics() { return
    Loading analytics…
    ; } + const colors = clusterColors(Math.max(data.clusters.clusterCount, 1)); + return (

    📊 Graph Analytics

    - {/* Stats */} -
    -
    - {data.totalNotes} - Notes -
    -
    - {data.totalLinks} - Links -
    -
    - {data.avgLinks.toFixed(1)} - Avg Links -
    -
    - {data.orphans.length} - Orphans -
    + {/* Tab bar */} +
    + +
    - {/* Most Connected */} -
    -

    🔗 Most Connected

    -
    - {data.mostConnected.map((item, i) => ( -
    navigateToNote(item.name)} - > - #{i + 1} - {item.name} - {item.count} links -
    + {tab === 'overview' && ( + <> + {/* Stats */} +
    +
    + {data.totalNotes} + Notes +
    +
    + {data.totalLinks} + Links +
    +
    + {data.avgLinks.toFixed(1)} + Avg Links +
    +
    + {data.orphans.length} + Orphans
    - ))} -
    -
    - - {/* Orphans */} -
    -

    🏝️ Orphan Notes ({data.orphans.length})

    - {data.orphans.length === 0 ? ( -

    No orphan notes — every note is linked!

    - ) : ( -
    - {data.orphans.map(o => ( - - ))}
    - )} -
    + + {/* Most Connected */} +
    +

    🔗 Most Connected

    +
    + {data.mostConnected.map((item, i) => ( +
    navigateToNote(item.name)} + > + #{i + 1} + {item.name} + {item.count} links +
    +
    + ))} +
    +
    + + {/* Orphans */} +
    +

    🏝️ Orphan Notes ({data.orphans.length})

    + {data.orphans.length === 0 ? ( +

    No orphan notes — every note is linked!

    + ) : ( +
    + {data.orphans.map(o => ( + + ))} +
    + )} +
    + + )} + + {tab === 'clusters' && ( +
    + {Array.from(data.clusterLabels.entries()) + .sort((a, b) => b[1].length - a[1].length) + .map(([clusterId, labels]) => ( +
    +

    + + Cluster {clusterId + 1} + {labels.length} notes +

    +
    + {labels.map(name => ( + + ))} +
    +
    + ))} +
    + )}
    ); } diff --git a/src/components/GraphView.tsx b/src/components/GraphView.tsx index 7d57865..4a33b86 100644 --- a/src/components/GraphView.tsx +++ b/src/components/GraphView.tsx @@ -1,326 +1,195 @@ -import { useEffect, useRef, useState, useCallback } from "react"; +import { useEffect, useState, useCallback, useMemo } from "react"; import { useNavigate } from "react-router-dom"; +import { + Canvas, + CanvasProvider as _CanvasProvider, + registerNodeType, + ViewportControls, +} from "@blinksgg/canvas"; +// @ts-ignore -- subpath export types not emitted +import { InMemoryStorageAdapter } from "@blinksgg/canvas/db"; +import { useForceLayout, useFitToBounds, FitToBoundsMode } from "@blinksgg/canvas/hooks"; import { useVault } from "../App"; -import { buildGraph, type GraphData, type GraphEdge } from "../lib/commands"; +import { buildGraph, type GraphData } from "../lib/commands"; +import { detectClusters, clusterColors } from "../lib/clustering"; +import { NoteGraphNode } from "./NoteGraphNode"; -const NODE_COLORS = [ - "#8b5cf6", "#3b82f6", "#10b981", "#f59e0b", "#f43f5e", - "#06b6d4", "#a855f7", "#ec4899", "#14b8a6", "#ef4444", -]; +// Cast to bypass dist/source type mismatch +const CanvasProviderAny = _CanvasProvider as any; -/* ── Force simulation types ─────────────────────────────── */ -interface SimNode { - id: string; - label: string; - path: string; - x: number; - y: number; - vx: number; - vy: number; - radius: number; - color: string; - linkCount: number; -} +// Register custom node type +registerNodeType("note", NoteGraphNode as any); /** - * GraphView — Force-directed graph rendered with HTML5 Canvas. - * Nodes represent notes, edges represent wikilinks between them. + * GraphView — Note graph powered by @blinksgg/canvas v3.0. 
*/ export function GraphView() { const { vaultPath } = useVault(); const navigate = useNavigate(); - const canvasRef = useRef(null); - const containerRef = useRef(null); const [graphData, setGraphData] = useState(null); - const nodesRef = useRef([]); - const edgesRef = useRef([]); - const animRef = useRef(0); - const panRef = useRef({ x: 0, y: 0 }); - const zoomRef = useRef(1); - const dragRef = useRef<{ node: SimNode; offsetX: number; offsetY: number } | null>(null); - const isPanningRef = useRef(false); - const lastMouseRef = useRef({ x: 0, y: 0 }); - const hoveredRef = useRef(null); - const [nodeCount, setNodeCount] = useState(0); - const [edgeCount, setEdgeCount] = useState(0); + const [adapterReady, setAdapterReady] = useState(false); + const [layout, setLayout] = useState<"force" | "tree" | "grid">("force"); + const [search, setSearch] = useState(""); - // Load graph data from backend + // Stable adapter instance + const adapter = useMemo(() => new InMemoryStorageAdapter(), []); + const graphId = `vault-${vaultPath || "default"}`; + + // 1. Load graph from backend useEffect(() => { if (!vaultPath) return; - buildGraph(vaultPath).then(data => { - setGraphData(data); - setNodeCount(data.nodes.length); - setEdgeCount(data.edges.length); - }).catch(() => { }); + buildGraph(vaultPath).then(setGraphData).catch(err => { + console.error("[GraphView] buildGraph failed:", err); + }); }, [vaultPath]); - // Initialize simulation when data arrives + // 2. 
Populate adapter with graph data, THEN allow canvas to mount useEffect(() => { if (!graphData) return; - const { nodes, edges } = graphData; - const simNodes: SimNode[] = nodes.map((n, i) => ({ - id: n.id, - label: n.label, - path: n.path, - x: (Math.random() - 0.5) * 400, - y: (Math.random() - 0.5) * 400, - vx: 0, - vy: 0, - radius: Math.max(6, Math.min(20, 6 + n.link_count * 2)), - color: NODE_COLORS[i % NODE_COLORS.length], - linkCount: n.link_count, - })); + const populate = async () => { + const clusters = detectClusters(graphData); + const colors = clusterColors(Math.max(clusters.clusterCount, 1)); - nodesRef.current = simNodes; - edgesRef.current = edges; + // Build node records for the adapter + const nodes = graphData.nodes.map((n) => { + const clusterId = clusters.assignments.get(n.id) ?? 0; + const radius = Math.max(6, Math.min(20, 6 + n.link_count * 2)); + return { + id: n.id, + graph_id: graphId, + label: n.label, + node_type: "note", + ui_properties: { + x: (Math.random() - 0.5) * 800, + y: (Math.random() - 0.5) * 800, + width: Math.max(120, 80 + radius * 4), + height: 50, + }, + data: { + path: n.path, + link_count: n.link_count, + color: colors[clusterId % colors.length], + tags: [], + cluster_id: clusterId, + }, + }; + }); - // Center the view - panRef.current = { x: 0, y: 0 }; - zoomRef.current = 1; - }, [graphData]); + // Build edge records + const edges = graphData.edges.map((e, i) => ({ + id: `edge-${i}`, + graph_id: graphId, + source_node_id: e.source, + target_node_id: e.target, + data: {}, + })); - // Convert screen coords to world coords - const screenToWorld = useCallback((sx: number, sy: number, canvas: HTMLCanvasElement) => { - const rect = canvas.getBoundingClientRect(); - const cx = rect.width / 2; - const cy = rect.height / 2; - return { - x: (sx - rect.left - cx - panRef.current.x) / zoomRef.current, - y: (sy - rect.top - cy - panRef.current.y) / zoomRef.current, - }; - }, []); - - // Find node at world position - const hitTest = 
useCallback((wx: number, wy: number): SimNode | null => { - // Iterate in reverse so topmost nodes are hit first - for (let i = nodesRef.current.length - 1; i >= 0; i--) { - const n = nodesRef.current[i]; - const dx = wx - n.x; - const dy = wy - n.y; - if (dx * dx + dy * dy <= (n.radius + 4) * (n.radius + 4)) { - return n; + // Populate adapter via batch create + if (nodes.length > 0) { + await adapter.createNodes(graphId, nodes); } - } - return null; - }, []); - - // Animation loop — force simulation + rendering - useEffect(() => { - const canvas = canvasRef.current; - if (!canvas) return; - const ctx = canvas.getContext("2d"); - if (!ctx) return; - - let running = true; - let coolingFactor = 1; - - const tick = () => { - if (!running) return; - - const nodes = nodesRef.current; - const edges = edgesRef.current; - if (nodes.length === 0) { - animRef.current = requestAnimationFrame(tick); - return; + if (edges.length > 0) { + await adapter.createEdges(graphId, edges); } - // ── Force simulation step ── - const alpha = 0.3 * coolingFactor; - if (coolingFactor > 0.001) coolingFactor *= 0.995; - - // Repulsion (charge) - for (let i = 0; i < nodes.length; i++) { - for (let j = i + 1; j < nodes.length; j++) { - const a = nodes[i], b = nodes[j]; - let dx = b.x - a.x; - let dy = b.y - a.y; - let dist = Math.sqrt(dx * dx + dy * dy) || 1; - const force = (150 * 150) / dist; - const fx = (dx / dist) * force * alpha; - const fy = (dy / dist) * force * alpha; - a.vx -= fx; - a.vy -= fy; - b.vx += fx; - b.vy += fy; - } - } - - // Build node index for edge lookup - const nodeMap = new Map(); - for (const n of nodes) nodeMap.set(n.id, n); - - // Attraction (springs) - for (const edge of edges) { - const a = nodeMap.get(edge.source); - const b = nodeMap.get(edge.target); - if (!a || !b) continue; - let dx = b.x - a.x; - let dy = b.y - a.y; - let dist = Math.sqrt(dx * dx + dy * dy) || 1; - const force = (dist - 100) * 0.05 * alpha; - const fx = (dx / dist) * force; - const fy = (dy 
/ dist) * force; - a.vx += fx; - a.vy += fy; - b.vx -= fx; - b.vy -= fy; - } - - // Center gravity - for (const n of nodes) { - n.vx -= n.x * 0.01 * alpha; - n.vy -= n.y * 0.01 * alpha; - } - - // Apply velocity + damping - for (const n of nodes) { - if (dragRef.current?.node === n) continue; - n.vx *= 0.6; - n.vy *= 0.6; - n.x += n.vx; - n.y += n.vy; - } - - // ── Render ── - const dpr = window.devicePixelRatio || 1; - const w = canvas.clientWidth; - const h = canvas.clientHeight; - canvas.width = w * dpr; - canvas.height = h * dpr; - ctx.setTransform(dpr, 0, 0, dpr, 0, 0); - - ctx.clearRect(0, 0, w, h); - ctx.save(); - ctx.translate(w / 2 + panRef.current.x, h / 2 + panRef.current.y); - ctx.scale(zoomRef.current, zoomRef.current); - - // Draw edges - ctx.strokeStyle = "rgba(255,255,255,0.08)"; - ctx.lineWidth = 1; - for (const edge of edges) { - const a = nodeMap.get(edge.source); - const b = nodeMap.get(edge.target); - if (!a || !b) continue; - ctx.beginPath(); - ctx.moveTo(a.x, a.y); - ctx.lineTo(b.x, b.y); - ctx.stroke(); - } - - // Draw nodes - const hovered = hoveredRef.current; - for (const n of nodes) { - const isHovered = n === hovered; - ctx.beginPath(); - ctx.arc(n.x, n.y, n.radius, 0, Math.PI * 2); - ctx.fillStyle = isHovered ? "#fff" : n.color; - ctx.fill(); - - if (isHovered) { - ctx.strokeStyle = n.color; - ctx.lineWidth = 2; - ctx.stroke(); - } - - // Label - ctx.fillStyle = isHovered ? "#fff" : "rgba(255,255,255,0.7)"; - ctx.font = `${isHovered ? 
"bold " : ""}11px system-ui, sans-serif`; - ctx.textAlign = "center"; - ctx.textBaseline = "top"; - ctx.fillText(n.label, n.x, n.y + n.radius + 4, 120); - } - - ctx.restore(); - animRef.current = requestAnimationFrame(tick); + console.log(`[GraphView] Populated ${nodes.length} nodes, ${edges.length} edges`); + setAdapterReady(true); }; - animRef.current = requestAnimationFrame(tick); + populate(); + }, [graphData, graphId, adapter]); - return () => { - running = false; - cancelAnimationFrame(animRef.current); - }; - }, [graphData]); + const handleNodeClick = useCallback((nodeId: string) => { + if (!graphData) return; + const node = graphData.nodes.find(n => n.id === nodeId); + if (node) navigate(`/note/${encodeURIComponent(node.path)}`); + }, [graphData, navigate]); - // ── Mouse interaction handlers ── - const handleMouseDown = useCallback((e: React.MouseEvent) => { - const canvas = canvasRef.current; - if (!canvas) return; - const world = screenToWorld(e.clientX, e.clientY, canvas); - const node = hitTest(world.x, world.y); + const renderNode = useCallback(({ node, isSelected }: any) => ( + + ), []); - if (node) { - dragRef.current = { node, offsetX: world.x - node.x, offsetY: world.y - node.y }; - } else { - isPanningRef.current = true; - } - lastMouseRef.current = { x: e.clientX, y: e.clientY }; - }, [screenToWorld, hitTest]); + if (!graphData) { + return
    Loading graph…
    ; + } - const handleMouseMove = useCallback((e: React.MouseEvent) => { - const canvas = canvasRef.current; - if (!canvas) return; - - if (dragRef.current) { - const world = screenToWorld(e.clientX, e.clientY, canvas); - dragRef.current.node.x = world.x - dragRef.current.offsetX; - dragRef.current.node.y = world.y - dragRef.current.offsetY; - dragRef.current.node.vx = 0; - dragRef.current.node.vy = 0; - } else if (isPanningRef.current) { - panRef.current.x += e.clientX - lastMouseRef.current.x; - panRef.current.y += e.clientY - lastMouseRef.current.y; - } else { - // Hover detection - const world = screenToWorld(e.clientX, e.clientY, canvas); - const node = hitTest(world.x, world.y); - hoveredRef.current = node; - canvas.style.cursor = node ? "pointer" : "grab"; - } - lastMouseRef.current = { x: e.clientX, y: e.clientY }; - }, [screenToWorld, hitTest]); - - const handleMouseUp = useCallback(() => { - if (dragRef.current) { - dragRef.current = null; - } - isPanningRef.current = false; - }, []); - - const handleClick = useCallback((e: React.MouseEvent) => { - const canvas = canvasRef.current; - if (!canvas) return; - const world = screenToWorld(e.clientX, e.clientY, canvas); - const node = hitTest(world.x, world.y); - if (node) { - navigate(`/note/${encodeURIComponent(node.path)}`); - } - }, [screenToWorld, hitTest, navigate]); - - const handleWheel = useCallback((e: React.WheelEvent) => { - e.preventDefault(); - const factor = e.deltaY > 0 ? 0.9 : 1.1; - zoomRef.current = Math.max(0.1, Math.min(5, zoomRef.current * factor)); - }, []); + if (!adapterReady) { + return
    Building graph…
    ; + } return ( -
    -
    - - {nodeCount} notes · {edgeCount} links - + +
    +
    + + {graphData.nodes.length} notes · {graphData.edges.length} links + + + setSearch(e.target.value)} + className="graph-search-input" + /> +
    + + + + +
    - +
    + ); +} + +/** Applies force layout + fit once on mount */ +function AutoLayout() { + const { applyForceLayout } = useForceLayout(); + const { fitToBounds } = useFitToBounds(); + const [applied, setApplied] = useState(false); + + useEffect(() => { + if (applied) return; + const timer = setTimeout(async () => { + try { + await applyForceLayout(); + fitToBounds(FitToBoundsMode.Graph, 60); + } catch (e) { + console.warn("[GraphView] Layout apply failed:", e); + } + setApplied(true); + }, 300); + return () => clearTimeout(timer); + }, [applied, applyForceLayout, fitToBounds]); + + return null; +} + +function LayoutButtons({ layout, onLayoutChange }: { layout: string; onLayoutChange: (l: any) => void }) { + const { applyForceLayout } = useForceLayout(); + const { fitToBounds } = useFitToBounds(); + + const handleLayout = async (mode: string) => { + onLayoutChange(mode); + if (mode === "force") { + await applyForceLayout(); + fitToBounds(FitToBoundsMode.Graph, 60); + } + }; + + return ( +
    + {(["force", "tree", "grid"] as const).map(m => ( + + ))}
    ); } diff --git a/src/components/ImportExport.tsx b/src/components/ImportExport.tsx index 809c229..f3fa3ff 100644 --- a/src/components/ImportExport.tsx +++ b/src/components/ImportExport.tsx @@ -1,9 +1,9 @@ import { useState, useCallback } from "react"; import { useVault } from "../App"; -import { exportVaultZip, importFolder } from "../lib/commands"; +import { exportVaultZip, importFolder, exportSite } from "../lib/commands"; /** - * ImportExport — Import notes from folders, export vault as ZIP. + * ImportExport — Import notes from folders, export vault as ZIP, publish as site. */ export function ImportExport({ onClose }: { onClose: () => void }) { const { vaultPath, refreshNotes } = useVault(); @@ -39,6 +39,24 @@ export function ImportExport({ onClose }: { onClose: () => void }) { setLoading(false); }, [vaultPath, refreshNotes]); + const handlePublishSite = useCallback(async () => { + if (!vaultPath) return; + const notePathsInput = prompt("Note paths to publish (comma-separated, e.g. note1.md,folder/note2.md):"); + if (!notePathsInput?.trim()) return; + const outputDir = prompt("Output directory for the site:", `${vaultPath}/../published-site`); + if (!outputDir?.trim()) return; + + setLoading(true); + try { + const notePaths = notePathsInput.split(",").map(p => p.trim()).filter(Boolean); + const result = await exportSite(vaultPath, notePaths, outputDir.trim()); + setStatus(`✅ ${result}`); + } catch (e: any) { + setStatus(`❌ Publish failed: ${e}`); + } + setLoading(false); + }, [vaultPath]); + return (
    e.stopPropagation()}> @@ -68,6 +86,18 @@ export function ImportExport({ onClose }: { onClose: () => void }) {
    +
    + +
    +

    Publish as Site

    +

    + Export selected notes as a browsable HTML micro-site with resolved wikilinks. +

    + +
    + {status &&
    {status}
    }
    diff --git a/src/components/IntegrityReport.tsx b/src/components/IntegrityReport.tsx new file mode 100644 index 0000000..c35614b --- /dev/null +++ b/src/components/IntegrityReport.tsx @@ -0,0 +1,232 @@ +import { useState, useEffect } from "react"; +import { useVault } from "../App"; +import { + scanIntegrity, + computeChecksums, + verifyChecksums, + findOrphanAttachments, + createBackup, + listBackups, + restoreBackup, + type IntegrityIssue, + type ChecksumMismatch, + type OrphanAttachment, + type BackupEntry, +} from "../lib/commands"; + +export default function IntegrityReport({ onClose }: { onClose: () => void }) { + const { vaultPath } = useVault(); + const [issues, setIssues] = useState([]); + const [mismatches, setMismatches] = useState([]); + const [orphans, setOrphans] = useState([]); + const [backups, setBackups] = useState([]); + const [loading, setLoading] = useState(false); + const [activeTab, setActiveTab] = useState<"scan" | "checksums" | "orphans" | "backups">("scan"); + const [status, setStatus] = useState(""); + + useEffect(() => { + runScan(); + loadBackups(); + }, [vaultPath]); + + const runScan = async () => { + if (!vaultPath) return; + setLoading(true); + setStatus("Scanning vault..."); + try { + const result = await scanIntegrity(vaultPath); + setIssues(result); + setStatus(`Found ${result.length} issue(s)`); + } catch (e) { + setStatus(`Scan failed: ${e}`); + } + setLoading(false); + }; + + const runChecksumVerify = async () => { + if (!vaultPath) return; + setLoading(true); + setStatus("Computing checksums..."); + try { + await computeChecksums(vaultPath); + const result = await verifyChecksums(vaultPath); + setMismatches(result); + setStatus(result.length === 0 ? 
"All checksums valid ✓" : `${result.length} mismatch(es) found`); + } catch (e) { + setStatus(`Checksum verification failed: ${e}`); + } + setLoading(false); + }; + + const runOrphanScan = async () => { + if (!vaultPath) return; + setLoading(true); + setStatus("Scanning attachments..."); + try { + const result = await findOrphanAttachments(vaultPath); + setOrphans(result); + setStatus(result.length === 0 ? "No orphan attachments ✓" : `${result.length} orphan(s) found`); + } catch (e) { + setStatus(`Orphan scan failed: ${e}`); + } + setLoading(false); + }; + + const handleCreateBackup = async () => { + if (!vaultPath) return; + setLoading(true); + setStatus("Creating backup..."); + try { + const name = await createBackup(vaultPath); + setStatus(`Backup created: ${name}`); + loadBackups(); + } catch (e) { + setStatus(`Backup failed: ${e}`); + } + setLoading(false); + }; + + const loadBackups = async () => { + if (!vaultPath) return; + try { + const list = await listBackups(vaultPath); + setBackups(list); + } catch { /* ignore */ } + }; + + const handleRestore = async (name: string) => { + if (!vaultPath) return; + if (!confirm(`Restore from ${name}? This will overwrite current files.`)) return; + setLoading(true); + try { + const count = await restoreBackup(vaultPath, name); + setStatus(`Restored ${count} files from ${name}`); + } catch (e) { + setStatus(`Restore failed: ${e}`); + } + setLoading(false); + }; + + const formatSize = (bytes: number) => { + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1048576) return `${(bytes / 1024).toFixed(1)} KB`; + return `${(bytes / 1048576).toFixed(1)} MB`; + }; + + const severityIcon = (s: string) => s === "error" ? "🔴" : s === "warning" ? "🟡" : "🔵"; + + return ( +
    +
    +

    🛡️ Integrity Report

    + +
    + +
    + {(["scan", "checksums", "orphans", "backups"] as const).map((tab) => ( + + ))} +
    + + {status &&
    {loading ? "⏳ " : ""}{status}
    } + +
    + {activeTab === "scan" && ( + <> + + {issues.length === 0 && !loading && ( +
    ✅ No issues found — vault is clean
    + )} + {issues.map((issue, i) => ( +
    + {severityIcon(issue.severity)} +
    + {issue.path} + {issue.description} +
    +
    + ))} + + )} + + {activeTab === "checksums" && ( + <> + + {mismatches.length === 0 && !loading && ( +
    ✅ All checksums match
    + )} + {mismatches.map((m, i) => ( +
    + ⚠️ +
    + {m.path} + + Expected: {m.expected.slice(0, 12)}… → Got: {m.actual.slice(0, 12)}… + +
    +
    + ))} + + )} + + {activeTab === "orphans" && ( + <> + + {orphans.length === 0 && !loading && ( +
    ✅ No orphan attachments
    + )} + {orphans.map((o, i) => ( +
    + 📎 +
    + {o.path} + {formatSize(o.size)} +
    +
    + ))} + + )} + + {activeTab === "backups" && ( + <> + + {backups.length === 0 && !loading && ( +
    No backups yet
    + )} + {backups.map((b, i) => ( +
    + 💾 +
    + {b.name} + + {b.created} · {formatSize(b.size)} + +
    + +
    + ))} + + )} +
    +
    + ); +} diff --git a/src/components/NoteGraphNode.tsx b/src/components/NoteGraphNode.tsx new file mode 100644 index 0000000..af02acc --- /dev/null +++ b/src/components/NoteGraphNode.tsx @@ -0,0 +1,43 @@ +import { useNavigate } from "react-router-dom"; + +/** + * NoteGraphNode — Custom node component for the note graph. + * Shows title, tag pills, link count badge, and cluster color. + */ +export function NoteGraphNode({ nodeData, isSelected }: { nodeData: any; isSelected?: boolean }) { + const navigate = useNavigate(); + const meta = nodeData.dbData ?? nodeData.data ?? {}; + const label = nodeData.label || meta.label || "Untitled"; + const tags: string[] = meta.tags || []; + const linkCount: number = meta.link_count ?? 0; + const color: string = meta.color || "#8b5cf6"; + const path = meta.path || ""; + + const handleDoubleClick = (e: React.MouseEvent) => { + e.stopPropagation(); + if (path) navigate(`/note/${encodeURIComponent(path)}`); + }; + + return ( +
    +
    +
    + {label} + {tags.length > 0 && ( +
    + {tags.slice(0, 3).map((t: string) => ( + {t} + ))} +
    + )} +
    + {linkCount > 0 && ( + {linkCount} + )} +
    + ); +} diff --git a/src/components/QuickCapture.tsx b/src/components/QuickCapture.tsx new file mode 100644 index 0000000..0902b20 --- /dev/null +++ b/src/components/QuickCapture.tsx @@ -0,0 +1,74 @@ +import { useState, useRef, useEffect } from 'react'; +import { appendToInbox } from '../lib/commands'; + +interface QuickCaptureProps { + vaultPath: string; + onClose: () => void; + onCaptured?: () => void; +} + +export default function QuickCapture({ vaultPath, onClose, onCaptured }: QuickCaptureProps) { + const [content, setContent] = useState(''); + const [saving, setSaving] = useState(false); + const textareaRef = useRef(null); + + useEffect(() => { + textareaRef.current?.focus(); + }, []); + + const handleSave = async () => { + if (!content.trim()) return; + setSaving(true); + try { + await appendToInbox(vaultPath, content); + setContent(''); + onCaptured?.(); + onClose(); + } catch (e) { + console.error('Failed to save to inbox:', e); + } + setSaving(false); + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Escape') { + onClose(); + } else if (e.key === 'Enter' && (e.metaKey || e.ctrlKey)) { + handleSave(); + } + }; + + return ( +
    +
    e.stopPropagation()} + id="quick-capture" + > +
    + ⚡ Quick Capture + ⌘↵ to save · Esc to close +
    +