harden: atomic writes, path validation, and save pipeline integrity
Backend (lib.rs): - Add atomic_write/atomic_write_bytes helpers (write→fsync→rename→fsync parent) - Apply safe_vault_path() to all 20 file-access commands (was 3) - Apply safe_name() to workspace/canvas/attachment filename params - Fix 2 silent error swallowing sites (let _ = fs::write) - Fix git_status/git_commit/git_init error handling (check exit codes) - Migrate all Regex::new() to LazyLock statics (10 total) - Use ~tmp suffix for atomic writes (not extension replacement) - Replace 2 unwrap() panic sites with unwrap_or_default() - Skip ~tmp files in export_vault_zip Frontend (Editor.tsx): - Fix critical note-switch race: capture note path at call time, not when debounced timer fires (prevented old content → new note) - Clear pending save timeout on note switch (defense-in-depth) - Fix handleSlashSelect: route through debounced saveContent pipeline with domToMarkdown() instead of direct writeNote() with innerText - Fix handlePaste stale closure (add saveContent to deps)
This commit is contained in:
parent
6fa547802c
commit
d639d40612
2 changed files with 224 additions and 124 deletions
|
|
@ -1,11 +1,119 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::LazyLock;
|
||||
use walkdir::WalkDir;
|
||||
use regex::Regex;
|
||||
use chrono::Local;
|
||||
|
||||
/* ── Atomic File Write ─────────────────────────────────────
 * 1. Write to a `~tmp` sibling in the same directory
 * 2. fsync the file descriptor (data hits disk)
 * 3. Atomic rename `~tmp` → target (POSIX guarantees)
 * 4. fsync the parent directory (metadata durability)
 *
 * If any step fails the original file is untouched and
 * the `~tmp` file is cleaned up.
 * ────────────────────────────────────────────────────────── */
|
||||
|
||||
/// Atomically write UTF-8 `content` to `path`.
///
/// Thin convenience wrapper over `atomic_write_bytes`; see that function
/// for the write → fsync → rename → fsync-parent sequence and for the
/// cleanup guarantees on failure.
fn atomic_write(path: &Path, content: &str) -> Result<(), String> {
    atomic_write_bytes(path, content.as_bytes())
}
|
||||
|
||||
/// Durably write `data` to `path` without ever exposing a partial file.
///
/// Sequence: write the bytes to a `~tmp` sibling in the same directory,
/// fsync it, atomically rename it over `path`, then best-effort fsync the
/// parent directory so the rename itself survives a crash. If anything
/// fails before the rename completes, the `~tmp` file is removed and the
/// original `path` is left untouched.
///
/// Returns a human-readable error string on failure.
fn atomic_write_bytes(path: &Path, data: &[u8]) -> Result<(), String> {
    // The parent directory is needed twice: to create missing folders up
    // front, and to fsync directory metadata after the rename.
    let parent = path
        .parent()
        .ok_or_else(|| "Cannot determine parent directory".to_string())?;
    fs::create_dir_all(parent)
        .map_err(|e| format!("Failed to create directory: {}", e))?;

    // `~tmp` is appended to the full name (not swapped for the extension)
    // so the temp file cannot collide with a real file sharing the stem.
    let mut tmp_os = path.as_os_str().to_os_string();
    tmp_os.push("~tmp");
    let tmp_path = PathBuf::from(tmp_os);

    // Steps 1 + 2 — write the payload to the temp file and flush it to disk.
    let write_and_sync = || -> Result<(), String> {
        let mut file = fs::File::create(&tmp_path)
            .map_err(|e| format!("Failed to create temp file: {}", e))?;
        file.write_all(data)
            .map_err(|e| format!("Failed to write temp file: {}", e))?;
        file.sync_all()
            .map_err(|e| format!("Failed to sync file: {}", e))
    };
    if let Err(e) = write_and_sync() {
        // Leave no debris behind; the target file was never touched.
        let _ = fs::remove_file(&tmp_path);
        return Err(e);
    }

    // Step 3 — atomic replace (POSIX rename semantics).
    if let Err(e) = fs::rename(&tmp_path, path) {
        let _ = fs::remove_file(&tmp_path);
        return Err(format!("Failed to finalize write: {}", e));
    }

    // Step 4 — fsync the directory so the rename is durable. Best-effort:
    // opening/syncing a directory is not supported on every platform.
    if let Ok(dir) = fs::File::open(parent) {
        let _ = dir.sync_all();
    }

    Ok(())
}
|
||||
|
||||
/// Sanitize a name used to construct filenames (workspace names, canvas names, etc.)
|
||||
/// Rejects any path component separators or traversal sequences.
|
||||
/// Validate a user-supplied name used as a single filename component
/// (workspace names, canvas names, attachment filenames, …).
///
/// Returns the trimmed name, or an error when the name is empty after
/// trimming, or contains a path separator (`/`, `\`), a `..` traversal
/// sequence, or a NUL byte.
fn safe_name(name: &str) -> Result<String, String> {
    let trimmed = name.trim();
    if trimmed.is_empty() {
        return Err("Name cannot be empty".to_string());
    }
    // Reject anything that could escape the intended directory.
    let has_separator_or_nul = trimmed.chars().any(|c| matches!(c, '/' | '\\' | '\0'));
    if has_separator_or_nul || trimmed.contains("..") {
        return Err("Name contains invalid characters".to_string());
    }
    Ok(trimmed.to_string())
}
|
||||
|
||||
/* ── Static Regexes ────────────────────────────────────────
|
||||
* Compiled once, reused across calls.
|
||||
* ────────────────────────────────────────────────────────── */
|
||||
|
||||
// Wikilink `[[target]]` / `[[target|alias]]`; capture 1 is the target
// (the alias part is matched but deliberately not captured) — used to
// collapse links to their target text in note previews.
static PREVIEW_WIKILINK_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]").unwrap()
});

// Single markdown formatting characters (*, _, ~, `) so they can be
// stripped from preview text.
static PREVIEW_FMT_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"[*_~`]").unwrap()
});

// Daily-note filenames of the exact form `YYYY-MM-DD.md` (anchored on
// the whole name).
static DAILY_NOTE_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"^\d{4}-\d{2}-\d{2}\.md$").unwrap()
});

// A full task line `- [s] text` where s is ' ', 'x' or '/'.
// Captures: 1 = leading indent, 2 = state character, 3 = task text.
static TASK_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"^(\s*)- \[([ x/])\] (.+)$").unwrap()
});

// Just the `- [s]` checkbox marker anywhere in a line (no captures);
// used to locate the marker when toggling a task's state in place.
static TASK_MARKER_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"- \[[ x/]\]").unwrap()
});

// Wikilink matcher for HTML export: unlike PREVIEW_WIKILINK_RE the alias
// IS captured (group 2) so it can be used as the link label.
static EXPORT_WIKILINK_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap()
});

// Flashcards of the form `?? question :: answer ??`.
// Captures: 1 = question, 2 = answer (lazy, surrounding whitespace
// excluded by the \s* separators).
static FLASHCARD_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"\?\?\s*(.+?)\s*::\s*(.+?)\s*\?\?").unwrap()
});

// YAML frontmatter fenced by `---` lines at the very start of a note
// (no `(?m)`, so `^` anchors to the start of the content). Capture 1 is
// the YAML body; `(?s)` lets `.` span newlines.
static FRONTMATTER_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?s)^---\n(.+?)\n---").unwrap()
});
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct NoteEntry {
|
||||
pub path: String,
|
||||
|
|
@ -131,13 +239,7 @@ fn read_note(vault_path: String, relative_path: String) -> Result<String, String
|
|||
#[tauri::command]
|
||||
fn write_note(vault_path: String, relative_path: String, content: String) -> Result<(), String> {
|
||||
let full_path = safe_vault_path(&vault_path, &relative_path)?;
|
||||
|
||||
// Ensure parent directory exists
|
||||
if let Some(parent) = full_path.parent() {
|
||||
fs::create_dir_all(parent).map_err(|e| format!("Failed to create directory: {}", e))?;
|
||||
}
|
||||
|
||||
fs::write(&full_path, content).map_err(|e| format!("Failed to write note: {}", e))
|
||||
atomic_write(&full_path, &content)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
|
|
@ -221,7 +323,7 @@ fn get_or_create_daily(vault_path: String) -> Result<String, String> {
|
|||
|
||||
if !daily_path.exists() {
|
||||
let content = format!("# {}\n\n", today);
|
||||
fs::write(&daily_path, content).map_err(|e| format!("Failed to create daily note: {}", e))?;
|
||||
atomic_write(&daily_path, &content)?;
|
||||
}
|
||||
|
||||
Ok(relative_path)
|
||||
|
|
@ -246,10 +348,7 @@ fn get_vault_path() -> Result<Option<String>, String> {
|
|||
#[tauri::command]
|
||||
fn set_vault_path(path: String) -> Result<(), String> {
|
||||
let config_path = dirs_config_path();
|
||||
if let Some(parent) = config_path.parent() {
|
||||
fs::create_dir_all(parent).map_err(|e| e.to_string())?;
|
||||
}
|
||||
fs::write(&config_path, &path).map_err(|e| e.to_string())
|
||||
atomic_write(&config_path, &path)
|
||||
}
|
||||
|
||||
fn dirs_config_path() -> PathBuf {
|
||||
|
|
@ -336,9 +435,8 @@ fn search_vault(vault_path: String, query: String) -> Result<Vec<SearchResult>,
|
|||
|
||||
#[tauri::command]
|
||||
fn rename_note(vault_path: String, old_path: String, new_path: String) -> Result<(), String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let old_full = vault.join(&old_path);
|
||||
let new_full = vault.join(&new_path);
|
||||
let old_full = safe_vault_path(&vault_path, &old_path)?;
|
||||
let new_full = safe_vault_path(&vault_path, &new_path)?;
|
||||
|
||||
if !old_full.exists() {
|
||||
return Err("Source note does not exist".to_string());
|
||||
|
|
@ -360,24 +458,22 @@ fn update_wikilinks(vault_path: String, old_name: String, new_name: String) -> R
|
|||
let vault = Path::new(&vault_path);
|
||||
let mut updated_count = 0;
|
||||
|
||||
// Compile regexes once before the loop
|
||||
let pattern = format!(r"\[\[{}\]\]", regex::escape(&old_name));
|
||||
let pattern_with_alias = format!(r"\[\[{}\|", regex::escape(&old_name));
|
||||
let re1 = Regex::new(&pattern).map_err(|e| format!("Invalid regex: {}", e))?;
|
||||
let re2 = Regex::new(&pattern_with_alias).map_err(|e| format!("Invalid regex: {}", e))?;
|
||||
|
||||
for entry in WalkDir::new(vault)
|
||||
.into_iter()
|
||||
.filter_map(|e| e.ok())
|
||||
.filter(|e| e.path().extension().map_or(false, |ext| ext == "md"))
|
||||
{
|
||||
if let Ok(content) = fs::read_to_string(entry.path()) {
|
||||
// Replace [[old_name]] and [[old_name|display]] patterns
|
||||
let pattern = format!(r"\[\[{}\]\]", regex::escape(&old_name));
|
||||
let pattern_with_alias = format!(r"\[\[{}\|", regex::escape(&old_name));
|
||||
|
||||
let re1 = Regex::new(&pattern).unwrap();
|
||||
let re2 = Regex::new(&pattern_with_alias).unwrap();
|
||||
|
||||
if re1.is_match(&content) || re2.is_match(&content) {
|
||||
let new_content = re1.replace_all(&content, format!("[[{}]]", new_name));
|
||||
let new_content = re2.replace_all(&new_content, format!("[[{}|", new_name));
|
||||
fs::write(entry.path(), new_content.as_ref())
|
||||
.map_err(|e| format!("Failed to update links: {}", e))?;
|
||||
atomic_write(entry.path(), &new_content)?;
|
||||
updated_count += 1;
|
||||
}
|
||||
}
|
||||
|
|
@ -450,7 +546,7 @@ fn list_tags(vault_path: String) -> Result<Vec<TagInfo>, String> {
|
|||
|
||||
#[tauri::command]
|
||||
fn read_note_preview(vault_path: String, note_path: String, max_chars: Option<usize>) -> Result<String, String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| format!("Read failed: {}", e))?;
|
||||
let limit = max_chars.unwrap_or(200);
|
||||
|
||||
|
|
@ -458,17 +554,11 @@ fn read_note_preview(vault_path: String, note_path: String, max_chars: Option<us
|
|||
let cleaned: String = content
|
||||
.lines()
|
||||
.filter(|l| !l.trim().starts_with('#')) // remove headings
|
||||
.map(|l| {
|
||||
let s = l.to_string();
|
||||
s
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
|
||||
let re_wikilink = Regex::new(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]").unwrap();
|
||||
let cleaned = re_wikilink.replace_all(&cleaned, "$1").to_string();
|
||||
let re_fmt = Regex::new(r"[*_~`]").unwrap();
|
||||
let cleaned = re_fmt.replace_all(&cleaned, "").to_string();
|
||||
let cleaned = PREVIEW_WIKILINK_RE.replace_all(&cleaned, "$1").to_string();
|
||||
let cleaned = PREVIEW_FMT_RE.replace_all(&cleaned, "").to_string();
|
||||
let cleaned = cleaned.trim().to_string();
|
||||
|
||||
if cleaned.len() > limit {
|
||||
|
|
@ -506,11 +596,8 @@ fn add_vault(vault_path: String) -> Result<(), String> {
|
|||
vaults.insert(0, vault_path);
|
||||
vaults.truncate(10); // Keep last 10
|
||||
|
||||
if let Some(parent) = config_path.parent() {
|
||||
fs::create_dir_all(parent).map_err(|e| e.to_string())?;
|
||||
}
|
||||
let json = serde_json::to_string_pretty(&vaults).map_err(|e| e.to_string())?;
|
||||
fs::write(&config_path, json).map_err(|e| e.to_string())
|
||||
atomic_write(&config_path, &json)
|
||||
}
|
||||
|
||||
/* ── Templates ───────────────────────────────────────────── */
|
||||
|
|
@ -537,7 +624,7 @@ fn list_templates(vault_path: String) -> Result<Vec<TemplateInfo>, String> {
|
|||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
let rel = format!("_templates/{}", path.file_name().unwrap().to_string_lossy());
|
||||
let rel = format!("_templates/{}", path.file_name().unwrap_or_default().to_string_lossy());
|
||||
templates.push(TemplateInfo { name, path: rel });
|
||||
}
|
||||
}
|
||||
|
|
@ -551,8 +638,7 @@ fn create_from_template(
|
|||
template_path: String,
|
||||
note_name: String,
|
||||
) -> Result<String, String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let template_full = vault.join(&template_path);
|
||||
let template_full = safe_vault_path(&vault_path, &template_path)?;
|
||||
let template_content = fs::read_to_string(&template_full)
|
||||
.map_err(|e| format!("Failed to read template: {}", e))?;
|
||||
|
||||
|
|
@ -562,11 +648,8 @@ fn create_from_template(
|
|||
.replace("{{date}}", &today);
|
||||
|
||||
let note_path = format!("{}.md", note_name);
|
||||
let full_path = vault.join(¬e_path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
fs::create_dir_all(parent).map_err(|e| e.to_string())?;
|
||||
}
|
||||
fs::write(&full_path, content).map_err(|e| format!("Failed to create note: {}", e))?;
|
||||
let full_path = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
atomic_write(&full_path, &content)?;
|
||||
Ok(note_path)
|
||||
}
|
||||
|
||||
|
|
@ -588,14 +671,14 @@ fn set_favorites(vault_path: String, favorites: Vec<String>) -> Result<(), Strin
|
|||
let dir = Path::new(&vault_path).join(".graph-notes");
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
let json = serde_json::to_string_pretty(&favorites).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join("favorites.json"), json).map_err(|e| e.to_string())
|
||||
atomic_write(&dir.join("favorites.json"), &json)
|
||||
}
|
||||
|
||||
/* ── Frontmatter ────────────────────────────────────────── */
|
||||
|
||||
#[tauri::command]
|
||||
fn parse_frontmatter(vault_path: String, note_path: String) -> Result<serde_json::Value, String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
|
||||
if !content.starts_with("---\n") {
|
||||
|
|
@ -627,7 +710,7 @@ fn write_frontmatter(
|
|||
note_path: String,
|
||||
frontmatter: serde_json::Map<String, serde_json::Value>,
|
||||
) -> Result<(), String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
|
||||
// Strip existing frontmatter
|
||||
|
|
@ -658,7 +741,7 @@ fn write_frontmatter(
|
|||
format!("{}{}", yaml, body)
|
||||
};
|
||||
|
||||
fs::write(&full, new_content).map_err(|e| e.to_string())
|
||||
atomic_write(&full, &new_content)
|
||||
}
|
||||
|
||||
/* ── Attachments ────────────────────────────────────────── */
|
||||
|
|
@ -669,11 +752,13 @@ fn save_attachment(
|
|||
file_name: String,
|
||||
data: Vec<u8>,
|
||||
) -> Result<String, String> {
|
||||
// Validate filename — reject path separators and traversal
|
||||
let sanitized = safe_name(&file_name)?;
|
||||
let attach_dir = Path::new(&vault_path).join("_attachments");
|
||||
fs::create_dir_all(&attach_dir).map_err(|e| e.to_string())?;
|
||||
|
||||
// Deduplicate filename
|
||||
let mut target = attach_dir.join(&file_name);
|
||||
let mut target = attach_dir.join(&sanitized);
|
||||
let stem = target.file_stem().unwrap_or_default().to_string_lossy().to_string();
|
||||
let ext = target.extension().map(|e| format!(".{}", e.to_string_lossy())).unwrap_or_default();
|
||||
let mut counter = 1;
|
||||
|
|
@ -682,8 +767,9 @@ fn save_attachment(
|
|||
counter += 1;
|
||||
}
|
||||
|
||||
fs::write(&target, &data).map_err(|e| e.to_string())?;
|
||||
let rel = format!("_attachments/{}", target.file_name().unwrap().to_string_lossy());
|
||||
atomic_write_bytes(&target, &data)?;
|
||||
let final_name = target.file_name().unwrap_or_default().to_string_lossy();
|
||||
let rel = format!("_attachments/{}", final_name);
|
||||
Ok(rel)
|
||||
}
|
||||
|
||||
|
|
@ -714,13 +800,12 @@ fn list_daily_notes(vault_path: String) -> Result<Vec<String>, String> {
|
|||
return Ok(vec![]);
|
||||
}
|
||||
|
||||
let re = Regex::new(r"^\d{4}-\d{2}-\d{2}\.md$").unwrap();
|
||||
let mut dates: Vec<String> = Vec::new();
|
||||
|
||||
for entry in fs::read_dir(&daily_dir).map_err(|e| e.to_string())? {
|
||||
let entry = entry.map_err(|e| e.to_string())?;
|
||||
let name = entry.file_name().to_string_lossy().to_string();
|
||||
if re.is_match(&name) {
|
||||
if DAILY_NOTE_RE.is_match(&name) {
|
||||
dates.push(name.replace(".md", ""));
|
||||
}
|
||||
}
|
||||
|
|
@ -742,16 +827,14 @@ fn get_theme() -> Result<String, String> {
|
|||
|
||||
#[tauri::command]
|
||||
fn set_theme(theme: String) -> Result<(), String> {
|
||||
let dir = dirs_config_dir();
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join("theme"), &theme).map_err(|e| e.to_string())
|
||||
atomic_write(&dirs_config_dir().join("theme"), &theme)
|
||||
}
|
||||
|
||||
/* ── Export ──────────────────────────────────────────────── */
|
||||
|
||||
#[tauri::command]
|
||||
fn export_note_html(vault_path: String, note_path: String) -> Result<String, String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
let title = Path::new(¬e_path)
|
||||
.file_stem()
|
||||
|
|
@ -782,8 +865,7 @@ fn export_note_html(vault_path: String, note_path: String) -> Result<String, Str
|
|||
}
|
||||
|
||||
// Replace wikilinks
|
||||
let re_wl = Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
|
||||
let html_body = re_wl.replace_all(&html_body, |caps: ®ex::Captures| {
|
||||
let html_body = EXPORT_WIKILINK_RE.replace_all(&html_body, |caps: ®ex::Captures| {
|
||||
let target = caps.get(1).map_or("", |m| m.as_str()).trim();
|
||||
let label = caps.get(2).map_or(target, |m| m.as_str()).trim();
|
||||
format!("<a href=\"{}.html\">{}</a>", target, label)
|
||||
|
|
@ -824,7 +906,6 @@ pub struct TaskItem {
|
|||
#[tauri::command]
|
||||
fn list_tasks(vault_path: String) -> Result<Vec<TaskItem>, String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let re = Regex::new(r"^(\s*)- \[([ x/])\] (.+)$").unwrap();
|
||||
let mut tasks: Vec<TaskItem> = Vec::new();
|
||||
|
||||
for entry in WalkDir::new(vault)
|
||||
|
|
@ -840,7 +921,7 @@ fn list_tasks(vault_path: String) -> Result<Vec<TaskItem>, String> {
|
|||
|
||||
if let Ok(content) = fs::read_to_string(path) {
|
||||
for (i, line) in content.lines().enumerate() {
|
||||
if let Some(caps) = re.captures(line) {
|
||||
if let Some(caps) = TASK_RE.captures(line) {
|
||||
let marker = &caps[2];
|
||||
let state = match marker {
|
||||
"x" => "done",
|
||||
|
|
@ -863,7 +944,7 @@ fn list_tasks(vault_path: String) -> Result<Vec<TaskItem>, String> {
|
|||
|
||||
#[tauri::command]
|
||||
fn toggle_task(vault_path: String, note_path: String, line_number: usize, new_state: String) -> Result<(), String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
let mut lines: Vec<String> = content.lines().map(|s| s.to_string()).collect();
|
||||
|
||||
|
|
@ -877,9 +958,8 @@ fn toggle_task(vault_path: String, note_path: String, line_number: usize, new_st
|
|||
_ => " ",
|
||||
};
|
||||
|
||||
let re = Regex::new(r"- \[[ x/]\]").unwrap();
|
||||
let line = &lines[line_number - 1];
|
||||
if let Some(m) = re.find(line) {
|
||||
if let Some(m) = TASK_MARKER_RE.find(line) {
|
||||
let mut new_line = String::new();
|
||||
new_line.push_str(&line[..m.start()]);
|
||||
new_line.push_str(&format!("- [{}]", marker));
|
||||
|
|
@ -887,7 +967,7 @@ fn toggle_task(vault_path: String, note_path: String, line_number: usize, new_st
|
|||
lines[line_number - 1] = new_line;
|
||||
}
|
||||
|
||||
fs::write(&full, lines.join("\n") + "\n").map_err(|e| e.to_string())
|
||||
atomic_write(&full, &(lines.join("\n") + "\n"))
|
||||
}
|
||||
|
||||
/* ── Snapshots (Version History) ────────────────────────── */
|
||||
|
|
@ -901,23 +981,24 @@ pub struct SnapshotInfo {
|
|||
|
||||
#[tauri::command]
|
||||
fn save_snapshot(vault_path: String, note_path: String) -> Result<String, String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
|
||||
let safe_name = note_path.replace('/', "__").replace(".md", "");
|
||||
let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&safe_name);
|
||||
let sanitized_name = note_path.replace('/', "__").replace(".md", "");
|
||||
let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&sanitized_name);
|
||||
fs::create_dir_all(&history_dir).map_err(|e| e.to_string())?;
|
||||
|
||||
let ts = Local::now().format("%Y%m%d_%H%M%S").to_string();
|
||||
let snap_name = format!("{}.md", ts);
|
||||
// Snapshots are write-once, never overwritten — direct write is safe
|
||||
fs::write(history_dir.join(&snap_name), &content).map_err(|e| e.to_string())?;
|
||||
Ok(snap_name)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
fn list_snapshots(vault_path: String, note_path: String) -> Result<Vec<SnapshotInfo>, String> {
|
||||
let safe_name = note_path.replace('/', "__").replace(".md", "");
|
||||
let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&safe_name);
|
||||
let sanitized_name = note_path.replace('/', "__").replace(".md", "");
|
||||
let history_dir = Path::new(&vault_path).join(".graph-notes").join("history").join(&sanitized_name);
|
||||
|
||||
if !history_dir.exists() {
|
||||
return Ok(vec![]);
|
||||
|
|
@ -943,11 +1024,11 @@ fn list_snapshots(vault_path: String, note_path: String) -> Result<Vec<SnapshotI
|
|||
|
||||
#[tauri::command]
|
||||
fn read_snapshot(vault_path: String, note_path: String, snapshot_name: String) -> Result<String, String> {
|
||||
let safe_name = note_path.replace('/', "__").replace(".md", "");
|
||||
let sanitized_name = note_path.replace('/', "__").replace(".md", "");
|
||||
let snap_path = Path::new(&vault_path)
|
||||
.join(".graph-notes")
|
||||
.join("history")
|
||||
.join(&safe_name)
|
||||
.join(&sanitized_name)
|
||||
.join(&snapshot_name);
|
||||
fs::read_to_string(&snap_path).map_err(|e| e.to_string())
|
||||
}
|
||||
|
|
@ -986,7 +1067,7 @@ fn search_replace_vault(
|
|||
if count > 0 {
|
||||
if !dry_run {
|
||||
let new_content = content.replace(&search, &replace);
|
||||
let _ = fs::write(path, new_content);
|
||||
atomic_write(path, &new_content)?;
|
||||
}
|
||||
results.push(ReplaceResult {
|
||||
path: rel,
|
||||
|
|
@ -1035,7 +1116,7 @@ fn set_writing_goal(vault_path: String, note_path: String, goal: u32) -> Result<
|
|||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(&goals).map_err(|e| e.to_string())?;
|
||||
fs::write(&goals_path, json).map_err(|e| e.to_string())
|
||||
atomic_write(&goals_path, &json)
|
||||
}
|
||||
|
||||
/* ── Note Refactoring ───────────────────────────────────── */
|
||||
|
|
@ -1047,20 +1128,19 @@ fn extract_to_note(
|
|||
selected_text: String,
|
||||
new_note_name: String,
|
||||
) -> Result<String, String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let new_path = vault.join(format!("{}.md", &new_note_name));
|
||||
let new_path = safe_vault_path(&vault_path, &format!("{}.md", &new_note_name))?;
|
||||
if new_path.exists() {
|
||||
return Err(format!("Note '{}' already exists", new_note_name));
|
||||
}
|
||||
|
||||
// Create new note with extracted text
|
||||
fs::write(&new_path, &selected_text).map_err(|e| e.to_string())?;
|
||||
atomic_write(&new_path, &selected_text)?;
|
||||
|
||||
// Replace selected text with wikilink in source
|
||||
let source_full = vault.join(&source_path);
|
||||
let source_full = safe_vault_path(&vault_path, &source_path)?;
|
||||
let content = fs::read_to_string(&source_full).map_err(|e| e.to_string())?;
|
||||
let new_content = content.replacen(&selected_text, &format!("[[{}]]", new_note_name), 1);
|
||||
fs::write(&source_full, new_content).map_err(|e| e.to_string())?;
|
||||
atomic_write(&source_full, &new_content)?;
|
||||
|
||||
Ok(format!("{}.md", new_note_name))
|
||||
}
|
||||
|
|
@ -1072,15 +1152,15 @@ fn merge_notes(
|
|||
target_path: String,
|
||||
) -> Result<(), String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let source_full = vault.join(&source_path);
|
||||
let target_full = vault.join(&target_path);
|
||||
let source_full = safe_vault_path(&vault_path, &source_path)?;
|
||||
let target_full = safe_vault_path(&vault_path, &target_path)?;
|
||||
|
||||
let source_content = fs::read_to_string(&source_full).map_err(|e| e.to_string())?;
|
||||
let target_content = fs::read_to_string(&target_full).map_err(|e| e.to_string())?;
|
||||
|
||||
let source_name = source_path.replace(".md", "");
|
||||
let merged = format!("{}\n\n---\n\n## Merged from {}\n\n{}", target_content.trim_end(), source_name, source_content);
|
||||
fs::write(&target_full, merged).map_err(|e| e.to_string())?;
|
||||
atomic_write(&target_full, &merged)?;
|
||||
|
||||
// Delete source
|
||||
fs::remove_file(&source_full).map_err(|e| e.to_string())?;
|
||||
|
|
@ -1100,7 +1180,7 @@ fn merge_notes(
|
|||
&format!("[[{}]]", target_name),
|
||||
);
|
||||
if updated != content {
|
||||
let _ = fs::write(path, updated);
|
||||
atomic_write(path, &updated)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1124,7 +1204,7 @@ fn derive_key(password: &str, salt: &[u8]) -> [u8; 32] {
|
|||
|
||||
#[tauri::command]
|
||||
fn encrypt_note(vault_path: String, note_path: String, password: String) -> Result<(), String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
|
||||
let salt: [u8; 16] = rand::random();
|
||||
|
|
@ -1145,12 +1225,12 @@ fn encrypt_note(vault_path: String, note_path: String, password: String) -> Resu
|
|||
B64.encode(nonce_bytes),
|
||||
B64.encode(&ciphertext),
|
||||
);
|
||||
fs::write(&full, encoded).map_err(|e| e.to_string())
|
||||
atomic_write(&full, &encoded)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
fn decrypt_note(vault_path: String, note_path: String, password: String) -> Result<String, String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
|
||||
if !content.starts_with("GRAPHNOTES_ENC:v1:") {
|
||||
|
|
@ -1179,7 +1259,7 @@ fn decrypt_note(vault_path: String, note_path: String, password: String) -> Resu
|
|||
|
||||
#[tauri::command]
|
||||
fn is_encrypted(vault_path: String, note_path: String) -> Result<bool, String> {
|
||||
let full = Path::new(&vault_path).join(¬e_path);
|
||||
let full = safe_vault_path(&vault_path, ¬e_path)?;
|
||||
let content = fs::read_to_string(&full).map_err(|e| e.to_string())?;
|
||||
Ok(content.starts_with("GRAPHNOTES_ENC:v1:"))
|
||||
}
|
||||
|
|
@ -1200,7 +1280,6 @@ pub struct Flashcard {
|
|||
#[tauri::command]
|
||||
fn list_flashcards(vault_path: String) -> Result<Vec<Flashcard>, String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let re = Regex::new(r"\?\?\s*(.+?)\s*::\s*(.+?)\s*\?\?").unwrap();
|
||||
let mut cards: Vec<Flashcard> = Vec::new();
|
||||
|
||||
// Load schedule data
|
||||
|
|
@ -1223,7 +1302,7 @@ fn list_flashcards(vault_path: String) -> Result<Vec<Flashcard>, String> {
|
|||
|
||||
if let Ok(content) = fs::read_to_string(path) {
|
||||
for (i, line) in content.lines().enumerate() {
|
||||
for caps in re.captures_iter(line) {
|
||||
for caps in FLASHCARD_RE.captures_iter(line) {
|
||||
let q = caps[1].trim().to_string();
|
||||
let a = caps[2].trim().to_string();
|
||||
let card_id = format!("{}:{}", rel, i + 1);
|
||||
|
|
@ -1293,7 +1372,7 @@ fn update_card_schedule(
|
|||
obj.insert("due".into(), serde_json::json!(due.format("%Y-%m-%d").to_string()));
|
||||
|
||||
let json = serde_json::to_string_pretty(&srs).map_err(|e| e.to_string())?;
|
||||
fs::write(&srs_path, json).map_err(|e| e.to_string())
|
||||
atomic_write(&srs_path, &json)
|
||||
}
|
||||
|
||||
/* ── Fold State ─────────────────────────────────────────── */
|
||||
|
|
@ -1313,7 +1392,7 @@ fn save_fold_state(vault_path: String, note_path: String, folds: Vec<usize>) ->
|
|||
|
||||
data.insert(note_path, serde_json::json!(folds));
|
||||
let json = serde_json::to_string_pretty(&data).map_err(|e| e.to_string())?;
|
||||
fs::write(&folds_path, json).map_err(|e| e.to_string())
|
||||
atomic_write(&folds_path, &json)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
|
|
@ -1344,23 +1423,23 @@ fn get_custom_css() -> Result<String, String> {
|
|||
|
||||
#[tauri::command]
|
||||
fn set_custom_css(css: String) -> Result<(), String> {
|
||||
let config_dir = dirs_config_dir();
|
||||
fs::create_dir_all(&config_dir).map_err(|e| e.to_string())?;
|
||||
fs::write(config_dir.join("custom.css"), css).map_err(|e| e.to_string())
|
||||
atomic_write(&dirs_config_dir().join("custom.css"), &css)
|
||||
}
|
||||
|
||||
/* ── Workspace Layouts ──────────────────────────────────── */
|
||||
|
||||
#[tauri::command]
|
||||
fn save_workspace(vault_path: String, name: String, state: String) -> Result<(), String> {
|
||||
let sanitized = safe_name(&name)?;
|
||||
let dir = Path::new(&vault_path).join(".graph-notes").join("workspaces");
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join(format!("{}.json", name)), state).map_err(|e| e.to_string())
|
||||
atomic_write(&dir.join(format!("{}.json", sanitized)), &state)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
fn load_workspace(vault_path: String, name: String) -> Result<String, String> {
|
||||
let path = Path::new(&vault_path).join(".graph-notes").join("workspaces").join(format!("{}.json", name));
|
||||
let sanitized = safe_name(&name)?;
|
||||
let path = Path::new(&vault_path).join(".graph-notes").join("workspaces").join(format!("{}.json", sanitized));
|
||||
fs::read_to_string(&path).map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
|
|
@ -1384,7 +1463,7 @@ fn list_workspaces(vault_path: String) -> Result<Vec<String>, String> {
|
|||
fn save_tabs(vault_path: String, tabs: String) -> Result<(), String> {
|
||||
let dir = Path::new(&vault_path).join(".graph-notes");
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join("tabs.json"), tabs).map_err(|e| e.to_string())
|
||||
atomic_write(&dir.join("tabs.json"), &tabs)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
|
|
@ -1398,14 +1477,16 @@ fn load_tabs(vault_path: String) -> Result<String, String> {
|
|||
|
||||
#[tauri::command]
|
||||
fn save_canvas(vault_path: String, name: String, data: String) -> Result<(), String> {
|
||||
let sanitized = safe_name(&name)?;
|
||||
let dir = Path::new(&vault_path).join(".graph-notes").join("canvases");
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join(format!("{}.json", name)), data).map_err(|e| e.to_string())
|
||||
atomic_write(&dir.join(format!("{}.json", sanitized)), &data)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
fn load_canvas(vault_path: String, name: String) -> Result<String, String> {
|
||||
let path = Path::new(&vault_path).join(".graph-notes").join("canvases").join(format!("{}.json", name));
|
||||
let sanitized = safe_name(&name)?;
|
||||
let path = Path::new(&vault_path).join(".graph-notes").join("canvases").join(format!("{}.json", sanitized));
|
||||
if !path.exists() { return Ok("{}".to_string()); }
|
||||
fs::read_to_string(&path).map_err(|e| e.to_string())
|
||||
}
|
||||
|
|
@ -1437,7 +1518,6 @@ pub struct FrontmatterRow {
|
|||
fn query_frontmatter(vault_path: String) -> Result<Vec<FrontmatterRow>, String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let mut rows: Vec<FrontmatterRow> = Vec::new();
|
||||
let fm_re = Regex::new(r"(?s)^---\n(.+?)\n---").unwrap();
|
||||
|
||||
for entry in WalkDir::new(vault)
|
||||
.into_iter()
|
||||
|
|
@ -1449,7 +1529,7 @@ fn query_frontmatter(vault_path: String) -> Result<Vec<FrontmatterRow>, String>
|
|||
if rel.starts_with(".") { continue; }
|
||||
|
||||
if let Ok(content) = fs::read_to_string(path) {
|
||||
if let Some(caps) = fm_re.captures(&content) {
|
||||
if let Some(caps) = FRONTMATTER_RE.captures(&content) {
|
||||
let yaml_str = &caps[1];
|
||||
let mut fields = serde_json::Map::new();
|
||||
for line in yaml_str.lines() {
|
||||
|
|
@ -1523,7 +1603,7 @@ pub struct DataviewResult {
|
|||
#[tauri::command]
|
||||
fn run_dataview_query(vault_path: String, query: String) -> Result<DataviewResult, String> {
|
||||
let vault = Path::new(&vault_path);
|
||||
let fm_re = Regex::new(r"(?s)^---\n(.+?)\n---").unwrap();
|
||||
|
||||
|
||||
// Parse query: TABLE field1, field2 [FROM ""] [WHERE cond] [SORT field [ASC|DESC]]
|
||||
let query_upper = query.to_uppercase();
|
||||
|
|
@ -1560,7 +1640,7 @@ fn run_dataview_query(vault_path: String, query: String) -> Result<DataviewResul
|
|||
let mut fields_map: std::collections::HashMap<String, String> = std::collections::HashMap::new();
|
||||
fields_map.insert("title".into(), rel.replace(".md", ""));
|
||||
|
||||
if let Some(caps) = fm_re.captures(&content) {
|
||||
if let Some(caps) = FRONTMATTER_RE.captures(&content) {
|
||||
for line in caps[1].lines() {
|
||||
if let Some(idx) = line.find(':') {
|
||||
let k = line[..idx].trim().to_lowercase();
|
||||
|
|
@ -1601,23 +1681,34 @@ fn git_status(vault_path: String) -> Result<String, String> {
|
|||
.current_dir(&vault_path)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?;
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
} else {
|
||||
Err(String::from_utf8_lossy(&output.stderr).to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
fn git_commit(vault_path: String, message: String) -> Result<String, String> {
|
||||
let _ = Command::new("git")
|
||||
let add_output = Command::new("git")
|
||||
.args(["add", "."])
|
||||
.current_dir(&vault_path)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?;
|
||||
if !add_output.status.success() {
|
||||
return Err(String::from_utf8_lossy(&add_output.stderr).to_string());
|
||||
}
|
||||
|
||||
let output = Command::new("git")
|
||||
.args(["commit", "-m", &message])
|
||||
.current_dir(&vault_path)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?;
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
} else {
|
||||
Err(String::from_utf8_lossy(&output.stderr).to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
|
|
@ -1655,7 +1746,11 @@ fn git_init(vault_path: String) -> Result<String, String> {
|
|||
.current_dir(&vault_path)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?;
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout).to_string())
|
||||
} else {
|
||||
Err(String::from_utf8_lossy(&output.stderr).to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/* ── v0.8 Commands ──────────────────────────────────────── */
|
||||
|
|
@ -1780,7 +1875,8 @@ fn export_vault_zip(vault_path: String, output_path: String) -> Result<String, S
|
|||
{
|
||||
let path = entry.path();
|
||||
let rel = path.strip_prefix(vault).unwrap_or(path).to_string_lossy().to_string();
|
||||
if rel.starts_with(".") { continue; }
|
||||
// Skip hidden files, in-progress atomic writes (~tmp suffix)
|
||||
if rel.starts_with(".") || rel.ends_with("~tmp") { continue; }
|
||||
|
||||
let content = fs::read(path).map_err(|e| e.to_string())?;
|
||||
zip.start_file(&rel, options).map_err(|e| e.to_string())?;
|
||||
|
|
@ -1820,7 +1916,7 @@ fn import_folder(vault_path: String, source_path: String) -> Result<u32, String>
|
|||
fn save_shortcuts(vault_path: String, shortcuts_json: String) -> Result<(), String> {
|
||||
let dir = Path::new(&vault_path).join(".graph-notes");
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join("shortcuts.json"), shortcuts_json).map_err(|e| e.to_string())
|
||||
atomic_write(&dir.join("shortcuts.json"), &shortcuts_json)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
|
|
@ -1849,7 +1945,7 @@ fn set_pinned(vault_path: String, pinned: Vec<String>) -> Result<(), String> {
|
|||
let dir = Path::new(&vault_path).join(".graph-notes");
|
||||
fs::create_dir_all(&dir).map_err(|e| e.to_string())?;
|
||||
let json = serde_json::to_string_pretty(&pinned).map_err(|e| e.to_string())?;
|
||||
fs::write(dir.join("pinned.json"), json).map_err(|e| e.to_string())
|
||||
atomic_write(&dir.join("pinned.json"), &json)
|
||||
}
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
|
|
|
|||
|
|
@ -73,18 +73,20 @@ export function Editor() {
|
|||
(value: string) => {
|
||||
setNoteContent(value);
|
||||
if (saveTimeoutRef.current) clearTimeout(saveTimeoutRef.current);
|
||||
// Capture the note path NOW (at call time), not when the timer fires.
|
||||
// This prevents writing old content to a different note after switching.
|
||||
const capturedNote = currentNoteRef.current;
|
||||
const capturedVault = vaultPathRef.current;
|
||||
saveTimeoutRef.current = setTimeout(async () => {
|
||||
const v = vaultPathRef.current;
|
||||
const n = currentNoteRef.current;
|
||||
if (v && n) {
|
||||
if (capturedVault && capturedNote) {
|
||||
setIsSaving(true);
|
||||
await writeNote(v, n, value);
|
||||
await writeNote(capturedVault, capturedNote, value);
|
||||
setIsSaving(false);
|
||||
// Auto-snapshot on save (max 1 per 5 min)
|
||||
const now = Date.now();
|
||||
if (now - lastSnapshotRef2.current > 5 * 60 * 1000 && value.length > 50) {
|
||||
lastSnapshotRef2.current = now;
|
||||
saveSnapshot(v, n).catch(() => { });
|
||||
saveSnapshot(capturedVault, capturedNote).catch(() => { });
|
||||
}
|
||||
}
|
||||
}, 500);
|
||||
|
|
@ -109,6 +111,11 @@ export function Editor() {
|
|||
const contentRenderedRef = useRef(false);
|
||||
useEffect(() => {
|
||||
if (currentNote !== lastNoteRef.current) {
|
||||
// Cancel any pending debounced save from the previous note
|
||||
if (saveTimeoutRef.current) {
|
||||
clearTimeout(saveTimeoutRef.current);
|
||||
saveTimeoutRef.current = undefined;
|
||||
}
|
||||
lastNoteRef.current = currentNote;
|
||||
contentRenderedRef.current = false;
|
||||
renderToDOM(noteContent);
|
||||
|
|
@ -578,7 +585,7 @@ export function Editor() {
|
|||
return;
|
||||
}
|
||||
}
|
||||
}, [vaultPath, currentNote, setNoteContent]);
|
||||
}, [vaultPath, saveContent]);
|
||||
|
||||
// Slash command trigger
|
||||
const checkSlashCommand = useCallback(() => {
|
||||
|
|
@ -627,13 +634,10 @@ export function Editor() {
|
|||
sel.removeAllRanges();
|
||||
sel.addRange(range);
|
||||
|
||||
// Save
|
||||
const raw = ceRef.current?.innerText || "";
|
||||
setNoteContent(raw);
|
||||
if (currentNote && vaultPath) {
|
||||
writeNote(vaultPath, currentNote, raw).catch(() => { });
|
||||
}
|
||||
}, [vaultPath, currentNote, setNoteContent]);
|
||||
// Save through the standard debounced pipeline
|
||||
const raw = domToMarkdown(ceRef.current!);
|
||||
saveContent(raw);
|
||||
}, [saveContent]);
|
||||
|
||||
// Breadcrumb segments
|
||||
const breadcrumbs = currentNote
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue