diff --git a/apps/storage/.gitignore b/apps/storage/.gitignore new file mode 100644 index 0000000..de42418 --- /dev/null +++ b/apps/storage/.gitignore @@ -0,0 +1,38 @@ +# dependencies (bun install) +node_modules + +# output +out +dist +*.tgz + +# code coverage +coverage +*.lcov + +# logs +logs +*.log +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# caches +.eslintcache +.cache +*.tsbuildinfo + +# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder config +.DS_Store + +# Test vaults created by demo.ts / test runs +test-vault/ +demo-vault/ diff --git a/apps/storage/README.md b/apps/storage/README.md new file mode 100644 index 0000000..ce0334a --- /dev/null +++ b/apps/storage/README.md @@ -0,0 +1,45 @@ +# Smart Notes Storage + +What it does — in plain terms +Vault (the notes folder) +Open any folder as a vault +Re-opening the same folder is safe — nothing breaks or duplicates +Works even if you already have existing .md files in the folder +Notes +Create a note — writes a .md file, saves metadata to DB +Read a note — gives you the content + title + tags +Update a note — saves new content, keeps frontmatter intact +Rename a note — renames the file, updates the title inside the file +Move a note to a different folder +Delete a note — moves to trash by default, or permanent delete +Trash +Deleted notes go to a .trash folder, not permanently gone +List everything in the trash +Restore a trashed note back to its original location +Folders +Create a folder (with optional icon and pin-to-top) +Rename a folder — all notes inside automatically get their paths updated +Delete a folder — safely trashes all notes inside first +Tags +Attach tags to a note when creating or updating +Tags stored in both the DB and the .md file frontmatter +Filter notes by tag +List all tags in the vault with usage counts +Search +Search notes by title +Sorting & Pagination +List 
notes sorted by last updated, created date, or name +Limit results per page (useful for sidebars with many notes) +Conflict Detection +If a note was edited by another app (e.g. VS Code) while you had it open, it won't silently overwrite — saves a conflict copy and alerts you +File Watcher +Detects when any .md file is added, changed, or deleted outside your app +DB automatically stays in sync even if the user edits notes in another editor +Events +Every action fires an event (noteCreated, noteUpdated, noteDeleted, noteRenamed, noteMoved, folderCreated, folderRenamed, folderDeleted, conflictDetected) +Your UI just listens and reacts — no polling needed +RAG / AI pipeline bridge +Tells the RAG pipeline which notes need to be embedded (new, changed, or previously failed) +Tells the RAG pipeline which notes changed since a specific time (for incremental updates) +Lets the RAG pipeline report back when it finishes indexing a note +Bulk-read all notes with content in one call for initial indexing \ No newline at end of file diff --git a/apps/storage/index.ts b/apps/storage/index.ts new file mode 100644 index 0000000..e84b0b6 --- /dev/null +++ b/apps/storage/index.ts @@ -0,0 +1 @@ +export * from "./src/index" \ No newline at end of file diff --git a/apps/storage/package.json b/apps/storage/package.json new file mode 100644 index 0000000..4a861e3 --- /dev/null +++ b/apps/storage/package.json @@ -0,0 +1,31 @@ +{ + "name": "storage", + "module": "index.ts", + "type": "module", + "private": true, + "scripts": { + "test": "vitest run", + "test:watch": "vitest", + "demo": "bun run demo.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "better-sqlite3": "^12.6.2", + "chokidar": "^5.0.0", + "eventemitter3": "^5.0.4", + "gray-matter": "^4.0.3", + "js-yaml": "^4.1.0", + "uuid": "^13.0.0" + }, + "devDependencies": { + "@types/better-sqlite3": "^7.6.13", + "@types/bun": "latest", + "@types/js-yaml": "^4.0.9", + "@types/node": "^25.3.3", + "@types/uuid": "^11.0.0", + 
"vitest": "^4.0.18" + }, + "peerDependencies": { + "typescript": "^5" + } +} diff --git a/apps/storage/src/DB.ts b/apps/storage/src/DB.ts new file mode 100644 index 0000000..a937cc2 --- /dev/null +++ b/apps/storage/src/DB.ts @@ -0,0 +1,560 @@ +import path from "path" +import Database from "better-sqlite3" +import { ensureDir } from "./fsutils" +import type { NoteMeta, NoteID, Folder, FolderID, EmbeddingStatus, ListNotesOptions } from "./types" + +// Bump this when the schema changes to trigger migrations +const STORAGE_VERSION = "2.1.0" + +export default class DB { + dbPath: string + db: InstanceType + + constructor(vaultPath: string) { + const grimoireDir = path.join(vaultPath, ".grimoire") + this.dbPath = path.join(grimoireDir, "meta.db") + + ensureDir(grimoireDir) + + this.db = new Database(this.dbPath) + this.db.exec("PRAGMA journal_mode = WAL;") + this.db.exec("PRAGMA foreign_keys = ON;") + this.initializeSchema() + this.runMigrations() + } + + // ==================== SCHEMA ==================== + + initializeSchema() { + this.db.exec(` + CREATE TABLE IF NOT EXISTS notes ( + note_id TEXT PRIMARY KEY, + path TEXT UNIQUE NOT NULL, + display_name TEXT, + folder_path TEXT, + created_at INTEGER, + updated_at INTEGER, + content_hash TEXT, + source TEXT DEFAULT 'md', + deleted_at INTEGER DEFAULT NULL, + embedding_status TEXT DEFAULT 'pending', + last_embedded_at INTEGER DEFAULT NULL, + original_path TEXT DEFAULT NULL + ); + `) + + this.db.exec(` + CREATE TABLE IF NOT EXISTS note_tags ( + note_id TEXT NOT NULL, + tag TEXT NOT NULL, + PRIMARY KEY (note_id, tag), + FOREIGN KEY (note_id) REFERENCES notes(note_id) ON DELETE CASCADE + ); + `) + + this.db.exec(` + CREATE TABLE IF NOT EXISTS folders ( + folder_id TEXT PRIMARY KEY, + path TEXT UNIQUE NOT NULL, + name TEXT NOT NULL, + parent_path TEXT NOT NULL DEFAULT '', + created_at INTEGER NOT NULL, + sort_order INTEGER NOT NULL DEFAULT 0, + icon TEXT DEFAULT NULL, + pinned INTEGER NOT NULL DEFAULT 0 + ); + `) + + 
this.db.exec(` + CREATE TABLE IF NOT EXISTS config ( + key TEXT PRIMARY KEY, + value TEXT + ); + `) + + this.db.exec(`CREATE INDEX IF NOT EXISTS idx_notes_folder ON notes(folder_path);`) + this.db.exec(`CREATE INDEX IF NOT EXISTS idx_notes_updated ON notes(updated_at);`) + this.db.exec(`CREATE INDEX IF NOT EXISTS idx_notes_embedding ON notes(embedding_status);`) + this.db.exec(`CREATE INDEX IF NOT EXISTS idx_tags_tag ON note_tags(tag);`) + this.db.exec(`CREATE INDEX IF NOT EXISTS idx_folders_parent ON folders(parent_path);`) + } + + // ==================== MIGRATIONS ==================== + + runMigrations() { + const currentVersion = this.getConfig("storage_version") + + if (!currentVersion) { + this.setConfig("storage_version", STORAGE_VERSION) + this.setConfig("initialized_at", Date.now().toString()) + console.log(`[DB] Initialized storage version ${STORAGE_VERSION}`) + return + } + + if (currentVersion === "1.0.0" || currentVersion === "2.0.0") { + console.log(`[DB] Migrating from ${currentVersion} to 2.1.0`) + try { this.db.exec(`ALTER TABLE notes ADD COLUMN embedding_status TEXT DEFAULT 'pending';`) } catch (_) {} + try { this.db.exec(`ALTER TABLE notes ADD COLUMN last_embedded_at INTEGER DEFAULT NULL;`) } catch (_) {} + try { this.db.exec(`ALTER TABLE notes ADD COLUMN original_path TEXT DEFAULT NULL;`) } catch (_) {} + this.db.prepare(`UPDATE notes SET embedding_status = 'pending' WHERE embedding_status IS NULL`).run() + this.setConfig("storage_version", STORAGE_VERSION) + console.log(`[DB] Migration to 2.1.0 complete`) + } + } + + // ==================== CONFIG ==================== + + getConfig(key: string): string | null { + const row = this.db.prepare("SELECT value FROM config WHERE key = ?").get(key) as any + return row ? 
row.value : null + } + + setConfig(key: string, value: string): void { + this.db.prepare("INSERT OR REPLACE INTO config (key, value) VALUES (?, ?)").run(key, value) + } + + // ==================== NOTE CRUD ==================== + + /** + * Bulk upsert notes. On path collision, updates metadata but PRESERVES the existing note_id. + * This is the fix for the UUID-regeneration bug: re-scanning never replaces an existing ID. + */ + bulkUpsertNotes(notes: NoteMeta[]) { + const upsert = this.db.prepare(` + INSERT INTO notes (note_id, path, display_name, folder_path, created_at, updated_at, content_hash, source, embedding_status) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(path) DO UPDATE SET + display_name = excluded.display_name, + folder_path = excluded.folder_path, + updated_at = excluded.updated_at, + content_hash = excluded.content_hash, + source = excluded.source, + deleted_at = NULL, + embedding_status = CASE + WHEN notes.content_hash != excluded.content_hash THEN 'stale' + ELSE notes.embedding_status + END + `) + + const transaction = this.db.transaction((notes: NoteMeta[]) => { + for (const n of notes) { + upsert.run( + n.note_id, + n.path, + n.display_name, + n.folder_path, + n.created_at, + n.updated_at, + n.content_hash ?? null, + n.source ?? "md", + n.embedding_status ?? "pending", + ) + } + }) + + transaction(notes) + } + + insertNote(note: NoteMeta): void { + this.db.prepare(` + INSERT INTO notes (note_id, path, display_name, folder_path, created_at, updated_at, content_hash, source, embedding_status) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + `).run( + note.note_id, + note.path, + note.display_name, + note.folder_path, + note.created_at, + note.updated_at, + note.content_hash ?? null, + note.source ?? "md", + note.embedding_status ?? "pending", + ) + } + + getNoteById(noteId: NoteID): NoteMeta | null { + const row = this.db + .prepare("SELECT * FROM notes WHERE note_id = ? AND deleted_at IS NULL") + .get(noteId) as any + return row ? 
this.rowToNoteMeta(row) : null + } + + getNoteByPath(notePath: string): NoteMeta | null { + const row = this.db + .prepare("SELECT * FROM notes WHERE path = ? AND deleted_at IS NULL") + .get(notePath) as any + return row ? this.rowToNoteMeta(row) : null + } + + /** Get a note regardless of deleted_at — used for trash/restore */ + getNoteByPathIncDeleted(notePath: string): NoteMeta | null { + const row = this.db + .prepare("SELECT * FROM notes WHERE path = ?") + .get(notePath) as any + return row ? this.rowToNoteMeta(row) : null + } + + updateNote(noteId: NoteID, updates: Partial>): void { + const fields = Object.keys(updates).map((key) => `${key} = ?`).join(", ") + const values = Object.values(updates) + this.db.prepare(`UPDATE notes SET ${fields} WHERE note_id = ?`).run(...values, noteId) + } + + updateNoteHashAndTime(noteId: NoteID, contentHash: string, updatedAt: number): void { + this.db.prepare(` + UPDATE notes + SET content_hash = ?, updated_at = ?, embedding_status = 'stale' + WHERE note_id = ? + `).run(contentHash, updatedAt, noteId) + } + + softDeleteNote(noteId: NoteID): void { + this.db.prepare("UPDATE notes SET deleted_at = ? WHERE note_id = ?").run(Date.now(), noteId) + } + + /** + * Soft delete and record the trash path so we can restore later. + * Stores the pre-trash path in `original_path`, then updates `path` to trashPath. + */ + softDeleteNoteWithPath( + noteId: NoteID, + trashRelativePath: string, + originalPath: string + ): void { + this.db.prepare(` + UPDATE notes + SET deleted_at = ?, original_path = ?, path = ? + WHERE note_id = ? + `).run(Date.now(), originalPath, trashRelativePath, noteId) + } + + /** + * Restore a trashed note: swap path back to original_path and clear deleted_at / original_path. + * Returns the original path so the caller can move the file. 
+ */ + restoreNoteFromTrash(noteId: NoteID): string | null { + const row = this.db + .prepare("SELECT original_path, path FROM notes WHERE note_id = ?") + .get(noteId) as any + + if (!row || !row.original_path) return null + + this.db.prepare(` + UPDATE notes + SET deleted_at = NULL, path = original_path, original_path = NULL, + folder_path = ?, embedding_status = 'stale' + WHERE note_id = ? + `).run( + row.original_path.split("/").slice(0, -1).join("/") || "", + noteId + ) + + return row.original_path + } + + deleteNote(noteId: NoteID): void { + this.db.prepare("DELETE FROM notes WHERE note_id = ?").run(noteId) + } + + restoreNote(noteId: NoteID): void { + this.db.prepare("UPDATE notes SET deleted_at = NULL WHERE note_id = ?").run(noteId) + } + + // ==================== NOTE QUERIES ==================== + + listNotes(options: ListNotesOptions = {}): NoteMeta[] { + const { + folderPath, + offset = 0, + limit, + orderBy = "updated_at", + direction = "desc", + } = options + + // Whitelist to prevent SQL injection + const safeOrder = ["updated_at", "created_at", "display_name"].includes(orderBy) + ? orderBy + : "updated_at" + const safeDir = direction === "asc" ? "ASC" : "DESC" + + let query = `SELECT * FROM notes WHERE deleted_at IS NULL` + const params: any[] = [] + + if (folderPath !== undefined) { + query += " AND folder_path = ?" + params.push(folderPath) + } + + query += ` ORDER BY ${safeOrder} ${safeDir}` + + if (limit !== undefined) { + query += " LIMIT ? OFFSET ?" 
+ params.push(limit, offset) + } + + const rows = this.db.prepare(query).all(...params) as any[] + return rows.map(this.rowToNoteMeta) + } + + /** List notes currently in trash (soft-deleted) */ + listTrashedNotes(): NoteMeta[] { + const rows = this.db + .prepare("SELECT * FROM notes WHERE deleted_at IS NOT NULL ORDER BY deleted_at DESC") + .all() as any[] + return rows.map(this.rowToNoteMeta) + } + + searchNotes(searchTerm: string): NoteMeta[] { + const rows = this.db.prepare(` + SELECT * FROM notes + WHERE deleted_at IS NULL + AND display_name LIKE ? + ORDER BY updated_at DESC + `).all(`%${searchTerm}%`) as any[] + return rows.map(this.rowToNoteMeta) + } + + searchByTags(tags: string[]): NoteMeta[] { + if (tags.length === 0) return [] + const placeholders = tags.map(() => "?").join(", ") + const rows = this.db.prepare(` + SELECT DISTINCT n.* FROM notes n + JOIN note_tags t ON n.note_id = t.note_id + WHERE n.deleted_at IS NULL + AND t.tag IN (${placeholders}) + ORDER BY n.updated_at DESC + `).all(...tags) as any[] + return rows.map(this.rowToNoteMeta) + } + + countNotes(): number { + const row = this.db + .prepare("SELECT COUNT(*) as count FROM notes WHERE deleted_at IS NULL") + .get() as any + return row.count + } + + getFolderPaths(): string[] { + const rows = this.db.prepare(` + SELECT DISTINCT folder_path + FROM notes + WHERE deleted_at IS NULL + ORDER BY folder_path + `).all() as any[] + return rows.map((r) => r.folder_path) + } + + // ==================== RAG METHODS ==================== + + /** + * Notes modified after a given timestamp — for incremental RAG indexing. + * Returns notes where updated_at > sinceMs OR embedding_status = 'pending'. + */ + getNotesModifiedSince(sinceMs: number): NoteMeta[] { + const rows = this.db.prepare(` + SELECT * FROM notes + WHERE deleted_at IS NULL + AND updated_at > ? + ORDER BY updated_at ASC + `).all(sinceMs) as any[] + return rows.map(this.rowToNoteMeta) + } + + /** + * Notes that the RAG pipeline needs to (re)index. 
+ * Covers: never embedded (pending), content changed (stale), or previous failure (error). + */ + getNotesNeedingReindex(): NoteMeta[] { + const rows = this.db.prepare(` + SELECT * FROM notes + WHERE deleted_at IS NULL + AND ( + embedding_status IN ('pending', 'stale', 'error') + OR (embedding_status = 'indexed' AND updated_at > last_embedded_at) + ) + ORDER BY updated_at ASC + `).all() as any[] + return rows.map(this.rowToNoteMeta) + } + + /** Update the embedding state for a note (called by the RAG pipeline after indexing) */ + updateEmbeddingStatus(noteId: NoteID, status: EmbeddingStatus): void { + const lastEmbeddedAt = status === "indexed" ? Date.now() : null + this.db.prepare(` + UPDATE notes + SET embedding_status = ?, last_embedded_at = ? + WHERE note_id = ? + `).run(status, lastEmbeddedAt, noteId) + } + + // ==================== TAG METHODS ==================== + + /** Replace all tags on a note (atomic: deletes then inserts) */ + setNoteTags(noteId: NoteID, tags: string[]): void { + const del = this.db.prepare("DELETE FROM note_tags WHERE note_id = ?") + const ins = this.db.prepare("INSERT OR IGNORE INTO note_tags (note_id, tag) VALUES (?, ?)") + + this.db.transaction(() => { + del.run(noteId) + for (const tag of tags) { + if (tag.trim()) ins.run(noteId, tag.trim().toLowerCase()) + } + })() + } + + getNoteTags(noteId: NoteID): string[] { + const rows = this.db + .prepare("SELECT tag FROM note_tags WHERE note_id = ? 
ORDER BY tag") + .all(noteId) as any[] + return rows.map((r) => r.tag) + } + + /** List all unique tags used in the vault */ + getAllTags(): { tag: string; count: number }[] { + return this.db.prepare(` + SELECT t.tag, COUNT(*) as count + FROM note_tags t + JOIN notes n ON n.note_id = t.note_id + WHERE n.deleted_at IS NULL + GROUP BY t.tag + ORDER BY count DESC, t.tag ASC + `).all() as any[] + } + + // ==================== FOLDER CRUD ==================== + + insertFolder(folder: Folder): void { + this.db.prepare(` + INSERT OR IGNORE INTO folders (folder_id, path, name, parent_path, created_at, sort_order, icon, pinned) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + `).run( + folder.folder_id, + folder.path, + folder.name, + folder.parent_path, + folder.created_at, + folder.sort_order ?? 0, + folder.icon ?? null, + folder.pinned ? 1 : 0, + ) + } + + getFolderByPath(folderPath: string): Folder | null { + const row = this.db + .prepare("SELECT * FROM folders WHERE path = ?") + .get(folderPath) as any + return row ? this.rowToFolder(row) : null + } + + getFolderById(folderId: FolderID): Folder | null { + const row = this.db + .prepare("SELECT * FROM folders WHERE folder_id = ?") + .get(folderId) as any + return row ? this.rowToFolder(row) : null + } + + listFolders(parentPath?: string): Folder[] { + if (parentPath !== undefined) { + const rows = this.db + .prepare("SELECT * FROM folders WHERE parent_path = ? 
ORDER BY sort_order ASC, name ASC") + .all(parentPath) as any[] + return rows.map(this.rowToFolder) + } + const rows = this.db + .prepare("SELECT * FROM folders ORDER BY sort_order ASC, name ASC") + .all() as any[] + return rows.map(this.rowToFolder) + } + + updateFolder(folderId: FolderID, updates: Partial>): void { + const fields = Object.keys(updates).map((k) => `${k} = ?`).join(", ") + const values = Object.values(updates) + this.db.prepare(`UPDATE folders SET ${fields} WHERE folder_id = ?`).run(...values, folderId) + } + + /** Rename a folder path — also updates all child note paths and child folder paths */ + renameFolderPath(oldPath: string, newPath: string): number { + // Update notes whose folder_path matches exactly + const r1 = this.db.prepare(` + UPDATE notes SET folder_path = ? WHERE folder_path = ? AND deleted_at IS NULL + `).run(newPath, oldPath) as any + + // Update notes in sub-folders (e.g. oldPath/sub -> newPath/sub) + this.db.prepare(` + UPDATE notes + SET folder_path = ? || SUBSTR(folder_path, LENGTH(?) + 1) + WHERE folder_path LIKE ? AND deleted_at IS NULL + `).run(newPath, oldPath, oldPath + "/%") + + // Update note paths themselves + this.db.prepare(` + UPDATE notes + SET path = ? || SUBSTR(path, LENGTH(?) + 1) + WHERE path LIKE ? AND deleted_at IS NULL + `).run(newPath, oldPath, oldPath + "/%") + + // Update child folder paths + this.db.prepare(` + UPDATE folders + SET path = ? || SUBSTR(path, LENGTH(?) + 1), + parent_path = CASE + WHEN parent_path = ? THEN ? + ELSE ? || SUBSTR(parent_path, LENGTH(?) + 1) + END + WHERE path LIKE ? + `).run(newPath, oldPath, oldPath, newPath, newPath, oldPath, oldPath + "/%") + + // Update the folder itself + this.db.prepare("UPDATE folders SET path = ?, name = ? WHERE path = ?").run( + newPath, + newPath.split("/").pop() ?? 
newPath, + oldPath + ) + + return (r1.changes as number) + } + + deleteFolder(folderId: FolderID): void { + this.db.prepare("DELETE FROM folders WHERE folder_id = ?").run(folderId) + } + + deleteFolderByPath(folderPath: string): void { + this.db.prepare("DELETE FROM folders WHERE path = ?").run(folderPath) + } + + // ==================== HELPERS ==================== + + private rowToNoteMeta(row: any): NoteMeta { + return { + note_id: row.note_id, + path: row.path, + display_name: row.display_name, + folder_path: row.folder_path, + created_at: row.created_at, + updated_at: row.updated_at, + content_hash: row.content_hash, + source: row.source, + deleted_at: row.deleted_at ?? null, + embedding_status: row.embedding_status ?? "pending", + last_embedded_at: row.last_embedded_at ?? null, + } + } + + private rowToFolder(row: any): Folder { + return { + folder_id: row.folder_id, + path: row.path, + name: row.name, + parent_path: row.parent_path, + created_at: row.created_at, + sort_order: row.sort_order, + icon: row.icon ?? undefined, + pinned: row.pinned === 1, + } + } + + close(): void { + this.db.close() + } +} + diff --git a/apps/storage/src/Watcher.ts b/apps/storage/src/Watcher.ts new file mode 100644 index 0000000..18909c7 --- /dev/null +++ b/apps/storage/src/Watcher.ts @@ -0,0 +1,200 @@ +import chokidar from "chokidar" +import type { FSWatcher } from "chokidar" +import path from "path" +import type { NoteID } from "./types" + +/** + * File system watcher with debouncing and internal write suppression + * Detects external changes to notes (edits in VS Code, etc.) 
+ */ + +export type WatcherEventType = "add" | "change" | "unlink" + +export interface WatcherEvent { + type: WatcherEventType + path: string // Absolute path + relativePath: string // Vault-relative POSIX path + isInternal: boolean // True if triggered by our own write +} + +export type WatcherHandler = (event: WatcherEvent) => void | Promise + +export default class Watcher { + private vaultPath: string + private watcher: FSWatcher | null = null + private handler: WatcherHandler | null = null + + // Track recent internal writes to suppress echoes + private recentWrites = new Map() // path -> expiry timestamp + private suppressionWindow = 500 // ms + + // Debounce settings + private debounceTimers = new Map() + private debounceDelay = 300 // ms + + constructor(vaultPath: string) { + this.vaultPath = vaultPath + } + + /** + * Start watching the vault folder + */ + start(handler: WatcherHandler): void { + if (this.watcher) { + console.warn("Watcher already started") + return + } + + this.handler = handler + + this.watcher = chokidar.watch(this.vaultPath, { + ignored: [ + // Ignore system folders + /(^|[\/\\])\../, // Dot files/folders (.grimoire, .conflicts, .git) + "**/node_modules/**", + "**/.trash/**", + "**/.conflicts/**", + "**/.derived/**", + ], + ignoreInitial: true, // Don't fire events for existing files + persistent: true, + awaitWriteFinish: { + stabilityThreshold: 100, // Wait 100ms for file to stabilize + pollInterval: 50, + }, + }) + + // Listen to events + this.watcher + .on("add", (filePath) => this.handleEvent("add", filePath)) + .on("change", (filePath) => this.handleEvent("change", filePath)) + .on("unlink", (filePath) => this.handleEvent("unlink", filePath)) + .on("error", (error) => console.error("Watcher error:", error)) + + console.log(`Watcher started for: ${this.vaultPath}`) + } + + /** + * Stop watching + */ + async stop(): Promise { + if (this.watcher) { + // Clear pending debounce timers + for (const timer of this.debounceTimers.values()) 
{ + clearTimeout(timer) + } + this.debounceTimers.clear() + this.recentWrites.clear() + + await this.watcher.close() + this.watcher = null + this.handler = null + console.log("Watcher stopped") + } + } + + /** + * Mark a path as recently written internally + * This prevents the watcher from treating our own writes as external edits + */ + markInternalWrite(relativePath: string): void { + // Normalize to POSIX so suppression keys match chokidar event paths + const normalizedPath = relativePath.split(path.sep).join("/") + const expiryTime = Date.now() + this.suppressionWindow + this.recentWrites.set(normalizedPath, expiryTime) + + // Clean up after expiry + setTimeout(() => { + this.recentWrites.delete(normalizedPath) + }, this.suppressionWindow) + } + + /** + * Check if a path was recently written internally + */ + private isInternalWrite(relativePath: string): boolean { + const expiry = this.recentWrites.get(relativePath) + if (!expiry) return false + + const now = Date.now() + if (now > expiry) { + this.recentWrites.delete(relativePath) + return false + } + + return true + } + + /** + * Handle file system event with debouncing + */ + private handleEvent(type: WatcherEventType, absolutePath: string): void { + // Only watch .md files for now + if (!absolutePath.endsWith(".md")) { + return + } + + const relativePath = this.toRelativePosix(absolutePath) + + // Clear existing debounce timer + const timerId = this.debounceTimers.get(relativePath) + if (timerId) { + clearTimeout(timerId) + } + + // Set new debounce timer + const newTimer = setTimeout(() => { + this.debounceTimers.delete(relativePath) + this.emitEvent(type, absolutePath, relativePath) + }, this.debounceDelay) + + this.debounceTimers.set(relativePath, newTimer) + } + + /** + * Emit the event to handler + */ + private emitEvent( + type: WatcherEventType, + absolutePath: string, + relativePath: string + ): void { + if (!this.handler) return + + const isInternal = this.isInternalWrite(relativePath) + + const 
event: WatcherEvent = { + type, + path: absolutePath, + relativePath, + isInternal, + } + + // Skip internal writes (they're echo events from our own operations) + if (isInternal) { + console.log(`[Watcher] Suppressed internal write: ${relativePath}`) + return + } + + console.log( + `[Watcher] External ${type}: ${relativePath} (internal=${isInternal})` + ) + + const result = this.handler(event) + if (result instanceof Promise) { + result.catch((err) => console.error("[Watcher] Handler error:", err)) + } + } + + /** + * Convert absolute path to vault-relative POSIX path + */ + private toRelativePosix(absolutePath: string): string { + let relative = path.relative(this.vaultPath, absolutePath) + + // Convert to POSIX (forward slashes) + relative = relative.split(path.sep).join("/") + + return relative + } +} diff --git a/apps/storage/src/conflicts.ts b/apps/storage/src/conflicts.ts new file mode 100644 index 0000000..b018f29 --- /dev/null +++ b/apps/storage/src/conflicts.ts @@ -0,0 +1,143 @@ +import fs from "fs/promises" +import path from "path" +import { ensureDir } from "./fsutils" +import type { NoteID, Conflict } from "./types" + +/** + * Conflict detection and resolution helpers + * Uses hash comparison to detect external modifications + */ + +/** + * Detect if a conflict exists between DB and disk + * @returns true if hashes don't match (conflict exists) + */ +export function detectConflict(dbHash: string, diskHash: string): boolean { + return dbHash !== diskHash +} + +/** + * Save conflicting content to .conflicts directory + * @returns path to saved conflict file + */ +export async function saveConflict( + vaultPath: string, + noteId: NoteID, + notePath: string, + content: string, + dbHash: string, + diskHash: string +): Promise { + const conflictsDir = path.join(vaultPath, ".conflicts") + await ensureDir(conflictsDir) + + // Create unique conflict filename: noteId_timestamp.md + const timestamp = Date.now() + const basename = path.basename(notePath, ".md") + 
const conflictFilename = `${basename}_${noteId.slice(0, 8)}_${timestamp}.md` + const conflictPath = path.join(conflictsDir, conflictFilename) + + // Add header explaining the conflict + const header = ` + +` + + await fs.writeFile(conflictPath, header + content, "utf8") + + // Log conflict to conflicts.json for audit trail + await logConflict(vaultPath, { + note_id: noteId, + originalPath: notePath, + conflictPath: conflictPath, + timestamp, + dbHash, + diskHash, + resolved: false, + }) + + return conflictPath +} + +/** + * Log conflict to .grimoire/conflicts.json for audit + */ +async function logConflict(vaultPath: string, conflict: Conflict) { + const conflictsLog = path.join(vaultPath, ".grimoire", "conflicts.json") + + let conflicts: Conflict[] = [] + + try { + const existing = await fs.readFile(conflictsLog, "utf8") + conflicts = JSON.parse(existing) + } catch (err) { + // File doesn't exist yet, that's fine + } + + conflicts.push(conflict) + + await fs.writeFile(conflictsLog, JSON.stringify(conflicts, null, 2), "utf8") +} + +/** + * Get list of unresolved conflicts + */ +export async function getUnresolvedConflicts( + vaultPath: string +): Promise { + const conflictsLog = path.join(vaultPath, ".grimoire", "conflicts.json") + + try { + const data = await fs.readFile(conflictsLog, "utf8") + const conflicts: Conflict[] = JSON.parse(data) + return conflicts.filter((c) => !c.resolved) + } catch (err) { + return [] + } +} + +/** + * Mark a conflict as resolved + */ +export async function markConflictResolved( + vaultPath: string, + noteId: NoteID, + timestamp: number +) { + const conflictsLog = path.join(vaultPath, ".grimoire", "conflicts.json") + + try { + const data = await fs.readFile(conflictsLog, "utf8") + const conflicts: Conflict[] = JSON.parse(data) + + const conflict = conflicts.find( + (c) => c.note_id === noteId && c.timestamp === timestamp + ) + + if (conflict) { + conflict.resolved = true + await fs.writeFile( + conflictsLog, + 
JSON.stringify(conflicts, null, 2), + "utf8" + ) + } + } catch (err) { + // Log doesn't exist, that's fine + } +} diff --git a/apps/storage/src/events.ts b/apps/storage/src/events.ts new file mode 100644 index 0000000..1e8a913 --- /dev/null +++ b/apps/storage/src/events.ts @@ -0,0 +1,17 @@ +// src/events.ts +// Event name constants for VaultManager +export const EVENTS = { + VAULT_READY: "vaultReady", + NOTE_CREATED: "noteCreated", + NOTE_UPDATED: "noteUpdated", + NOTE_DELETED: "noteDeleted", + NOTE_RENAMED: "noteRenamed", + NOTE_MOVED: "noteMoved", + CONFLICT_DETECTED: "conflictDetected", + FOLDER_CREATED: "folderCreated", + FOLDER_RENAMED: "folderRenamed", + FOLDER_DELETED: "folderDeleted", + FOLDER_MOVED: "folderMoved", +} as const + +export type EventName = (typeof EVENTS)[keyof typeof EVENTS] \ No newline at end of file diff --git a/apps/storage/src/frontmatter.ts b/apps/storage/src/frontmatter.ts new file mode 100644 index 0000000..4b84e33 --- /dev/null +++ b/apps/storage/src/frontmatter.ts @@ -0,0 +1,71 @@ +import matter from "gray-matter" +import yaml from "js-yaml" + +/** + * Parse frontmatter from markdown content (DB-First approach) + * - Does NOT inject system IDs into files + * - Only extracts user-provided metadata + * - Preserves all user frontmatter as-is + */ +export function parseFrontmatter(rawText: string) { + const parsed = matter(rawText) + + // Extract user metadata if present (optional fields) + const title = parsed.data.title || null + const tags = parsed.data.tags || [] + const created_at = parsed.data.created_at || null + const description = parsed.data.description || null + + return { + // User frontmatter (preserved) + title, + tags, + created_at, + description, + userFrontmatter: parsed.data, // Full user frontmatter + + // Content + content: parsed.content, + rawText, // Original unmodified text + } +} + +/** + * Create frontmatter string from object (for new notes) + */ +export function stringifyFrontmatter( + content: string, + 
frontmatter?: Record +): string { + if (!frontmatter || Object.keys(frontmatter).length === 0) { + return content // No frontmatter, just content + } + + // Use yaml.dump directly (js-yaml v4 removed safeDump; matter.stringify uses it) + const frontmatterStr = yaml.dump(frontmatter, { lineWidth: -1 }).trimEnd() + const body = content.startsWith("\n") ? content : `\n${content}` + return `---\n${frontmatterStr}\n---${body}` +} + +/** + * Remove specific keys from frontmatter (migration helper) + * Used to clean up old system-injected IDs if they exist + */ +export function removeFrontmatterKeys( + rawText: string, + keysToRemove: string[] +): string { + const parsed = matter(rawText) + + // Remove specified keys + keysToRemove.forEach((key) => { + delete parsed.data[key] + }) + + // If no frontmatter left, return just content + if (Object.keys(parsed.data).length === 0) { + return parsed.content + } + + return matter.stringify(parsed.content, parsed.data) +} \ No newline at end of file diff --git a/apps/storage/src/fsutils.ts b/apps/storage/src/fsutils.ts new file mode 100644 index 0000000..bd28d78 --- /dev/null +++ b/apps/storage/src/fsutils.ts @@ -0,0 +1,50 @@ +import fs from "fs/promises" +import path from "path" +import crypto from "crypto" + +/** + * Atomic write with Windows compatibility + * On Windows, we can't rename over an existing file, so we: + * 1. Write to temp file + * 2. Copy temp to destination (overwriting) + * 3. 
Delete temp file + */ +export async function atomicWrite(filePath: string, content: string) { + const dir = path.dirname(filePath) + const base = path.basename(filePath) + const tempPath = path.join( + dir, + base + ".tmp-" + Math.random().toString(36).slice(2) + ) + + try { + // Write to temp file + await fs.writeFile(tempPath, content, "utf8") + + // Use copyFile instead of rename (works better on Windows) + await fs.copyFile(tempPath, filePath) + + // Clean up temp file + await fs.unlink(tempPath) + } catch (err: any) { + // If operation fails, try to clean up temp file + try { + await fs.unlink(tempPath) + } catch (_) { + // Ignore cleanup errors + } + throw err + } +} + +export function computeHash(content: string): string { + return crypto.createHash("sha256").update(content).digest("hex") +} + +export function toPosix(p: string): string { + return p.split(path.sep).join("/") +} + +export async function ensureDir(dirPath: string) { + await fs.mkdir(dirPath, { recursive: true }) +} \ No newline at end of file diff --git a/apps/storage/src/gray-matter.d.ts b/apps/storage/src/gray-matter.d.ts new file mode 100644 index 0000000..1a95384 --- /dev/null +++ b/apps/storage/src/gray-matter.d.ts @@ -0,0 +1,60 @@ +// Type declarations for gray-matter (no official types available) +declare module "gray-matter" { + interface GrayMatterOption< + I extends gray.Input, + O extends gray.GrayMatterOption + > { + excerpt?: boolean | ((input: I, options: O) => string) + excerpt_separator?: string + engines?: { + [index: string]: (input: string) => object + } + language?: string + delimiters?: string | [string, string] + } + + interface GrayMatterFile { + data: { [key: string]: any } + content: string + excerpt?: string + orig: Buffer | I + language: string + matter: string + stringify(lang: string): string + } + + namespace gray { + type Input = string | Buffer + type GrayMatterOption< + I extends Input, + O extends GrayMatterOption + > = GrayMatterOption + type GrayMatterFile = 
// src/index.ts — public entry point of the storage package
export { default as VaultManager } from "./vaultManager"
export * from "./types"
export { EVENTS } from "./events"
export type { WatcherEvent, WatcherEventType } from "./Watcher"

// src/types.ts
// Type definitions for the Storage layer.
// All timestamps are epoch milliseconds; all paths are vault-relative POSIX.

export type NoteID = string
export type FolderID = string

/**
 * Status of a note in the RAG embedding pipeline
 * - pending: note exists but has never been embedded
 * - indexed: content is in ChromaDB and up-to-date
 * - stale: content changed since last embedding (updated_at > last_embedded_at)
 * - error: last embedding attempt failed
 */
export type EmbeddingStatus = "pending" | "indexed" | "stale" | "error"

/**
 * Note metadata stored in database.
 * ID is ONLY in DB, never injected into files (DB-first architecture).
 */
export interface NoteMeta {
  note_id: NoteID // UUID generated by system
  path: string // vault-relative POSIX path, e.g. "notes/todo.md"
  display_name: string // filename or title from frontmatter
  folder_path: string // vault-relative folder, "" for root
  created_at: number // epoch ms
  updated_at: number // epoch ms
  content_hash?: string // SHA256 for conflict detection
  source?: "md" | "pdf" | "import"
  deleted_at?: number | null // soft delete timestamp
  embedding_status?: EmbeddingStatus // RAG pipeline state
  last_embedded_at?: number | null // epoch ms of last successful embed
}

/**
 * A folder tracked in the database
 */
export interface Folder {
  folder_id: FolderID // UUID
  path: string // vault-relative POSIX path, e.g. "projects/work"
  name: string // display name (last path segment)
  parent_path: string // parent folder path, "" for root
  created_at: number // epoch ms
  sort_order: number // manual sort position
  icon?: string // optional emoji or icon name
  pinned: boolean // pinned to top of sidebar
}

/**
 * Options for listing notes with pagination
 */
export interface ListNotesOptions {
  folderPath?: string // filter by folder
  offset?: number // pagination offset
  limit?: number // max results (default: all)
  orderBy?: "updated_at" | "created_at" | "display_name"
  direction?: "asc" | "desc"
}

/**
 * Lightweight note info returned for bulk RAG operations.
 * Avoids loading full content unnecessarily.
 */
export interface RAGNoteInfo {
  meta: NoteMeta
  content: string
}

/**
 * Full note with content + metadata
 */
export interface NoteWithContent {
  meta: NoteMeta
  content: string
  // User's original frontmatter.
  // NOTE(review): type arguments appear stripped in this dump — likely
  // Record<string, any>; confirm against the original source.
  userFrontmatter?: Record
}

/**
 * Vault statistics
 */
export interface VaultStats {
  totalNotes: number
  totalFolders: number
  vaultPath: string
  lastScan: number
  storageVersion: string
}

// ==================== EVENT PAYLOADS ====================
// One payload interface per EVENTS entry. `source` distinguishes changes
// made through this API ("internal") from ones seen by the file watcher
// ("external").

export interface VaultReadyPayload {
  vaultPath: string
  totalNotes: number
  totalFolders: number
  scanTimeMs: number
}

export interface NoteCreatedPayload {
  note_id: NoteID
  path: string
  folder_path: string
  display_name: string
  created_at: number
  source: "internal" | "external"
}

export interface NoteUpdatedPayload {
  note_id: NoteID
  path: string
  updated_at: number
  content_hash: string
  source: "internal" | "external"
}

export interface NoteDeletedPayload {
  note_id: NoteID
  path: string
  trashed: boolean // true = moved to .trash (restorable), false = permanent
  source: "internal" | "external"
}

export interface NoteRenamedPayload {
  note_id: NoteID
  oldPath: string
  newPath: string
  oldDisplayName: string
  newDisplayName: string
}

export interface NoteMovedPayload {
  note_id: NoteID
  oldPath: string
  newPath: string
  oldFolderPath: string
  newFolderPath: string
}

export interface ConflictDetectedPayload {
  note_id: NoteID
  path: string
  dbHash: string
  diskHash: string
  conflictPath: string // where the losing copy was saved
  timestamp: number
}

export interface FolderCreatedPayload {
  folder_id: FolderID
  path: string
  name: string
  parent_path: string
}

export interface FolderRenamedPayload {
  folder_id: FolderID
  oldPath: string
  newPath: string
  oldName: string
  newName: string
  affectedNotes: number // notes whose paths were rewritten
}

export interface FolderDeletedPayload {
  folder_id: FolderID
  path: string
  affectedNotes: number // notes trashed along with the folder
}

export interface FolderMovedPayload {
  oldPath: string
  newPath: string
  affectedNotes: number
}

// ==================== MISC ====================

/**
 * Conflict information, one entry per saved conflict copy
 * (persisted in .grimoire/conflicts.json)
 */
export interface Conflict {
  note_id: NoteID
  originalPath: string
  conflictPath: string
  timestamp: number
  dbHash: string
  diskHash: string
  resolved: boolean
}

/**
 * Import report
 */
export interface ImportReport {
  totalFiles: number
  imported: number
  skipped: number
  errors: string[]
  duration: number
}
import EventEmitter from "eventemitter3"
import fs from "fs/promises"
import path from "path"
import { v4 as uuidv4 } from "uuid"
import DB from "./DB"
import Watcher from "./Watcher"
import { parseFrontmatter, stringifyFrontmatter } from "./frontmatter"
import { atomicWrite, computeHash, toPosix, ensureDir } from "./fsutils"
import { detectConflict, saveConflict } from "./conflicts"
import { EVENTS } from "./events"
import type {
  NoteMeta,
  NoteID,
  NoteWithContent,
  VaultStats,
  Folder,
  FolderID,
  EmbeddingStatus,
  RAGNoteInfo,
  ListNotesOptions,
  VaultReadyPayload,
  NoteCreatedPayload,
  NoteUpdatedPayload,
  NoteDeletedPayload,
  NoteRenamedPayload,
  NoteMovedPayload,
  ConflictDetectedPayload,
  FolderCreatedPayload,
  FolderRenamedPayload,
  FolderDeletedPayload,
  FolderMovedPayload,
} from "./types"

/**
 * VaultManager - Main API for storage operations
 *
 * DB-First Architecture:
 * - Note IDs stored ONLY in database
 * - User files contain only user content (no system IDs)
 * - File system is source of truth for content
 * - Database is source of truth for metadata
 */
export default class VaultManager extends EventEmitter {
  vaultPath: string | null = null
  db: DB | null = null
  watcher: Watcher | null = null
  private isWatcherActive = false

  constructor() {
    super()
  }

  // ==================== INITIALIZATION ====================

  /**
   * Initialize vault: scan files, build database, prepare for operations.
   * Safe to call multiple times — closes any previous state first.
   *
   * FIX: files that fail to process are now remembered and EXCLUDED from
   * the "gone from disk" sweep. Previously a transient read error excluded
   * the file from `scannedPaths`, which caused the note to be soft-deleted
   * even though it still existed on disk.
   */
  async initializeVault(vaultPath: string): Promise<void> {
    // Idempotent: close previous state if already initialized
    if (this.vaultPath) {
      await this.close()
    }

    const start = Date.now()

    const stat = await fs.stat(vaultPath)
    if (!stat.isDirectory()) {
      throw new Error("Vault path must be a directory")
    }

    this.vaultPath = vaultPath

    await ensureDir(path.join(vaultPath, ".grimoire"))
    await ensureDir(path.join(vaultPath, ".conflicts"))
    await ensureDir(path.join(vaultPath, ".trash"))

    this.db = new DB(vaultPath)

    // Load all existing notes from DB keyed by path. processFile reuses
    // these UUIDs instead of generating new ones, so re-opening the vault
    // never changes a note's ID.
    const existingByPath = new Map<string, NoteMeta>()
    for (const note of this.db.listNotes()) {
      existingByPath.set(note.path, note)
    }

    console.log("[VaultManager] Scanning vault...")
    const files = await this.walk(vaultPath)
    const notes: NoteMeta[] = []
    // Paths that errored during processing — must NOT be soft-deleted below.
    const failedPaths = new Set<string>()

    for (const file of files) {
      if (!file.endsWith(".md")) continue
      try {
        const noteMeta = await this.processFile(file, existingByPath)
        notes.push(noteMeta)
      } catch (err) {
        failedPaths.add(toPosix(path.relative(vaultPath, file)))
        console.error(`[VaultManager] Error processing ${file}:`, err)
      }
    }

    // Bulk upsert: ON CONFLICT(path) preserves existing note_id, marks stale if hash changed
    if (notes.length > 0) {
      this.db.bulkUpsertNotes(notes)
    }

    // Sync tags for all notes from their frontmatter.
    // NOTE: this re-reads each file (processFile already read it once);
    // acceptable at startup, but a candidate for optimization.
    for (const note of notes) {
      const absPath = path.join(vaultPath, note.path)
      try {
        const raw = await fs.readFile(absPath, "utf8")
        const parsed = parseFrontmatter(raw)
        if (parsed.tags && parsed.tags.length > 0) {
          this.db.setNoteTags(note.note_id, parsed.tags)
        }
      } catch (err) {
        // Best effort — the note row already exists; log so it's visible
        console.warn(`[VaultManager] Tag sync failed for ${note.path}:`, err)
      }
    }

    // Soft-delete notes that exist in DB but are no longer on disk
    // (skipping paths that merely failed to process this scan)
    const scannedPaths = new Set(notes.map((n) => n.path))
    for (const [existingPath, existing] of existingByPath) {
      if (!scannedPaths.has(existingPath) && !failedPaths.has(existingPath)) {
        this.db.softDeleteNote(existing.note_id)
        console.log(`[VaultManager] Note gone from disk, soft-deleted: ${existingPath}`)
      }
    }

    const end = Date.now()
    console.log(`[VaultManager] Initialized ${notes.length} notes in ${end - start}ms`)

    this.emit(EVENTS.VAULT_READY, {
      vaultPath,
      totalNotes: notes.length,
      totalFolders: this.db.listFolders().length,
      scanTimeMs: end - start,
    } as VaultReadyPayload)
  }

  /**
   * Process a single file: extract metadata, reuse existing UUID if known.
   * Never modifies the file — DB-first approach.
   */
  private async processFile(
    absolutePath: string,
    existingByPath?: Map<string, NoteMeta>
  ): Promise<NoteMeta> {
    const raw = await fs.readFile(absolutePath, "utf8")
    const parsed = parseFrontmatter(raw)
    const stat = await fs.stat(absolutePath)

    const relative = path.relative(this.vaultPath!, absolutePath)
    const posixPath = toPosix(relative)
    const folderPath = toPosix(path.dirname(relative))

    // Reuse existing UUID — never generate a new one for a known path
    const existing = existingByPath?.get(posixPath)
    const noteId = existing?.note_id ?? uuidv4()

    const displayName = parsed.title || path.basename(absolutePath, ".md")
    // birthtimeMs can be 0 on some filesystems (Linux) — fall through to now
    const createdAt = parsed.created_at || existing?.created_at || stat.birthtimeMs || Date.now()

    return {
      note_id: noteId,
      path: posixPath,
      display_name: displayName,
      folder_path: folderPath === "." ? "" : folderPath,
      created_at: createdAt,
      updated_at: stat.mtimeMs,
      content_hash: computeHash(raw),
      source: "md",
      embedding_status: existing?.embedding_status ?? "pending",
      last_embedded_at: existing?.last_embedded_at ?? null,
    }
  }

  /**
   * Recursively walk the directory tree, returning absolute file paths.
   * System/hidden directories are skipped so their contents are never
   * treated as notes.
   */
  private async walk(dir: string): Promise<string[]> {
    const entries = await fs.readdir(dir, { withFileTypes: true })
    const files: string[] = []

    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name)

      if (entry.isDirectory()) {
        // Skip system directories
        if (
          entry.name === ".grimoire" ||
          entry.name === ".conflicts" ||
          entry.name === ".trash" ||
          entry.name === ".git" ||
          entry.name === ".derived" ||
          entry.name === "node_modules"
        ) {
          continue
        }

        const sub = await this.walk(fullPath)
        files.push(...sub)
      } else {
        files.push(fullPath)
      }
    }

    return files
  }

  // ==================== WATCHER CONTROL ====================

  /**
   * Start watching for external file changes.
   * @throws if the vault is not initialized
   */
  startWatcher(): void {
    if (!this.vaultPath) {
      throw new Error("Vault not initialized")
    }

    if (this.isWatcherActive) {
      console.warn("[VaultManager] Watcher already active")
      return
    }

    this.watcher = new Watcher(this.vaultPath)
    this.watcher.start(async (event) => {
      await this.handleWatcherEvent(event)
    })

    this.isWatcherActive = true
    console.log("[VaultManager] Watcher started")
  }

  /**
   * Stop watching (no-op when the watcher is not running).
   */
  async stopWatcher(): Promise<void> {
    if (this.watcher) {
      await this.watcher.stop()
      this.watcher = null
      this.isWatcherActive = false
      console.log("[VaultManager] Watcher stopped")
    }
  }

  /**
   * Route file system events from the watcher to the matching handler.
   * Errors are caught here so a bad event never kills the watcher loop.
   */
  private async handleWatcherEvent(event: any): Promise<void> {
    try {
      switch (event.type) {
        case "add":
          await this.handleExternalAdd(event.relativePath)
          break
        case "change":
          await this.handleExternalChange(event.relativePath)
          break
        case "unlink":
          await this.handleExternalDelete(event.relativePath)
          break
      }
    } catch (err) {
      console.error("[VaultManager] Watcher event error:", err)
    }
  }
path.join(this.vaultPath!, relativePath) + const noteMeta = await this.processFile(absolutePath) + + this.db!.insertNote(noteMeta) + + this.emit(EVENTS.NOTE_CREATED, { + note_id: noteMeta.note_id, + path: noteMeta.path, + folder_path: noteMeta.folder_path, + display_name: noteMeta.display_name, + created_at: noteMeta.created_at, + source: "external", + } as NoteCreatedPayload) + + console.log(`[VaultManager] External add: ${relativePath}`) + } + + private async handleExternalChange(relativePath: string): Promise { + const existing = this.db!.getNoteByPath(relativePath) + if (!existing) { + console.warn(`[VaultManager] Change event for unknown note: ${relativePath}`) + return + } + + const absolutePath = path.join(this.vaultPath!, relativePath) + const raw = await fs.readFile(absolutePath, "utf8") + const newHash = computeHash(raw) + const parsed = parseFrontmatter(raw) + + // Sync display_name from frontmatter title + const newDisplayName = parsed.title || path.basename(relativePath, ".md") + + this.db!.updateNoteHashAndTime(existing.note_id, newHash, Date.now()) + if (newDisplayName !== existing.display_name) { + this.db!.updateNote(existing.note_id, { display_name: newDisplayName }) + } + + // Sync tags from frontmatter + if (parsed.tags) { + this.db!.setNoteTags(existing.note_id, parsed.tags) + } + + this.emit(EVENTS.NOTE_UPDATED, { + note_id: existing.note_id, + path: relativePath, + updated_at: Date.now(), + content_hash: newHash, + source: "external", + } as NoteUpdatedPayload) + + console.log(`[VaultManager] External change: ${relativePath}`) + } + + private async handleExternalDelete(relativePath: string): Promise { + const existing = this.db!.getNoteByPath(relativePath) + if (!existing) { + return + } + + this.db!.softDeleteNote(existing.note_id) + + this.emit(EVENTS.NOTE_DELETED, { + note_id: existing.note_id, + path: relativePath, + trashed: false, + source: "external", + } as NoteDeletedPayload) + + console.log(`[VaultManager] External delete: 
${relativePath}`) + } + + // ==================== CRUD OPERATIONS ==================== + + /** + * Create a new note + */ + async createNote( + folderPath: string, + title: string, + content: string = "", + tags: string[] = [] + ): Promise { + if (!this.vaultPath || !this.db) { + throw new Error("Vault not initialized") + } + + // === INPUT VALIDATION === + const trimmedTitle = title.trim() + if (!trimmedTitle) throw new Error("Note title cannot be empty") + if (trimmedTitle.length > 200) throw new Error("Note title too long (max 200 chars)") + + // Prevent path traversal + const sanitizedFolder = folderPath.replace(/\.\./g, "").replace(/^\//, "") + + const noteId = uuidv4() + const createdAt = Date.now() + + const filename = this.sanitizeFilename(trimmedTitle) + ".md" + const fullFolderPath = path.join(this.vaultPath, sanitizedFolder) + await ensureDir(fullFolderPath) + + const absolutePath = path.join(fullFolderPath, filename) + const relativePath = toPosix(path.relative(this.vaultPath, absolutePath)) + + // Check for path collision + if (this.db.getNoteByPath(relativePath)) { + throw new Error(`A note already exists at path: ${relativePath}`) + } + + const frontmatter: Record = { title: trimmedTitle } + if (tags.length > 0) frontmatter.tags = tags + + const fileContent = stringifyFrontmatter(content, frontmatter) + await atomicWrite(absolutePath, fileContent) + + if (this.watcher) { + this.watcher.markInternalWrite(relativePath) + } + + const noteMeta: NoteMeta = { + note_id: noteId, + path: relativePath, + display_name: trimmedTitle, + folder_path: sanitizedFolder === "." ? 
"" : sanitizedFolder, + created_at: createdAt, + updated_at: createdAt, + content_hash: computeHash(fileContent), + source: "md", + embedding_status: "pending", + } + + this.db.insertNote(noteMeta) + + if (tags.length > 0) { + this.db.setNoteTags(noteId, tags) + } + + this.emit(EVENTS.NOTE_CREATED, { + note_id: noteId, + path: relativePath, + folder_path: noteMeta.folder_path, + display_name: trimmedTitle, + created_at: createdAt, + source: "internal", + } as NoteCreatedPayload) + + return noteMeta + } + + /** + * Read note content + metadata + */ + async readNote(noteId: NoteID): Promise { + if (!this.vaultPath || !this.db) { + throw new Error("Vault not initialized") + } + + const meta = this.db.getNoteById(noteId) + if (!meta) { + throw new Error(`Note not found: ${noteId}`) + } + + const absolutePath = path.join(this.vaultPath, meta.path) + const raw = await fs.readFile(absolutePath, "utf8") + const parsed = parseFrontmatter(raw) + + return { + meta, + content: parsed.content, + userFrontmatter: parsed.userFrontmatter, + } + } + + /** + * Update note content (with conflict detection). + * Also accepts an optional tags array to update tags in DB. 
+ */ + async updateNote( + noteId: NoteID, + newContent: string, + tags?: string[] + ): Promise { + if (!this.vaultPath || !this.db) { + throw new Error("Vault not initialized") + } + + const meta = this.db.getNoteById(noteId) + if (!meta) { + throw new Error(`Note not found: ${noteId}`) + } + + const absolutePath = path.join(this.vaultPath, meta.path) + + const diskContent = await fs.readFile(absolutePath, "utf8") + const diskHash = computeHash(diskContent) + + if (meta.content_hash && detectConflict(meta.content_hash, diskHash)) { + const conflictPath = await saveConflict( + this.vaultPath, + noteId, + meta.path, + newContent, + meta.content_hash, + diskHash + ) + + this.emit(EVENTS.CONFLICT_DETECTED, { + note_id: noteId, + path: meta.path, + dbHash: meta.content_hash, + diskHash, + conflictPath, + timestamp: Date.now(), + } as ConflictDetectedPayload) + + throw new Error(`Conflict detected: file changed externally. Check ${conflictPath}`) + } + + const parsed = parseFrontmatter(diskContent) + + // Merge new tags into frontmatter if provided + const frontmatter = { ...parsed.userFrontmatter } + if (tags !== undefined) { + if (tags.length > 0) { + frontmatter.tags = tags + } else { + delete frontmatter.tags + } + } + + const updatedContent = stringifyFrontmatter(newContent, frontmatter) + await atomicWrite(absolutePath, updatedContent) + + if (this.watcher) { + this.watcher.markInternalWrite(meta.path) + } + + const newHash = computeHash(updatedContent) + this.db.updateNoteHashAndTime(noteId, newHash, Date.now()) + + // Sync display_name if frontmatter title changed + const newDisplayName = frontmatter.title || path.basename(meta.path, ".md") + if (newDisplayName !== meta.display_name) { + this.db.updateNote(noteId, { display_name: newDisplayName }) + } + + // Sync tags in DB + if (tags !== undefined) { + this.db.setNoteTags(noteId, tags) + } else if (parsed.tags) { + this.db.setNoteTags(noteId, parsed.tags) + } + + this.emit(EVENTS.NOTE_UPDATED, { + note_id: 
noteId, + path: meta.path, + updated_at: Date.now(), + content_hash: newHash, + source: "internal", + } as NoteUpdatedPayload) + } + + /** + * Delete note. + * - Soft delete (default): moves file to .trash, records original path for restore. + * - Permanent: hard deletes file and DB record. + */ + async deleteNote(noteId: NoteID, permanent: boolean = false): Promise { + if (!this.vaultPath || !this.db) { + throw new Error("Vault not initialized") + } + + const meta = this.db.getNoteById(noteId) + if (!meta) { + throw new Error(`Note not found: ${noteId}`) + } + + const absolutePath = path.join(this.vaultPath, meta.path) + + if (permanent) { + await fs.unlink(absolutePath) + this.db.deleteNote(noteId) + } else { + // Move to .trash with a unique filename + const trashDir = path.join(this.vaultPath, ".trash") + const timestamp = Date.now() + const trashFilename = `${path.basename(meta.path, ".md")}_${timestamp}.md` + const trashAbsPath = path.join(trashDir, trashFilename) + const trashRelPath = toPosix(path.relative(this.vaultPath, trashAbsPath)) + + await fs.rename(absolutePath, trashAbsPath) + + // Record original path so restore is possible + this.db.softDeleteNoteWithPath(noteId, trashRelPath, meta.path) + } + + if (this.watcher) { + this.watcher.markInternalWrite(meta.path) + } + + this.emit(EVENTS.NOTE_DELETED, { + note_id: noteId, + path: meta.path, + trashed: !permanent, + source: "internal", + } as NoteDeletedPayload) + } + + // ==================== QUERY OPERATIONS ==================== + + /** + * List notes with optional filter, sort, and pagination. 
+ */ + async listNotes(options: ListNotesOptions = {}): Promise { + if (!this.db) throw new Error("Vault not initialized") + return this.db.listNotes(options) + } + + /** Search notes by title (LIKE match) */ + async searchNotes(query: string): Promise { + if (!this.db) throw new Error("Vault not initialized") + return this.db.searchNotes(query) + } + + /** Search notes that have ALL of the given tags */ + async searchByTags(tags: string[]): Promise { + if (!this.db) throw new Error("Vault not initialized") + return this.db.searchByTags(tags) + } + + /** Get all unique tags used in the vault with usage counts */ + async getAllTags(): Promise<{ tag: string; count: number }[]> { + if (!this.db) throw new Error("Vault not initialized") + return this.db.getAllTags() + } + + /** List all DB-tracked folders, optionally filtered by parent */ + async listFolders(parentPath?: string): Promise { + if (!this.db) throw new Error("Vault not initialized") + return this.db.listFolders(parentPath) + } + + /** List notes currently in the trash */ + async listTrashedNotes(): Promise { + if (!this.db) throw new Error("Vault not initialized") + return this.db.listTrashedNotes() + } + + /** + * Get vault statistics + */ + async getVaultStats(): Promise { + if (!this.vaultPath || !this.db) throw new Error("Vault not initialized") + return { + totalNotes: this.db.countNotes(), + totalFolders: this.db.listFolders().length, + vaultPath: this.vaultPath, + lastScan: Date.now(), + storageVersion: this.db.getConfig("storage_version") || "unknown", + } + } + + // ==================== NOTE RENAME / MOVE ==================== + + /** + * Rename a note: updates filename, frontmatter title, and DB display_name. 
+ */ + async renameNote(noteId: NoteID, newTitle: string): Promise { + if (!this.vaultPath || !this.db) throw new Error("Vault not initialized") + + const trimmed = newTitle.trim() + if (!trimmed) throw new Error("Title cannot be empty") + if (trimmed.length > 200) throw new Error("Title too long (max 200 chars)") + + const meta = this.db.getNoteById(noteId) + if (!meta) throw new Error(`Note not found: ${noteId}`) + + const oldAbsPath = path.join(this.vaultPath, meta.path) + const newFilename = this.sanitizeFilename(trimmed) + ".md" + const newAbsPath = path.join(path.dirname(oldAbsPath), newFilename) + const newRelPath = toPosix(path.relative(this.vaultPath, newAbsPath)) + + if (newAbsPath !== oldAbsPath && this.db.getNoteByPath(newRelPath)) { + throw new Error(`A note already exists at path: ${newRelPath}`) + } + + // Update frontmatter title in file + const raw = await fs.readFile(oldAbsPath, "utf8") + const parsed = parseFrontmatter(raw) + const updatedContent = stringifyFrontmatter(parsed.content, { + ...parsed.userFrontmatter, + title: trimmed, + }) + + await atomicWrite(newAbsPath, updatedContent) + + if (newAbsPath !== oldAbsPath) { + await fs.unlink(oldAbsPath) + if (this.watcher) { + this.watcher.markInternalWrite(meta.path) + this.watcher.markInternalWrite(newRelPath) + } + } else if (this.watcher) { + this.watcher.markInternalWrite(newRelPath) + } + + const newHash = computeHash(updatedContent) + this.db.updateNote(noteId, { + path: newRelPath, + display_name: trimmed, + content_hash: newHash, + updated_at: Date.now(), + embedding_status: "stale", + }) + + const updated = this.db.getNoteById(noteId)! + + this.emit(EVENTS.NOTE_RENAMED, { + note_id: noteId, + oldPath: meta.path, + newPath: newRelPath, + oldDisplayName: meta.display_name, + newDisplayName: trimmed, + } as NoteRenamedPayload) + + return updated + } + + /** + * Move a note to a different folder. 
+ */ + async moveNote(noteId: NoteID, newFolderPath: string): Promise { + if (!this.vaultPath || !this.db) throw new Error("Vault not initialized") + + const sanitizedFolder = newFolderPath.replace(/\.\./g, "").replace(/^\//, "") + const meta = this.db.getNoteById(noteId) + if (!meta) throw new Error(`Note not found: ${noteId}`) + + const basename = path.basename(meta.path) + const newRelPath = sanitizedFolder + ? toPosix(path.join(sanitizedFolder, basename)) + : basename + + if (newRelPath === meta.path) return meta // No-op + + if (this.db.getNoteByPath(newRelPath)) { + throw new Error(`A note already exists at: ${newRelPath}`) + } + + const oldAbsPath = path.join(this.vaultPath, meta.path) + const newAbsPath = path.join(this.vaultPath, newRelPath) + await ensureDir(path.dirname(newAbsPath)) + await fs.rename(oldAbsPath, newAbsPath) + + if (this.watcher) { + this.watcher.markInternalWrite(meta.path) + this.watcher.markInternalWrite(newRelPath) + } + + this.db.updateNote(noteId, { + path: newRelPath, + folder_path: sanitizedFolder, + updated_at: Date.now(), + }) + + const updated = this.db.getNoteById(noteId)! + + this.emit(EVENTS.NOTE_MOVED, { + note_id: noteId, + oldPath: meta.path, + newPath: newRelPath, + oldFolderPath: meta.folder_path, + newFolderPath: sanitizedFolder, + } as NoteMovedPayload) + + return updated + } + + /** + * Update only the tags on a note (without touching content). 
   */
  async updateNoteTags(noteId: NoteID, tags: string[]): Promise {
    if (!this.db) throw new Error("Vault not initialized")
    const meta = this.db.getNoteById(noteId)
    if (!meta) throw new Error(`Note not found: ${noteId}`)

    // DB is the source of truth; update it first.
    this.db.setNoteTags(noteId, tags)

    // Also persist tags to frontmatter
    if (this.vaultPath) {
      const absPath = path.join(this.vaultPath, meta.path)
      const raw = await fs.readFile(absPath, "utf8")
      const parsed = parseFrontmatter(raw)
      const frontmatter = { ...parsed.userFrontmatter }
      // An empty tag list removes the `tags` key entirely rather than
      // writing `tags: []` into the frontmatter.
      if (tags.length > 0) {
        frontmatter.tags = tags
      } else {
        delete frontmatter.tags
      }
      const updated = stringifyFrontmatter(parsed.content, frontmatter)
      await atomicWrite(absPath, updated)
      // Prevent the file watcher from treating our own write as external.
      if (this.watcher) this.watcher.markInternalWrite(meta.path)
      const newHash = computeHash(updated)
      this.db.updateNoteHashAndTime(noteId, newHash, Date.now())
    }
  }

  // ==================== FOLDER OPERATIONS ====================

  /**
   * Create a new folder on disk and register it in the DB.
   */
  async createFolder(
    folderPath: string,
    options: { icon?: string; pinned?: boolean } = {}
  ): Promise {
    if (!this.vaultPath || !this.db) throw new Error("Vault not initialized")

    // Strip traversal sequences and any leading slash from the requested path.
    const sanitized = folderPath.replace(/\.\./g, "").replace(/^\//, "")
    if (!sanitized) throw new Error("Folder path cannot be empty")

    const absPath = path.join(this.vaultPath, sanitized)
    await ensureDir(absPath)

    // Check if already tracked — makes createFolder idempotent.
    const existing = this.db.getFolderByPath(sanitized)
    if (existing) return existing

    // Derive the folder's own name and its parent path from the segments.
    const segments = sanitized.split("/")
    const name = segments[segments.length - 1] ?? sanitized
    const parentPath = segments.slice(0, -1).join("/")

    const folder: Folder = {
      folder_id: uuidv4(),
      path: sanitized,
      name,
      parent_path: parentPath,
      created_at: Date.now(),
      sort_order: 0,
      icon: options.icon,
      pinned: options.pinned ?? false,
    }

    this.db.insertFolder(folder)

    this.emit(EVENTS.FOLDER_CREATED, {
      folder_id: folder.folder_id,
      path: folder.path,
      name: folder.name,
      parent_path: folder.parent_path,
    } as FolderCreatedPayload)

    return folder
  }

  /**
   * Rename a folder — renames the directory on disk and updates all
   * affected note paths and child folder paths in the DB.
   */
  async renameFolder(folderId: FolderID, newName: string): Promise {
    if (!this.vaultPath || !this.db) throw new Error("Vault not initialized")

    const trimmedName = newName.trim()
    if (!trimmedName) throw new Error("Folder name cannot be empty")

    const folder = this.db.getFolderById(folderId)
    if (!folder) throw new Error(`Folder not found: ${folderId}`)

    // Replace only the last path segment with the sanitized new name.
    const segments = folder.path.split("/")
    segments[segments.length - 1] = this.sanitizeFilename(trimmedName)
    const newPath = segments.join("/")

    if (newPath === folder.path) return folder // No-op

    if (this.db.getFolderByPath(newPath)) {
      throw new Error(`A folder already exists at: ${newPath}`)
    }

    const oldAbsPath = path.join(this.vaultPath, folder.path)
    const newAbsPath = path.join(this.vaultPath, newPath)
    await fs.rename(oldAbsPath, newAbsPath)

    // Cascades the path prefix change to all notes/subfolders in the DB.
    const affectedNotes = this.db.renameFolderPath(folder.path, newPath)

    const updated = this.db.getFolderById(folderId)!

    this.emit(EVENTS.FOLDER_RENAMED, {
      folder_id: folderId,
      oldPath: folder.path,
      newPath,
      oldName: folder.name,
      newName: trimmedName,
      affectedNotes,
    } as FolderRenamedPayload)

    return updated
  }

  /**
   * Move a folder under a new parent (empty string = vault root).
   * Renames the directory on disk and cascades path updates in the DB.
   */
  async moveFolder(folderId: FolderID, newParentPath: string): Promise {
    if (!this.vaultPath || !this.db) throw new Error("Vault not initialized")

    const sanitized = newParentPath.replace(/\.\./g, "").replace(/^\//, "")
    const folder = this.db.getFolderById(folderId)
    if (!folder) throw new Error(`Folder not found: ${folderId}`)

    const newPath = sanitized ? `${sanitized}/${folder.name}` : folder.name
    if (newPath === folder.path) return folder

    if (this.db.getFolderByPath(newPath)) {
      throw new Error(`A folder already exists at: ${newPath}`)
    }

    const oldAbsPath = path.join(this.vaultPath, folder.path)
    const newAbsPath = path.join(this.vaultPath, newPath)
    await ensureDir(path.dirname(newAbsPath))
    await fs.rename(oldAbsPath, newAbsPath)

    // Cascade path updates, then record the new parent on the folder row.
    const affectedNotes = this.db.renameFolderPath(folder.path, newPath)
    this.db.updateFolder(folderId, { parent_path: sanitized })

    const updated = this.db.getFolderById(folderId)!

    this.emit(EVENTS.FOLDER_MOVED, {
      oldPath: folder.path,
      newPath,
      affectedNotes,
    } as FolderMovedPayload)

    return updated
  }

  /**
   * Delete a folder. Refuses when the folder tree still contains notes
   * unless `force` is set, in which case every contained note is trashed
   * first and the directory is then removed from disk and DB.
   */
  async deleteFolder(folderId: FolderID, force: boolean = false): Promise {
    if (!this.vaultPath || !this.db) throw new Error("Vault not initialized")

    const folder = this.db.getFolderById(folderId)
    if (!folder) throw new Error(`Folder not found: ${folderId}`)

    // Find all notes in this folder tree
    const allNotes = this.db.listNotes()
    const affectedNotes = allNotes.filter(
      (n) => n.folder_path === folder.path || n.folder_path.startsWith(folder.path + "/")
    )

    if (affectedNotes.length > 0 && !force) {
      throw new Error(
        `Folder "${folder.name}" contains ${affectedNotes.length} note(s). Use force=true to trash them all.`
      )
    }

    // Trash all notes in folder tree
    for (const note of affectedNotes) {
      try {
        await this.deleteNote(note.note_id, false)
      } catch (err) {
        // Best-effort: keep trashing the remaining notes even if one fails.
        console.error(`[VaultManager] Failed to trash note ${note.note_id}:`, err)
      }
    }

    // Remove folder from disk (should be empty or only system files now)
    const absPath = path.join(this.vaultPath, folder.path)
    try {
      await fs.rm(absPath, { recursive: true, force: true })
    } catch (err) {
      console.error(`[VaultManager] Failed to delete folder on disk: ${folder.path}`, err)
    }

    // Remove folder and descendants from DB
    this.db.deleteFolderByPath(folder.path)

    this.emit(EVENTS.FOLDER_DELETED, {
      folder_id: folderId,
      path: folder.path,
      affectedNotes: affectedNotes.length,
    } as FolderDeletedPayload)
  }

  // ==================== TRASH OPERATIONS ====================

  /**
   * Restore a note from the trash to its original location.
   */
  async restoreFromTrash(noteId: NoteID): Promise {
    if (!this.vaultPath || !this.db) throw new Error("Vault not initialized")

    // Get the note including its deleted_at (must use a raw DB query)
    const trashNotes = this.db.listTrashedNotes()
    const trashed = trashNotes.find((n) => n.note_id === noteId)
    if (!trashed) throw new Error(`No trashed note found with id: ${noteId}`)

    // Get the trash file path (stored as current `path` in DB)
    const trashAbsPath = path.join(this.vaultPath, trashed.path)
    // NOTE(review): the `!` asserts non-null yet the very next check handles
    // null — the assertion looks misleading; confirm restoreNoteFromTrash's
    // declared return type.
    const originalRelPath = await this.db.restoreNoteFromTrash(noteId)!

    if (!originalRelPath) {
      throw new Error(`Cannot restore: original path unknown for note ${noteId}`)
    }

    const originalAbsPath = path.join(this.vaultPath, originalRelPath)
    await ensureDir(path.dirname(originalAbsPath))

    // Move file back from trash
    try {
      await fs.rename(trashAbsPath, originalAbsPath)
    } catch (err) {
      // If rename fails (cross-device), fall back to copy + delete
      await fs.copyFile(trashAbsPath, originalAbsPath)
      await fs.unlink(trashAbsPath)
    }

    if (this.watcher) {
      this.watcher.markInternalWrite(originalRelPath)
    }

    const restored = this.db.getNoteById(noteId)!

    // A restore is surfaced to listeners as a (re)creation of the note.
    this.emit(EVENTS.NOTE_CREATED, {
      note_id: noteId,
      path: originalRelPath,
      folder_path: restored.folder_path,
      display_name: restored.display_name,
      created_at: restored.created_at,
      source: "internal",
    } as NoteCreatedPayload)

    return restored
  }

  // ==================== RAG OPERATIONS ====================

  /**
   * Read all active notes with their full content.
   * Intended for initial RAG indexing of the entire vault.
   */
  async getAllNotesWithContent(): Promise {
    if (!this.vaultPath || !this.db) throw new Error("Vault not initialized")

    const notes = this.db.listNotes()
    const results: RAGNoteInfo[] = []

    for (const meta of notes) {
      try {
        const absPath = path.join(this.vaultPath, meta.path)
        const raw = await fs.readFile(absPath, "utf8")
        const parsed = parseFrontmatter(raw)
        results.push({ meta, content: parsed.content })
      } catch (err) {
        // Skip unreadable notes so one bad file cannot abort the whole pass.
        console.error(`[VaultManager] Failed to read note ${meta.path}:`, err)
      }
    }

    return results
  }

  /**
   * Get notes modified since a given timestamp (epoch ms).
   * Use this for incremental RAG re-indexing runs.
+ */ + async getNotesModifiedSince(sinceMs: number): Promise { + if (!this.vaultPath || !this.db) throw new Error("Vault not initialized") + + const notes = this.db.getNotesModifiedSince(sinceMs) + const results: RAGNoteInfo[] = [] + + for (const meta of notes) { + try { + const absPath = path.join(this.vaultPath, meta.path) + const raw = await fs.readFile(absPath, "utf8") + const parsed = parseFrontmatter(raw) + results.push({ meta, content: parsed.content }) + } catch (err) { + console.error(`[VaultManager] Failed to read note ${meta.path}:`, err) + } + } + + return results + } + + /** + * Get notes that the RAG pipeline needs to (re)index. + * Covers: never embedded (pending), content changed (stale), or previous error. + */ + async getNotesNeedingReindex(): Promise { + if (!this.vaultPath || !this.db) throw new Error("Vault not initialized") + + const notes = this.db.getNotesNeedingReindex() + const results: RAGNoteInfo[] = [] + + for (const meta of notes) { + try { + const absPath = path.join(this.vaultPath, meta.path) + const raw = await fs.readFile(absPath, "utf8") + const parsed = parseFrontmatter(raw) + results.push({ meta, content: parsed.content }) + } catch (err) { + console.error(`[VaultManager] Failed to read note ${meta.path}:`, err) + } + } + + return results + } + + /** + * Update the embedding status for a note. + * Called by the RAG pipeline after successfully indexing (or on failure). 
   *
   * @example
   * await vault.updateEmbeddingStatus(noteId, "indexed")
   * await vault.updateEmbeddingStatus(noteId, "error")
   */
  async updateEmbeddingStatus(noteId: NoteID, status: EmbeddingStatus): Promise {
    if (!this.db) throw new Error("Vault not initialized")
    this.db.updateEmbeddingStatus(noteId, status)
  }

  // ==================== HELPERS ====================

  /**
   * Sanitize filename (remove invalid characters)
   * Lossy by design: strips invalid characters, collapses whitespace runs to
   * dashes, lowercases, and truncates to 100 chars — so distinct titles can
   * collide after sanitization; callers must check for path collisions.
   */
  private sanitizeFilename(filename: string): string {
    return filename
      .replace(/[<>:"/\\|?*\x00-\x1F]/g, "") // Remove invalid chars
      .replace(/\s+/g, "-") // Replace spaces with dashes
      .toLowerCase()
      .slice(0, 100) // Limit length
  }

  /**
   * Close database and cleanup
   * Stops the file watcher first, then closes and clears the DB handle and
   * vault path, returning the manager to its uninitialized state.
   */
  async close(): Promise {
    await this.stopWatcher()
    if (this.db) {
      this.db.close()
      this.db = null
    }
    this.vaultPath = null
  }
}
diff --git a/apps/storage/test/vaultManager.test.ts b/apps/storage/test/vaultManager.test.ts
new file mode 100644
index 0000000..b0b33f4
--- /dev/null
+++ b/apps/storage/test/vaultManager.test.ts
@@ -0,0 +1,701 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest"
import os from "os"
import path from "path"
import fs from "fs/promises"
import { VaultManager, EVENTS } from "../src/index"
import type {
  NoteCreatedPayload,
  NoteUpdatedPayload,
  NoteDeletedPayload,
  NoteRenamedPayload,
  NoteMovedPayload,
  FolderCreatedPayload,
  FolderMovedPayload,
  VaultReadyPayload,
} from "../src/types"

// Each test gets its own throwaway vault directory under the OS temp dir.
const makeTempDir = () => fs.mkdtemp(path.join(os.tmpdir(), "smart-notes-test-"))

describe("VaultManager", () => {
  let vault: VaultManager
  let vaultPath: string

  beforeEach(async () => {
    vault = new VaultManager()
    vaultPath = await makeTempDir()
    await vault.initializeVault(vaultPath)
  })

  afterEach(async () => {
    await vault.close()
    await fs.rm(vaultPath, { recursive: true, force: true })
  })

  describe("Vault initialization", () => {
    it("creates system
directories on init", async () => { + await expect(fs.stat(path.join(vaultPath, ".grimoire"))).resolves.toBeTruthy() + await expect(fs.stat(path.join(vaultPath, ".trash"))).resolves.toBeTruthy() + await expect(fs.stat(path.join(vaultPath, ".conflicts"))).resolves.toBeTruthy() + }) + + it("emits VAULT_READY with correct payload", async () => { + const v2 = new VaultManager() + const dir = await makeTempDir() + const events: VaultReadyPayload[] = [] + v2.on(EVENTS.VAULT_READY, (p: VaultReadyPayload) => events.push(p)) + await v2.initializeVault(dir) + await v2.close() + await fs.rm(dir, { recursive: true, force: true }) + expect(events).toHaveLength(1) + expect(events[0]).toBeDefined() + expect(events[0]!.vaultPath).toBe(dir) + expect(typeof events[0]!.scanTimeMs).toBe("number") + }) + + it("preserves note IDs across vault re-opens", async () => { + const meta = await vault.createNote("", "Stable ID Note", "content") + await vault.close() + vault = new VaultManager() + await vault.initializeVault(vaultPath) + const notes = await vault.listNotes() + expect(notes).toHaveLength(1) + expect(notes[0]).toBeDefined() + expect(notes[0]!.note_id).toBe(meta.note_id) + }) + + it("soft-deletes DB-tracked notes that no longer exist on disk", async () => { + const meta = await vault.createNote("", "Will Vanish", "") + const absPath = path.join(vaultPath, meta.path) + await fs.unlink(absPath) + await vault.close() + vault = new VaultManager() + await vault.initializeVault(vaultPath) + const notes = await vault.listNotes() + expect(notes).toHaveLength(0) + }) + + it("throws when vault path is not a directory", async () => { + const v2 = new VaultManager() + const filePath = path.join(vaultPath, "file.txt") + await fs.writeFile(filePath, "content") + await expect(v2.initializeVault(filePath)).rejects.toThrow("directory") + await v2.close() + }) + + it("is safe to call initializeVault twice on same instance", async () => { + await vault.createNote("", "Note A", "") + await 
vault.initializeVault(vaultPath) + const notes = await vault.listNotes() + expect(notes).toHaveLength(1) + }) + }) + + describe("Note CRUD", () => { + it("creates a note and returns metadata", async () => { + const meta = await vault.createNote("", "Hello World", "body text") + expect(meta.note_id).toBeTruthy() + expect(meta.display_name).toBe("Hello World") + expect(meta.folder_path).toBe("") + expect(meta.embedding_status).toBe("pending") + expect(meta.source).toBe("md") + }) + + it("creates the note file with frontmatter on disk", async () => { + const meta = await vault.createNote("", "Disk Check", "some body") + const raw = await fs.readFile(path.join(vaultPath, meta.path), "utf8") + expect(raw).toContain("title: Disk Check") + expect(raw).toContain("some body") + }) + + it("creates a note inside a subfolder, creating directories", async () => { + const meta = await vault.createNote("projects/work", "Work Note", "") + expect(meta.folder_path).toBe("projects/work") + await expect( + fs.stat(path.join(vaultPath, "projects", "work", "work-note.md")) + ).resolves.toBeTruthy() + }) + + it("reads note content and user frontmatter", async () => { + const m = await vault.createNote("", "Read Test", "Hello content", ["tag1"]) + const { content, userFrontmatter } = await vault.readNote(m.note_id) + expect(content.trim()).toBe("Hello content") + expect(userFrontmatter?.tags).toContain("tag1") + }) + + it("updates note content and marks embedding as stale", async () => { + const meta = await vault.createNote("", "Update Test", "original") + await vault.updateNote(meta.note_id, "updated body") + const { content } = await vault.readNote(meta.note_id) + expect(content.trim()).toBe("updated body") + const [updated] = await vault.listNotes() + expect(updated).toBeDefined() + expect(updated!.embedding_status).toBe("stale") + }) + + it("updates note with new tags", async () => { + const meta = await vault.createNote("", "Tag Update", "body", ["old"]) + await 
vault.updateNote(meta.note_id, "body", ["new1", "new2"]) + const tags = await vault.getAllTags() + const tagNames = tags.map((t) => t.tag) + expect(tagNames).toContain("new1") + expect(tagNames).toContain("new2") + expect(tagNames).not.toContain("old") + }) + + it("soft-deletes a note to trash", async () => { + const meta = await vault.createNote("", "To Trash", "") + await vault.deleteNote(meta.note_id) + expect(await vault.listNotes()).toHaveLength(0) + const trashed = await vault.listTrashedNotes() + expect(trashed).toHaveLength(1) + expect(trashed[0]).toBeDefined() + expect(trashed[0]!.note_id).toBe(meta.note_id) + }) + + it("soft-delete moves file to .trash directory", async () => { + const meta = await vault.createNote("", "Trash File", "") + const originalAbs = path.join(vaultPath, meta.path) + await vault.deleteNote(meta.note_id) + await expect(fs.stat(originalAbs)).rejects.toThrow() + const trashEntries = await fs.readdir(path.join(vaultPath, ".trash")) + expect(trashEntries.length).toBeGreaterThan(0) + }) + + it("permanently deletes a note from disk and DB", async () => { + const meta = await vault.createNote("", "Permanent Delete", "") + const absPath = path.join(vaultPath, meta.path) + await vault.deleteNote(meta.note_id, true) + await expect(fs.stat(absPath)).rejects.toThrow() + expect(await vault.listNotes()).toHaveLength(0) + expect(await vault.listTrashedNotes()).toHaveLength(0) + }) + + it("throws on read for unknown note ID", async () => { + await expect(vault.readNote("nonexistent-id")).rejects.toThrow("not found") + }) + + it("throws when creating a note with an empty title", async () => { + await expect(vault.createNote("", " ", "body")).rejects.toThrow("empty") + }) + + it("throws on title exceeding 200 characters", async () => { + await expect(vault.createNote("", "a".repeat(201), "")).rejects.toThrow("long") + }) + + it("throws on duplicate note path in same folder", async () => { + await vault.createNote("", "Duplicate", "") + await 
expect(vault.createNote("", "Duplicate", "")).rejects.toThrow() + }) + + it("sanitizes path traversal in folder path", async () => { + const meta = await vault.createNote("../../etc", "Traversal Note", "") + expect(meta.folder_path).not.toContain("..") + }) + }) + + describe("Conflict detection", () => { + it("detects conflict when file is modified externally between read and write", async () => { + const meta = await vault.createNote("", "Conflict Note", "original content") + const absPath = path.join(vaultPath, meta.path) + await fs.writeFile(absPath, "---\ntitle: Conflict Note\n---\nexternally modified\n", "utf8") + const events: any[] = [] + vault.on(EVENTS.CONFLICT_DETECTED, (p) => events.push(p)) + await expect(vault.updateNote(meta.note_id, "my attempted change")).rejects.toThrow("Conflict detected") + expect(events).toHaveLength(1) + expect(events[0]!.note_id).toBe(meta.note_id) + }) + + it("saves conflict file to .conflicts directory", async () => { + const meta = await vault.createNote("", "Conflict Save", "original") + const absPath = path.join(vaultPath, meta.path) + await fs.writeFile(absPath, "---\ntitle: Conflict Save\n---\nexternal edit\n", "utf8") + try { + await vault.updateNote(meta.note_id, "attempted") + } catch {} + const conflictEntries = await fs.readdir(path.join(vaultPath, ".conflicts")) + expect(conflictEntries.some((f) => !f.endsWith(".json"))).toBe(true) + }) + }) + + describe("Note rename and move", () => { + it("renames a note — updates file name, frontmatter title, and DB display_name", async () => { + const meta = await vault.createNote("", "Old Title", "body") + const updated = await vault.renameNote(meta.note_id, "New Title") + expect(updated.display_name).toBe("New Title") + const { userFrontmatter } = await vault.readNote(meta.note_id) + expect(userFrontmatter?.title).toBe("New Title") + await expect(fs.stat(path.join(vaultPath, "new-title.md"))).resolves.toBeTruthy() + await expect(fs.stat(path.join(vaultPath, 
"old-title.md"))).rejects.toThrow() + }) + + it("rename is a no-op when the sanitized filename does not change", async () => { + const meta = await vault.createNote("", "Same Name", "") + const updated = await vault.renameNote(meta.note_id, "Same Name") + expect(updated.note_id).toBe(meta.note_id) + }) + + it("throws on rename to empty title", async () => { + const meta = await vault.createNote("", "Rename Me", "") + await expect(vault.renameNote(meta.note_id, " ")).rejects.toThrow("empty") + }) + + it("throws on rename collision with existing note", async () => { + const a = await vault.createNote("", "Note A", "") + await vault.createNote("", "Note B", "") + await expect(vault.renameNote(a.note_id, "Note B")).rejects.toThrow() + }) + + it("moves a note to a different folder", async () => { + const meta = await vault.createNote("", "Move Me", "body") + await vault.createFolder("archive") + const moved = await vault.moveNote(meta.note_id, "archive") + expect(moved.folder_path).toBe("archive") + await expect(fs.stat(path.join(vaultPath, "archive", "move-me.md"))).resolves.toBeTruthy() + await expect(fs.stat(path.join(vaultPath, "move-me.md"))).rejects.toThrow() + }) + + it("moveNote to same folder is a no-op", async () => { + const meta = await vault.createNote("base", "No-op Move", "") + const result = await vault.moveNote(meta.note_id, "base") + expect(result.path).toBe(meta.path) + }) + + it("emits NOTE_RENAMED with old and new display names", async () => { + const meta = await vault.createNote("", "Rename Event", "") + const events: NoteRenamedPayload[] = [] + vault.on(EVENTS.NOTE_RENAMED, (p: NoteRenamedPayload) => events.push(p)) + await vault.renameNote(meta.note_id, "Renamed") + expect(events[0]).toBeDefined() + expect(events[0]!.oldDisplayName).toBe("Rename Event") + expect(events[0]!.newDisplayName).toBe("Renamed") + }) + + it("emits NOTE_MOVED with correct folder paths", async () => { + const meta = await vault.createNote("", "Move Event", "") + await 
vault.createFolder("dest") + const events: NoteMovedPayload[] = [] + vault.on(EVENTS.NOTE_MOVED, (p: NoteMovedPayload) => events.push(p)) + await vault.moveNote(meta.note_id, "dest") + expect(events[0]).toBeDefined() + expect(events[0]!.oldFolderPath).toBe("") + expect(events[0]!.newFolderPath).toBe("dest") + }) + }) + + describe("Tags", () => { + it("creates a note with tags stored in DB and frontmatter", async () => { + const meta = await vault.createNote("", "Tagged Note", "", ["work", "urgent"]) + const tags = await vault.getAllTags() + const tagNames = tags.map((t) => t.tag) + expect(tagNames).toContain("work") + expect(tagNames).toContain("urgent") + const { userFrontmatter } = await vault.readNote(meta.note_id) + expect(userFrontmatter?.tags).toContain("work") + }) + + it("searches notes by a single tag", async () => { + await vault.createNote("", "Note A", "", ["alpha"]) + await vault.createNote("", "Note B", "", ["beta"]) + const results = await vault.searchByTags(["alpha"]) + expect(results).toHaveLength(1) + expect(results[0]).toBeDefined() + expect(results[0]!.display_name).toBe("Note A") + }) + + it("searches notes matching any of multiple tags", async () => { + await vault.createNote("", "Note A", "", ["alpha"]) + await vault.createNote("", "Note B", "", ["beta"]) + await vault.createNote("", "Note C", "", ["gamma"]) + const results = await vault.searchByTags(["alpha", "beta"]) + expect(results).toHaveLength(2) + }) + + it("updateNoteTags persists to DB and frontmatter without touching content", async () => { + const meta = await vault.createNote("", "Tag Update", "original content") + await vault.updateNoteTags(meta.note_id, ["new-tag"]) + const { content } = await vault.readNote(meta.note_id) + expect(content.trim()).toBe("original content") + const tags = await vault.getAllTags() + expect(tags.map((t) => t.tag)).toContain("new-tag") + }) + + it("getAllTags returns usage counts", async () => { + await vault.createNote("", "A", "", ["shared"]) + 
await vault.createNote("", "B", "", ["shared"]) + await vault.createNote("", "C", "", ["unique"]) + const tags = await vault.getAllTags() + const shared = tags.find((t) => t.tag === "shared") + expect(shared?.count).toBe(2) + }) + + it("clears all tags when empty array is passed to updateNoteTags", async () => { + const meta = await vault.createNote("", "Clear Tags", "", ["old"]) + await vault.updateNoteTags(meta.note_id, []) + const tags = await vault.getAllTags() + expect(tags).toHaveLength(0) + }) + + it("normalises tags to lowercase", async () => { + const meta = await vault.createNote("", "Case Tags", "", ["Work", "URGENT"]) + const tags = await vault.getAllTags() + const tagNames = tags.map((t) => t.tag) + expect(tagNames).toContain("work") + expect(tagNames).toContain("urgent") + }) + }) + + describe("Folder operations", () => { + it("creates a root-level folder and registers it in DB", async () => { + const folder = await vault.createFolder("projects") + expect(folder.name).toBe("projects") + expect(folder.parent_path).toBe("") + expect(folder.folder_id).toBeTruthy() + await expect(fs.stat(path.join(vaultPath, "projects"))).resolves.toBeTruthy() + }) + + it("creates a nested folder with correct parent_path", async () => { + const folder = await vault.createFolder("projects/client-x") + expect(folder.parent_path).toBe("projects") + expect(folder.name).toBe("client-x") + }) + + it("createFolder is idempotent — returns same folder if already exists", async () => { + const f1 = await vault.createFolder("dup") + const f2 = await vault.createFolder("dup") + expect(f1.folder_id).toBe(f2.folder_id) + }) + + it("creates a folder with icon and pinned options", async () => { + const folder = await vault.createFolder("pinned-folder", { icon: "📌", pinned: true }) + expect(folder.icon).toBe("📌") + expect(folder.pinned).toBe(true) + }) + + it("renames a folder on disk and cascades path updates to all notes", async () => { + const folder = await 
vault.createFolder("old-name") + await vault.createNote("old-name", "Inner Note", "body") + const renamed = await vault.renameFolder(folder.folder_id, "new-name") + expect(renamed.name).toBe("new-name") + expect(renamed.path).toBe("new-name") + const notes = await vault.listNotes() + expect(notes[0]).toBeDefined() + expect(notes[0]!.folder_path).toBe("new-name") + await expect(fs.stat(path.join(vaultPath, "new-name"))).resolves.toBeTruthy() + await expect(fs.stat(path.join(vaultPath, "old-name"))).rejects.toThrow() + }) + + it("renames a folder containing a nested subfolder and cascades all paths", async () => { + await vault.createFolder("parent") + await vault.createFolder("parent/child") + await vault.createNote("parent/child", "Deep Note", "") + const [parent] = await vault.listFolders("") as any[] + await vault.renameFolder(parent.folder_id, "renamed-parent") + const notes = await vault.listNotes() + expect(notes[0]).toBeDefined() + expect(notes[0]!.folder_path).toBe("renamed-parent/child") + const childFolders = await vault.listFolders("renamed-parent") + expect(childFolders).toHaveLength(1) + expect(childFolders[0]).toBeDefined() + expect(childFolders[0]!.path).toBe("renamed-parent/child") + }) + + it("throws on rename to empty name", async () => { + const folder = await vault.createFolder("to-rename") + await expect(vault.renameFolder(folder.folder_id, " ")).rejects.toThrow("empty") + }) + + it("throws on rename collision with existing folder", async () => { + const f1 = await vault.createFolder("folder-a") + await vault.createFolder("folder-b") + await expect(vault.renameFolder(f1.folder_id, "folder-b")).rejects.toThrow() + }) + + it("moves a folder to a new parent and updates all note paths", async () => { + const source = await vault.createFolder("source") + await vault.createFolder("destination") + await vault.createNote("source", "Source Note", "body") + const moved = await vault.moveFolder(source.folder_id, "destination") + 
// NOTE(review): reconstructed from a whitespace-mangled diff chunk — the original
// diff lines (each prefixed "+") were fused onto single physical lines. Test logic
// is unchanged; only formatting and documentation comments were restored.
      // Continuation of the moveFolder test opened above this chunk: the folder
      // "source" was moved under "destination" — verify DB paths and disk layout.
      expect(moved.path).toBe("destination/source")
      expect(moved.parent_path).toBe("destination")
      const notes = await vault.listNotes()
      expect(notes[0]).toBeDefined()
      expect(notes[0]!.folder_path).toBe("destination/source")
      await expect(
        fs.stat(path.join(vaultPath, "destination", "source"))
      ).resolves.toBeTruthy()
      // Old location must be gone from disk.
      await expect(fs.stat(path.join(vaultPath, "source"))).rejects.toThrow()
    })

    it("moves a nested folder to the vault root with empty parent path", async () => {
      await vault.createFolder("parent")
      const child = await vault.createFolder("parent/child")
      // "" is the vault root; moving there drops the parent prefix.
      const moved = await vault.moveFolder(child.folder_id, "")
      expect(moved.path).toBe("child")
      expect(moved.parent_path).toBe("")
      await expect(fs.stat(path.join(vaultPath, "child"))).resolves.toBeTruthy()
    })

    it("moveFolder is a no-op when destination resolves to the same path", async () => {
      const folder = await vault.createFolder("stable")
      const result = await vault.moveFolder(folder.folder_id, "")
      expect(result.path).toBe("stable")
    })

    it("throws on moveFolder collision", async () => {
      const movable = await vault.createFolder("movable")
      await vault.createFolder("target")
      // Pre-create "target/movable" so the move collides with an existing folder.
      await vault.createFolder("target/movable")
      await expect(vault.moveFolder(movable.folder_id, "target")).rejects.toThrow()
    })

    it("emits FOLDER_MOVED event with correct paths", async () => {
      const movable = await vault.createFolder("movable")
      await vault.createFolder("target")
      const events: FolderMovedPayload[] = []
      vault.on(EVENTS.FOLDER_MOVED, (p: FolderMovedPayload) => events.push(p))
      await vault.moveFolder(movable.folder_id, "target")
      expect(events).toHaveLength(1)
      expect(events[0]).toBeDefined()
      expect(events[0]!.oldPath).toBe("movable")
      expect(events[0]!.newPath).toBe("target/movable")
    })

    it("safe-deletes an empty folder from disk and DB", async () => {
      const folder = await vault.createFolder("empty")
      await vault.deleteFolder(folder.folder_id)
      await expect(fs.stat(path.join(vaultPath, "empty"))).rejects.toThrow()
      const folders = await vault.listFolders()
      expect(folders.find((f) => f.folder_id === folder.folder_id)).toBeUndefined()
    })

    it("throws when deleting a non-empty folder without force flag", async () => {
      const folder = await vault.createFolder("occupied")
      await vault.createNote("occupied", "Resident", "")
      await expect(vault.deleteFolder(folder.folder_id)).rejects.toThrow()
    })

    it("force-deletes folder and moves all contained notes to trash", async () => {
      const folder = await vault.createFolder("to-nuke")
      await vault.createNote("to-nuke", "Will Be Trashed", "")
      // force=true: contained notes are trashed, not permanently deleted.
      await vault.deleteFolder(folder.folder_id, true)
      expect(await vault.listNotes()).toHaveLength(0)
      expect(await vault.listTrashedNotes()).toHaveLength(1)
    })

    it("listFolders filters by parent path", async () => {
      await vault.createFolder("root-a")
      await vault.createFolder("root-a/child-1")
      await vault.createFolder("root-a/child-2")
      await vault.createFolder("root-b")
      const children = await vault.listFolders("root-a")
      expect(children).toHaveLength(2)
      expect(children.every((f) => f.parent_path === "root-a")).toBe(true)
    })

    it("emits FOLDER_CREATED with correct payload", async () => {
      const events: FolderCreatedPayload[] = []
      vault.on(EVENTS.FOLDER_CREATED, (p: FolderCreatedPayload) => events.push(p))
      await vault.createFolder("new-folder")
      expect(events).toHaveLength(1)
      expect(events[0]).toBeDefined()
      expect(events[0]!.name).toBe("new-folder")
      // Created at vault root, so parent_path is the empty string.
      expect(events[0]!.parent_path).toBe("")
    })
  })

  // Soft delete moves notes to a .trash folder; restore puts them back in place.
  describe("Trash and restore", () => {
    it("restores a note from trash to its original path", async () => {
      const meta = await vault.createNote("", "Restorable", "restore content")
      const originalPath = meta.path
      await vault.deleteNote(meta.note_id)
      const restored = await vault.restoreFromTrash(meta.note_id)
      expect(restored.path).toBe(originalPath)
      await expect(fs.stat(path.join(vaultPath, originalPath))).resolves.toBeTruthy()
      expect(await vault.listTrashedNotes()).toHaveLength(0)
      expect(await vault.listNotes()).toHaveLength(1)
    })

    it("restored note content is intact after restore", async () => {
      const meta = await vault.createNote("", "Content Restore", "important body")
      await vault.deleteNote(meta.note_id)
      await vault.restoreFromTrash(meta.note_id)
      const { content } = await vault.readNote(meta.note_id)
      // trim(): readNote may include frontmatter/trailing newline around the body.
      expect(content.trim()).toBe("important body")
    })

    it("throws when restoring a non-existent trash item", async () => {
      await expect(vault.restoreFromTrash("fake-id")).rejects.toThrow()
    })
  })

  // Listing, pagination, and title/tag search.
  describe("Search and listing", () => {
    it("lists notes sorted by display_name ascending", async () => {
      await vault.createNote("", "Zeta", "")
      await vault.createNote("", "Alpha", "")
      const notes = await vault.listNotes({ orderBy: "display_name", direction: "asc" })
      expect(notes[0]).toBeDefined()
      expect(notes[0]!.display_name).toBe("Alpha")
      expect(notes[1]).toBeDefined()
      expect(notes[1]!.display_name).toBe("Zeta")
    })

    it("paginates results with limit and offset", async () => {
      await vault.createNote("", "Note 1", "")
      await vault.createNote("", "Note 2", "")
      await vault.createNote("", "Note 3", "")
      const page1 = await vault.listNotes({ limit: 2, offset: 0, orderBy: "display_name", direction: "asc" })
      const page2 = await vault.listNotes({ limit: 2, offset: 2, orderBy: "display_name", direction: "asc" })
      expect(page1).toHaveLength(2)
      // 3 notes with page size 2 leaves a single note on the second page.
      expect(page2).toHaveLength(1)
    })

    it("filters notes by folder path", async () => {
      await vault.createNote("", "Root Note", "")
      await vault.createNote("sub", "Sub Note", "")
      const sub = await vault.listNotes({ folderPath: "sub" })
      expect(sub).toHaveLength(1)
      expect(sub[0]).toBeDefined()
      expect(sub[0]!.display_name).toBe("Sub Note")
    })

    it("searches notes by title with partial match", async () => {
      await vault.createNote("", "Alpha Beta", "")
      await vault.createNote("", "Gamma Delta", "")
      // Lowercase query against "Alpha Beta" — search is case-insensitive and partial.
      const results = await vault.searchNotes("alpha")
      expect(results).toHaveLength(1)
      expect(results[0]).toBeDefined()
      expect(results[0]!.display_name).toBe("Alpha Beta")
    })

    it("search returns empty array when no matches", async () => {
      await vault.createNote("", "Something", "")
      const results = await vault.searchNotes("xxxxxxx")
      expect(results).toHaveLength(0)
    })

    it("searchByTags returns empty array when given no tags", async () => {
      await vault.createNote("", "Tagged", "", ["x"])
      const results = await vault.searchByTags([])
      expect(results).toHaveLength(0)
    })
  })

  describe("Vault stats", () => {
    it("returns correct note count, folder count, and vaultPath", async () => {
      await vault.createNote("", "Note 1", "")
      await vault.createNote("", "Note 2", "")
      await vault.createFolder("folder1")
      const stats = await vault.getVaultStats()
      expect(stats.totalNotes).toBe(2)
      expect(stats.totalFolders).toBe(1)
      expect(stats.vaultPath).toBe(vaultPath)
      expect(stats.storageVersion).toBeTruthy()
    })

    it("does not count trashed notes", async () => {
      const meta = await vault.createNote("", "Trashed", "")
      await vault.deleteNote(meta.note_id)
      const stats = await vault.getVaultStats()
      expect(stats.totalNotes).toBe(0)
    })
  })

  // Bridge methods consumed by the RAG/embedding pipeline.
  describe("RAG pipeline methods", () => {
    it("getNotesNeedingReindex returns notes with pending status", async () => {
      await vault.createNote("", "Unindexed", "")
      const reindex = await vault.getNotesNeedingReindex()
      expect(reindex.length).toBeGreaterThan(0)
      // New notes start in "pending" embedding state.
      expect(reindex[0]!.meta.embedding_status).toBe("pending")
    })

    it("updateEmbeddingStatus marks a note as indexed", async () => {
      const meta = await vault.createNote("", "Index Me", "")
      await vault.updateEmbeddingStatus(meta.note_id, "indexed")
      const [note] = await vault.listNotes()
      expect(note!.embedding_status).toBe("indexed")
    })
// NOTE(review): reconstructed from a whitespace-mangled diff chunk — the original
// diff lines (each prefixed "+") were fused onto single physical lines. Test and
// config content is unchanged; only formatting and comments were restored. The
// "diff --git" header lines below are the patch's own file boundaries, kept verbatim.
    it("updateEmbeddingStatus marks a note as error", async () => {
      const meta = await vault.createNote("", "Fail Index", "")
      await vault.updateEmbeddingStatus(meta.note_id, "error")
      // Errored notes must be offered for reindexing again.
      const reindex = await vault.getNotesNeedingReindex()
      const found = reindex.find((r) => r.meta.note_id === meta.note_id)
      expect(found).toBeTruthy()
    })

    it("indexed notes do not appear in getNotesNeedingReindex", async () => {
      const meta = await vault.createNote("", "Already Indexed", "")
      await vault.updateEmbeddingStatus(meta.note_id, "indexed")
      const reindex = await vault.getNotesNeedingReindex()
      const found = reindex.find((r) => r.meta.note_id === meta.note_id)
      expect(found).toBeUndefined()
    })

    it("getNotesModifiedSince returns notes created after the given timestamp", async () => {
      // NOTE(review): assumes note timestamps are epoch milliseconds — the
      // Date.now() arithmetic here and below relies on it; confirm in the schema.
      const before = Date.now() - 5000
      await vault.createNote("", "Recent Note", "")
      const results = await vault.getNotesModifiedSince(before)
      expect(results).toHaveLength(1)
      expect(results[0]!.meta.display_name).toBe("Recent Note")
    })

    it("getNotesModifiedSince excludes notes created before the timestamp", async () => {
      // A cutoff 5s in the future excludes the note created right now.
      const after = Date.now() + 5000
      await vault.createNote("", "Old Note", "")
      const results = await vault.getNotesModifiedSince(after)
      expect(results).toHaveLength(0)
    })

    it("getAllNotesWithContent returns full note content", async () => {
      await vault.createNote("", "Content Note", "Full body here")
      const results = await vault.getAllNotesWithContent()
      expect(results).toHaveLength(1)
      expect(results[0]!.content.trim()).toBe("Full body here")
    })

    it("getAllNotesWithContent excludes trashed notes", async () => {
      const meta = await vault.createNote("", "Will Trash", "")
      await vault.deleteNote(meta.note_id)
      const results = await vault.getAllNotesWithContent()
      expect(results).toHaveLength(0)
    })
  })

  // Event emission contract: every mutation fires an event with a `source` field
  // so UI listeners can distinguish internal edits from external (watcher) ones.
  describe("Event system", () => {
    it("emits NOTE_CREATED with internal source on create", async () => {
      const events: NoteCreatedPayload[] = []
      vault.on(EVENTS.NOTE_CREATED, (p: NoteCreatedPayload) => events.push(p))
      const meta = await vault.createNote("", "Event Note", "")
      expect(events).toHaveLength(1)
      expect(events[0]!.note_id).toBe(meta.note_id)
      expect(events[0]!.source).toBe("internal")
    })

    it("emits NOTE_UPDATED on content update", async () => {
      const meta = await vault.createNote("", "Update Event", "")
      const events: NoteUpdatedPayload[] = []
      vault.on(EVENTS.NOTE_UPDATED, (p: NoteUpdatedPayload) => events.push(p))
      await vault.updateNote(meta.note_id, "new content")
      expect(events).toHaveLength(1)
      expect(events[0]!.note_id).toBe(meta.note_id)
      expect(events[0]!.source).toBe("internal")
    })

    it("emits NOTE_DELETED with trashed=true on soft delete", async () => {
      const meta = await vault.createNote("", "Delete Event", "")
      const events: NoteDeletedPayload[] = []
      vault.on(EVENTS.NOTE_DELETED, (p: NoteDeletedPayload) => events.push(p))
      await vault.deleteNote(meta.note_id)
      expect(events[0]!.trashed).toBe(true)
      expect(events[0]!.note_id).toBe(meta.note_id)
    })

    it("emits NOTE_DELETED with trashed=false on permanent delete", async () => {
      const meta = await vault.createNote("", "Perm Delete Event", "")
      const events: NoteDeletedPayload[] = []
      vault.on(EVENTS.NOTE_DELETED, (p: NoteDeletedPayload) => events.push(p))
      // second arg true = permanent delete, bypassing the trash.
      await vault.deleteNote(meta.note_id, true)
      expect(events[0]!.trashed).toBe(false)
    })
  })
})
diff --git a/apps/storage/tsconfig.json b/apps/storage/tsconfig.json
new file mode 100644
index 0000000..9b2ed6b
--- /dev/null
+++ b/apps/storage/tsconfig.json
@@ -0,0 +1,30 @@
{
  "compilerOptions": {
    // Environment setup & latest features
    "lib": ["ESNext"],
    "target": "ESNext",
    "module": "Preserve",
    "moduleDetection": "force",
    "jsx": "react-jsx",
    "allowJs": true,

    // Bundler mode
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,

    // Best practices
    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,
    "noImplicitOverride": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false,
    "esModuleInterop": true
  }
}
diff --git a/apps/storage/vitest.config.ts b/apps/storage/vitest.config.ts
new file mode 100644
index 0000000..74d7f0b
--- /dev/null
+++ b/apps/storage/vitest.config.ts
@@ -0,0 +1,15 @@
import { defineConfig } from "vitest/config"

export default defineConfig({
  test: {
    environment: "node",
    globals: false,
    testTimeout: 15000,
    // forked worker processes isolate each test file's SQLite/vault state
    pool: "forks",
    poolOptions: {
      forks: {
        singleFork: false,
      },
    },
  },
})