diff --git a/api/Cargo.lock b/api/Cargo.lock
index 6956e571..6ccee505 100644
--- a/api/Cargo.lock
+++ b/api/Cargo.lock
@@ -166,6 +166,7 @@ dependencies = [
  "anyhow",
  "async-trait",
  "base64 0.21.7",
+ "chacha20poly1305",
  "chrono",
  "contracts",
  "domain",
@@ -1113,6 +1114,30 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"

+[[package]]
+name = "chacha20"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
+]
+
+[[package]]
+name = "chacha20poly1305"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35"
+dependencies = [
+ "aead",
+ "chacha20",
+ "cipher",
+ "poly1305",
+ "zeroize",
+]
+
 [[package]]
 name = "chrono"
 version = "0.4.42"
@@ -1135,6 +1160,7 @@ checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
 dependencies = [
  "crypto-common",
  "inout",
+ "zeroize",
 ]

 [[package]]
@@ -1585,6 +1611,33 @@ dependencies = [
  "cipher",
 ]

+[[package]]
+name = "curve25519-dalek"
+version = "4.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "curve25519-dalek-derive",
+ "digest",
+ "fiat-crypto",
+ "rustc_version",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "curve25519-dalek-derive"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.106",
+]
+
 [[package]]
 name = "darling"
 version = "0.14.4"
@@ -1824,6 +1877,30 @@ dependencies = [
  "signature 1.6.4",
 ]

+[[package]]
+name = "ed25519"
+version = "2.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53"
+dependencies = [
+ "pkcs8 0.10.2",
+ "signature 2.2.0",
+]
+
+[[package]]
+name = "ed25519-dalek"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9"
+dependencies = [
+ "curve25519-dalek",
+ "ed25519",
+ "serde",
+ "sha2",
+ "subtle",
+ "zeroize",
+]
+
 [[package]]
 name = "either"
 version = "1.15.0"
@@ -2040,6 +2117,12 @@ dependencies = [
  "subtle",
 ]

+[[package]]
+name = "fiat-crypto"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
+
 [[package]]
 name = "filetime"
 version = "0.2.26"
@@ -2331,21 +2414,6 @@ dependencies = [
  "stable_deref_trait",
 ]

-[[package]]
-name = "git2"
-version = "0.18.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70"
-dependencies = [
- "bitflags 2.9.4",
- "libc",
- "libgit2-sys",
- "log",
- "openssl-probe",
- "openssl-sys",
- "url",
-]
-
 [[package]]
 name = "glob"
 version = "0.3.3"
@@ -2884,14 +2952,15 @@ dependencies = [
  "aws-config",
  "aws-sdk-s3",
  "base64 0.21.7",
+ "chacha20poly1305",
  "chrono",
  "comrak",
  "domain",
  "dotenvy",
+ "ed25519-dalek",
  "extism",
  "futures-core",
  "futures-util",
- "git2",
  "hex",
  "hmac",
  "htmlescape",
@@ -2900,7 +2969,6 @@
  "mime_guess",
  "notify",
  "once_cell",
- "pandoc",
  "password-hash 0.5.0",
  "rand 0.8.5",
  "redis",
@@ -2923,6 +2991,7 @@
  "walkdir",
  "yrs",
  "yrs-warp",
+ "zeroize",
  "zip 0.6.6",
 ]

@@ -3139,20 +3208,6 @@ version = "0.2.175"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"

-[[package]]
-name = "libgit2-sys"
-version = "0.16.2+1.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8"
-dependencies = [
- "cc",
- "libc",
- "libssh2-sys",
- "libz-sys",
- "openssl-sys",
- "pkg-config",
-]
-
 [[package]]
 name = "libloading"
 version = "0.8.8"
@@ -3723,15 +3778,6 @@ dependencies = [
  "sha2",
 ]

-[[package]]
-name = "pandoc"
-version = "0.8.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "463d53d1a77a4291203dbf9d461365609e6857c95bd7d807098bffdc0a02a65c"
-dependencies = [
- "itertools 0.12.1",
-]
-
 [[package]]
 name = "parking"
 version = "2.2.1"
@@ -3965,6 +4011,17 @@ dependencies = [
  "time",
 ]

+[[package]]
+name = "poly1305"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf"
+dependencies = [
+ "cpufeatures",
+ "opaque-debug",
+ "universal-hash",
+]
+
 [[package]]
 name = "polyval"
 version = "0.6.2"
@@ -4026,14 +4083,18 @@ dependencies = [
  "anyhow",
  "application",
  "axum",
+ "base64 0.22.1",
+ "bytes",
  "chrono",
  "contracts",
  "domain",
  "futures-util",
  "http 1.3.1",
  "rand 0.8.5",
+ "reqwest 0.12.23",
  "serde",
  "serde_json",
+ "ssh2",
  "tokio",
  "tracing",
  "utoipa",
@@ -5355,6 +5416,18 @@ dependencies = [
  "uuid",
 ]

+[[package]]
+name = "ssh2"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f84d13b3b8a0d4e91a2629911e951db1bb8671512f5c09d7d4ba34500ba68c8"
+dependencies = [
+ "bitflags 2.9.4",
+ "libc",
+ "libssh2-sys",
+ "parking_lot",
+]
+
 [[package]]
 name = "stable_deref_trait"
 version = "1.2.0"
@@ -7435,6 +7508,20 @@
 name = "zeroize"
 version = "1.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
+dependencies = [
+ "zeroize_derive",
+]
+
+[[package]]
+name = "zeroize_derive"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.106",
+]

 [[package]]
 name = "zerotrie"
diff --git a/api/Dockerfile b/api/Dockerfile
index 2f9819a1..10b2fff1 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -24,9 +24,6 @@ RUN apt-get update \
     ca-certificates \
     wget \
     gosu \
-    pandoc \
-    wkhtmltopdf \
-    fonts-noto-cjk \
     && rm -rf /var/lib/apt/lists/*

 ENV RUST_LOG=api=info,axum=info,tower_http=info \
diff --git a/api/crates/application/Cargo.toml b/api/crates/application/Cargo.toml
index 1768a623..3dd41088 100644
--- a/api/crates/application/Cargo.toml
+++ b/api/crates/application/Cargo.toml
@@ -31,6 +31,7 @@
 tracing = "0.1"
 urlencoding = "2"
 uuid = { version = "1", features = ["v4", "serde"] }
 yrs = { version = "0.24", features = ["sync"] }
+chacha20poly1305 = "0.10"
 zip = { version = "0.6" }

 [dev-dependencies]
diff --git a/api/crates/application/src/core/ports/storage/storage_port.rs b/api/crates/application/src/core/ports/storage/storage_port.rs
index 2add6edd..e7fcf7c0 100644
--- a/api/crates/application/src/core/ports/storage/storage_port.rs
+++ b/api/crates/application/src/core/ports/storage/storage_port.rs
@@ -29,6 +29,41 @@ pub trait StorageResolverPort: Send + Sync {
         original_filename: Option<&str>,
         bytes: &[u8],
     ) -> PortResult<String>;
+
+    // --- Public file storage (for E2EE decrypted files) ---
+
+    /// Store a public (decrypted) file for a published document
+    /// Returns the storage path: public/{workspace_id}/{document_id}/{file_id}
+    async fn store_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+        bytes: &[u8],
+    ) -> PortResult<String>;
+
+    /// Read a public file
+    async fn read_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<Vec<u8>>;
+
+    /// Delete a public file
+    async fn delete_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<()>;
+
+    /// Delete all public files for a document
+    async fn delete_public_files_for_document(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+    ) -> PortResult<()>;
 }

 #[async_trait]
diff --git a/api/crates/application/src/core/services/mod.rs b/api/crates/application/src/core/services/mod.rs
index 084206c6..13a52c80 100644
--- a/api/crates/application/src/core/services/mod.rs
+++ b/api/crates/application/src/core/services/mod.rs
@@ -8,6 +8,5 @@ pub mod markdown;
 pub mod markdown_render;
 pub mod metrics;
 pub mod storage;
-pub mod tagging;
 pub mod utils;
 pub mod worker;
diff --git a/api/crates/application/src/core/services/storage/ingest/attachments.rs b/api/crates/application/src/core/services/storage/ingest/attachments.rs
index d05ec35b..1c4a02ab 100644
--- a/api/crates/application/src/core/services/storage/ingest/attachments.rs
+++ b/api/crates/application/src/core/services/storage/ingest/attachments.rs
@@ -27,17 +27,29 @@ impl StorageIngestService {
             }
             Err(err) => return Err(err.into()),
         };
+
+        // E2EE: Validate RME1 format
+        if bytes.len() < 4 || &bytes[0..4] != RME1_MAGIC {
+            warn!(
+                file_id = %file_id,
+                doc_id = %doc_id,
+                repo_path = repo_path,
+                "storage_ingest_attachment_invalid_rme1_format"
+            );
+            return Ok(());
+        }
+
         let size = bytes.len() as i64;
-        let hash = sha256_hex(&bytes);
+        let encrypted_hash = sha256_hex(&bytes);
         self.files_repo
-            .update_hash_and_size(file_id, size, &hash)
+            .update_size_and_hash(file_id, size, &encrypted_hash)
             .await?;
         let mut payload_obj = serde_json::Map::new();
         payload_obj.insert("repo_path".into(), json!(repo_path));
         payload_obj.insert("storage_path".into(), json!(rel_path));
         payload_obj.insert("backend".into(), json!(event.backend.as_str()));
         payload_obj.insert("size".into(), json!(size));
-        payload_obj.insert("content_hash".into(), json!(hash));
+        payload_obj.insert("encrypted_hash".into(), json!(encrypted_hash));
         if let Some(prev) = previous_repo_path {
             payload_obj.insert("previous_path".into(), json!(prev));
         }
diff --git a/api/crates/application/src/core/services/storage/ingest/documents.rs b/api/crates/application/src/core/services/storage/ingest/documents.rs
index 31c59f2e..12424180 100644
--- a/api/crates/application/src/core/services/storage/ingest/documents.rs
+++ b/api/crates/application/src/core/services/storage/ingest/documents.rs
@@ -9,12 +9,13 @@ impl StorageIngestService {
         payload: MarkdownIngestPayload,
         previous_repo_path: Option<&str>,
     ) -> anyhow::Result<()> {
+        // E2EE: Skip recent export check using encrypted_hash
         if event.backend.is_fs_watcher()
            && event.actor_id.is_none()
            && self.recent_exports.is_recent_match(
                event.workspace_id,
                repo_path,
-               &payload.content_hash,
+               &payload.encrypted_hash,
            )
        {
            debug!(
@@ -24,25 +25,15 @@
            );
            return Ok(());
        }
-        let snapshot = snapshot_from_markdown(&payload.body);
-        self.realtime
-            .apply_snapshot(&doc.id.to_string(), snapshot.as_slice())
-            .await?;
-        // Persist back to storage only for API/actor initiated ingests; fs_watcher/reconcile events
-        // originate from the filesystem itself and writing would re-trigger the watcher endlessly.
-        if event.actor_id.is_some()
-            && let Err(err) = self.realtime.force_persist(&doc.id.to_string()).await
-        {
-            warn!(
-                error = ?err,
-                doc_id = %doc.id,
-                "storage_ingest_force_persist_failed"
-            );
-        }
+
+        // E2EE: No Yjs snapshot conversion - encrypted data is handled by client via WebSocket
+        // Server only stores encrypted bytes as-is
+
         let mut payload_obj = serde_json::Map::new();
         payload_obj.insert("repo_path".into(), json!(repo_path));
         payload_obj.insert("backend".into(), json!(event.backend.as_str()));
-        payload_obj.insert("content_hash".into(), json!(payload.content_hash));
+        payload_obj.insert("encrypted_hash".into(), json!(payload.encrypted_hash));
+        payload_obj.insert("size".into(), json!(payload.size));
         payload_obj.insert("doc_type".into(), json!(doc.doc_type.as_str()));
         if let Some(prev) = previous_repo_path {
             payload_obj.insert("previous_path".into(), json!(prev));
@@ -73,28 +64,7 @@
         parse_markdown_payload(bytes)
     }

-    pub(super) async fn resolve_doc_from_front_matter(
-        &self,
-        user_id: Uuid,
-        payload: &MarkdownIngestPayload,
-    ) -> anyhow::Result<Option<ResolvedDocument>> {
-        let Some(doc_id) = payload.doc_id_hint else {
-            return Ok(None);
-        };
-        let Some(meta) = self
-            .document_repo
-            .get_meta_for_owner(doc_id, user_id)
-            .await?
-        else {
-            return Ok(None);
-        };
-        Ok(Some(ResolvedDocument::new(
-            doc_id,
-            meta.doc_type,
-            meta.path,
-            meta.archived_at.is_some(),
-        )))
-    }
+    // E2EE: resolve_doc_from_front_matter removed - document ID resolved from storage path

     pub(super) async fn handle_doc_delete(
         &self,
diff --git a/api/crates/application/src/core/services/storage/ingest/handler.rs b/api/crates/application/src/core/services/storage/ingest/handler.rs
index 3fda8bd3..eed91532 100644
--- a/api/crates/application/src/core/services/storage/ingest/handler.rs
+++ b/api/crates/application/src/core/services/storage/ingest/handler.rs
@@ -153,61 +153,14 @@ impl StorageIngestHandler for StorageIngestService {
             return Ok(());
         }

+        // E2EE: No front-matter resolution - document ID must be resolved from storage path
+        // New documents are created via API, not from storage ingest
         if event.kind == StorageIngestKind::Upsert && rel_path.ends_with(".md") {
-            let payload = match self.load_markdown_payload(&rel_path).await {
-                Ok(payload) => payload,
-                Err(err) if is_not_found_error(&err) => {
-                    info!(
-                        user_id = %event.workspace_id,
-                        repo_path = repo_path,
-                        "storage_ingest_missing_source_skipped"
-                    );
-                    self.storage_projection
-                        .delete_relative_path(&rel_path)
-                        .await?;
-                    return Ok(());
-                }
-                Err(err) => return Err(err),
-            };
-            if let Some(doc) = self
-                .resolve_doc_from_front_matter(event.workspace_id, &payload)
-                .await?
-            {
-                if doc.is_folder() {
-                    warn!(
-                        doc_id = %doc.id,
-                        repo_path = repo_path,
-                        "storage_ingest_folder_event_skipped"
-                    );
-                } else if doc.is_archived() {
-                    warn!(
-                        doc_id = %doc.id,
-                        repo_path = repo_path,
-                        "storage_ingest_archived_doc_skipped"
-                    );
-                } else {
-                    if !self
-                        .reconcile_repo_path(&doc, event.workspace_id, &rel_path)
-                        .await?
-                    {
-                        warn!(
-                            doc_id = %doc.id,
-                            repo_path = repo_path,
-                            "storage_ingest_repo_path_rejected"
-                        );
-                        return Ok(());
-                    }
-                    self.handle_doc_upsert(
-                        &doc,
-                        &repo_path,
-                        event,
-                        payload,
-                        payload_previous_repo_path.as_deref(),
-                    )
-                    .await?;
-                }
-                return Ok(());
-            }
+            info!(
+                user_id = %event.workspace_id,
+                repo_path = repo_path,
+                "storage_ingest_orphan_encrypted_file"
+            );
         }

         if event.kind == StorageIngestKind::Delete {
diff --git a/api/crates/application/src/core/services/storage/ingest/markdown.rs b/api/crates/application/src/core/services/storage/ingest/markdown.rs
index e344fcad..38f1db98 100644
--- a/api/crates/application/src/core/services/storage/ingest/markdown.rs
+++ b/api/crates/application/src/core/services/storage/ingest/markdown.rs
@@ -2,78 +2,25 @@ use super::*;

 #[derive(Debug, Clone)]
 pub(super) struct MarkdownIngestPayload {
-    pub(super) doc_id_hint: Option<Uuid>,
-    pub(super) body: String,
-    pub(super) content_hash: String,
-}
-
-#[derive(Debug, Deserialize)]
-struct MarkdownFrontMatter {
-    id: Option<Uuid>,
+    pub(super) encrypted_hash: String,
+    pub(super) size: i64,
+    /// True if file is encrypted (RME1 format), false if legacy plaintext
+    #[cfg_attr(not(test), allow(dead_code))]
+    pub(super) is_encrypted: bool,
 }

+/// Parse file payload - supports both RME1 encrypted format and legacy plaintext
 pub(super) fn parse_markdown_payload(bytes: Vec<u8>) -> anyhow::Result<MarkdownIngestPayload> {
-    let content_hash = sha256_hex(&bytes);
-    // Accept lossy UTF-8 to avoid retry storms on malformed files; non-UTF8 bytes become U+FFFD.
-    let text = String::from_utf8_lossy(&bytes).to_string();
-    let trimmed = text.trim_start_matches('\u{feff}');
-    if let Some((front, body)) = split_front_matter(trimmed)
-        && let Ok(front_matter) = serde_yaml::from_str::<MarkdownFrontMatter>(front)
-        && let Some(doc_id) = front_matter.id
-    {
-        return Ok(MarkdownIngestPayload {
-            doc_id_hint: Some(doc_id),
-            body: body.to_string(),
-            content_hash,
-        });
-    }
-    Ok(MarkdownIngestPayload {
-        doc_id_hint: None,
-        body: trimmed.to_string(),
-        content_hash,
-    })
-}
+    let is_encrypted = bytes.len() >= 4 && &bytes[0..4] == RME1_MAGIC;

-fn split_front_matter(input: &str) -> Option<(&str, &str)> {
-    let after_open = input
-        .strip_prefix("---\r\n")
-        .or_else(|| input.strip_prefix("---\n"))?;
-    if let Some((front_len, body_start)) = find_front_matter_end(after_open) {
-        let front = &after_open[..front_len];
-        let body = &after_open[body_start..];
-        return Some((front, body));
-    }
-    None
-}
+    let encrypted_hash = sha256_hex(&bytes);
+    let size = bytes.len() as i64;

-fn find_front_matter_end(s: &str) -> Option<(usize, usize)> {
-    let bytes = s.as_bytes();
-    let mut idx = 0;
-    while idx < bytes.len() {
-        if bytes[idx] == b'\n' {
-            let after_newline = &s[idx + 1..];
-            if after_newline.starts_with("---") {
-                let mut body_start = idx + 1 + 3;
-                let mut remainder = &s[body_start..];
-                // Skip any trailing newlines so we don't feed extra blank lines
-                // back into the realtime layer when the projection re-imports.
-                while remainder.starts_with("\r\n") || remainder.starts_with('\n') {
-                    if remainder.starts_with("\r\n") {
-                        body_start += 2;
-                        let (_, rest) = remainder.split_at(2);
-                        remainder = rest;
-                    } else {
-                        body_start += 1;
-                        let (_, rest) = remainder.split_at(1);
-                        remainder = rest;
-                    }
-                }
-                return Some((idx, body_start));
-            }
-        }
-        idx += 1;
-    }
-    None
+    Ok(MarkdownIngestPayload {
+        encrypted_hash,
+        size,
+        is_encrypted,
+    })
 }

 #[cfg(test)]
@@ -81,19 +28,33 @@ mod tests {
     use super::*;

     #[test]
-    fn preserves_body_when_front_matter_has_no_id() {
-        let markdown = "---\ntitle: Foo\n---\n\nBody".to_string();
-        let payload = parse_markdown_payload(markdown.clone().into_bytes()).unwrap();
-        assert!(payload.doc_id_hint.is_none());
-        assert_eq!(payload.body, markdown);
+    fn parses_valid_rme1_format() {
+        let mut bytes = b"RME1".to_vec();
+        bytes.extend_from_slice(&[0x01, 0x00, 0x00, 0x00, 0x10]); // version + header length
+        bytes.extend_from_slice(&[0u8; 16]); // dummy header
+        bytes.extend_from_slice(&[0u8; 24]); // dummy content nonce
+        bytes.extend_from_slice(b"encrypted content");
+
+        let payload = parse_markdown_payload(bytes.clone()).unwrap();
+        assert_eq!(payload.size, bytes.len() as i64);
+        assert!(!payload.encrypted_hash.is_empty());
+        assert!(payload.is_encrypted);
+    }
+
+    #[test]
+    fn parses_legacy_plaintext() {
+        let bytes = b"# Hello World\n\nThis is plaintext markdown.".to_vec();
+        let payload = parse_markdown_payload(bytes.clone()).unwrap();
+        assert_eq!(payload.size, bytes.len() as i64);
+        assert!(!payload.encrypted_hash.is_empty());
+        assert!(!payload.is_encrypted);
     }

     #[test]
-    fn extracts_id_when_front_matter_is_valid() {
-        let doc_id = Uuid::new_v4();
-        let markdown = format!("---\nid: {}\n---\n\nHello", doc_id);
-        let payload = parse_markdown_payload(markdown.into_bytes()).unwrap();
-        assert_eq!(payload.doc_id_hint, Some(doc_id));
-        assert_eq!(payload.body.trim_start_matches('\n'), "Hello");
+    fn parses_short_data_as_plaintext() {
+        let bytes = b"RM".to_vec();
+        let payload = parse_markdown_payload(bytes.clone()).unwrap();
+        assert_eq!(payload.size, bytes.len() as i64);
+        assert!(!payload.is_encrypted);
     }
 }
diff --git a/api/crates/application/src/core/services/storage/ingest/mod.rs b/api/crates/application/src/core/services/storage/ingest/mod.rs
index b432b0af..ae5fc4ff 100644
--- a/api/crates/application/src/core/services/storage/ingest/mod.rs
+++ b/api/crates/application/src/core/services/storage/ingest/mod.rs
@@ -3,7 +3,6 @@
 use std::path::PathBuf;
 use std::sync::Arc;

 use async_trait::async_trait;
-use serde::Deserialize;
 use serde_json::{Value, json};
 use tracing::{debug, info, warn};
 use uuid::Uuid;

 use crate::documents::ports::document_repository::DocumentRepository;
 use crate::documents::ports::files::files_repository::FilesRepository;
 use crate::documents::ports::realtime::realtime_port::RealtimeEngine;
-use crate::documents::services::realtime::snapshot::snapshot_from_markdown;
 use crate::documents::services::DocumentService;
 use crate::workspaces::services::{
     WorkspacePermissionResolver, permission_snapshot::permission_set_from_snapshot,
 };
@@ -34,6 +32,9 @@
 mod permissions;
 mod resolved_document;
 mod utils;

+/// RME1 (RefMD Encrypted v1) magic number for E2EE file format
+pub const RME1_MAGIC: &[u8; 4] = b"RME1";
+
 pub use domain::documents::path::normalize_repo_path;
 use markdown::{MarkdownIngestPayload, parse_markdown_payload};

@@ -46,9 +47,13 @@
 pub trait StorageIngestHandler: Send + Sync {
 }

 pub struct StorageIngestService {
+    // TODO(e2ee): Remove after E2EE migration complete - was used for resolve_doc_from_front_matter
+    #[allow(dead_code)]
     document_repo: Arc<dyn DocumentRepository>,
     document_paths: Arc<dyn DocumentPathRepository>,
     files_repo: Arc<dyn FilesRepository>,
+    // TODO(e2ee): Remove after E2EE migration complete - was used for Yjs snapshot conversion
+    #[allow(dead_code)]
     realtime: Arc<dyn RealtimeEngine>,
     storage: Arc<dyn StorageResolverPort>,
     storage_projection: Arc<dyn StorageProjectionPort>,
diff --git a/api/crates/application/src/core/services/storage/reconcile/mod.rs b/api/crates/application/src/core/services/storage/reconcile/mod.rs
index 8fb1eaeb..a1c71632 100644
--- a/api/crates/application/src/core/services/storage/reconcile/mod.rs
+++ b/api/crates/application/src/core/services/storage/reconcile/mod.rs
@@ -1,3 +1,9 @@
+//! Storage reconciliation service
+//!
+//! E2EE: This service operates on encrypted files (RME1 format).
+//! It only performs path-level consistency checks without reading file contents.
+//! Actual content verification (RME1 magic, encrypted_hash) is done by ingest handlers.
+
 use std::collections::HashSet;
 use std::sync::Arc;

diff --git a/api/crates/application/src/core/services/tagging/mod.rs b/api/crates/application/src/core/services/tagging/mod.rs
deleted file mode 100644
index 569411fd..00000000
--- a/api/crates/application/src/core/services/tagging/mod.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use crate::documents::ports::tagging::tagging_repository::TaggingRepository;
-use once_cell::sync::Lazy;
-use regex::Regex;
-use uuid::Uuid;
-
-static TAG_RE: Lazy<Regex> = Lazy::new(|| {
-    // same ranges as frontend hashtag plugin
-    Regex::new(r"\B#([a-zA-Z0-9\u{3040}-\u{309F}\u{30A0}-\u{30FF}\u{4E00}-\u{9FAF}\u{3400}-\u{4DBF}\u{AC00}-\u{D7AF}_-]+)").unwrap()
-});
-
-pub async fn update_document_tags<R: TaggingRepository>(
-    repo: &R,
-    doc_id: Uuid,
-    owner_id: Uuid,
-    content: &str,
-) -> anyhow::Result<()> {
-    use std::collections::HashSet;
-    let mut set: HashSet<String> = HashSet::new();
-    for cap in TAG_RE.captures_iter(content) {
-        if let Some(m) = cap.get(1) {
-            let mut t = m.as_str().to_string();
-            if t.len() > 64 {
-                t.truncate(64);
-            }
-            if !t.is_empty() {
-                set.insert(t.to_lowercase());
-            }
-        }
-    }
-    // clear existing
-    repo.clear_document_tags(doc_id).await?;
-    // insert tags and associations
-    for name in set {
-        // upsert tag (global unique by name)
-        let tag_id = repo.upsert_tag_return_id(&name).await?;
-        // associate if document belongs to owner
-        if repo.owner_doc_exists(doc_id, owner_id).await? {
-            repo.associate_document_tag(doc_id, tag_id).await?;
-        }
-    }
-    Ok(())
-}
diff --git a/api/crates/application/src/documents/dtos/document_export.rs b/api/crates/application/src/documents/dtos/document_export.rs
deleted file mode 100644
index 59209e69..00000000
--- a/api/crates/application/src/documents/dtos/document_export.rs
+++ /dev/null
@@ -1,116 +0,0 @@
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub enum DocumentDownloadFormat {
-    Archive,
-    Markdown,
-    Html,
-    Html5,
-    Pdf,
-    Docx,
-    Latex,
-    Beamer,
-    Context,
-    Man,
-    MediaWiki,
-    Dokuwiki,
-    Textile,
-    Org,
-    Texinfo,
-    Opml,
-    Docbook,
-    OpenDocument,
-    Odt,
-    Rtf,
-    Epub,
-    Epub3,
-    Fb2,
-    Asciidoc,
-    Icml,
-    Slidy,
-    Slideous,
-    Dzslides,
-    Revealjs,
-    S5,
-    Json,
-    Plain,
-    Commonmark,
-    CommonmarkX,
-    MarkdownStrict,
-    MarkdownPhpextra,
-    MarkdownGithub,
-    Rst,
-    Native,
-    Haddock,
-}
-
-impl DocumentDownloadFormat {
-    pub fn extension(&self) -> &'static str {
-        use DocumentDownloadFormat::*;
-        match self {
-            Archive => "zip",
-            Markdown => "md",
-            Html | Html5 => "html",
-            Pdf => "pdf",
-            Docx => "docx",
-            Latex | Beamer | Context => "tex",
-            Man => "man",
-            MediaWiki => "mediawiki",
-            Dokuwiki => "txt",
-            Textile => "textile",
-            Org => "org",
-            Texinfo => "texi",
-            Opml => "opml",
-            Docbook => "xml",
-            OpenDocument => "fodt",
-            Odt => "odt",
-            Rtf => "rtf",
-            Epub | Epub3 => "epub",
-            Fb2 => "fb2",
-            Asciidoc => "adoc",
-            Icml => "icml",
-            Slidy | Slideous | Dzslides | Revealjs | S5 => "html",
-            Json => "json",
-            Plain => "txt",
-            Commonmark | CommonmarkX | MarkdownStrict | MarkdownPhpextra | MarkdownGithub => "md",
-            Rst => "rst",
-            Native => "hs",
-            Haddock => "txt",
-        }
-    }
-
-    pub fn content_type(&self) -> &'static str {
-        use DocumentDownloadFormat::*;
-        match self {
-            Archive => "application/zip",
-            Markdown | Commonmark | CommonmarkX | MarkdownStrict | MarkdownPhpextra
-            | MarkdownGithub => "text/markdown; charset=utf-8",
-            Html | Html5 | Slidy | Slideous | Dzslides | Revealjs | S5 => {
-                "text/html; charset=utf-8"
-            }
-            Pdf => "application/pdf",
-            Docx => "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
-            Latex | Beamer | Context => "application/x-tex",
-            Man => "text/troff",
-            MediaWiki | Dokuwiki | Textile | Org | Texinfo | Plain | Rst | Native | Haddock => {
-                "text/plain; charset=utf-8"
-            }
-            Opml | Docbook => "application/xml",
-            OpenDocument | Odt => "application/vnd.oasis.opendocument.text",
-            Rtf => "application/rtf",
-            Epub | Epub3 => "application/epub+zip",
-            Fb2 => "application/x-fictionbook+xml",
-            Asciidoc => "text/plain; charset=utf-8",
-            Icml => "application/vnd.adobe.indesign-icml",
-            Json => "application/json",
-        }
-    }
-
-    pub fn file_name(&self, base: &str) -> String {
-        format!("{}.{}", base, self.extension())
-    }
-}
-
-pub struct DocumentDownload {
-    pub filename: String,
-    pub content_type: String,
-    pub bytes: Vec<u8>,
-}
diff --git a/api/crates/application/src/documents/dtos/documents.rs b/api/crates/application/src/documents/dtos/documents.rs
index cf32c788..1af5a2e4 100644
--- a/api/crates/application/src/documents/dtos/documents.rs
+++ b/api/crates/application/src/documents/dtos/documents.rs
@@ -31,6 +31,9 @@ pub struct SnapshotSummaryDto {
     pub created_by: Option<Uuid>,
     pub byte_size: i64,
     pub content_hash: String,
+    // E2EE fields
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
 }

 #[derive(Debug, Clone)]
@@ -63,6 +66,44 @@ impl From<SnapshotArchiveRecord> for SnapshotSummaryDto {
             created_by: record.created_by,
             byte_size: record.byte_size,
             content_hash: record.content_hash,
+            nonce: record.nonce,
+            signature: record.signature,
         }
     }
 }
+
+/// Snapshot detail DTO
+/// - For E2EE documents: content is encrypted, nonce is Some
+/// - For non-E2EE documents: content is plaintext Yjs state, nonce is None
+#[derive(Debug, Clone)]
+pub struct SnapshotDetailDto {
+    pub id: Uuid,
+    /// Yjs snapshot bytes (encrypted for E2EE, plaintext for non-E2EE)
+    pub content: Vec<u8>,
+    /// Nonce for decryption (present for E2EE documents)
+    pub nonce: Option<Vec<u8>>,
+    pub created_at: DateTime<Utc>,
+}
+
+/// Encrypted update entry (for E2EE documents)
+#[derive(Debug, Clone)]
+pub struct ContentUpdateEntry {
+    pub seq: i64,
+    pub data: Vec<u8>,
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    pub public_key: Option<Vec<u8>>,
+}
+
+/// Document content DTO (E2EE encrypted)
+#[derive(Debug, Clone)]
+pub struct ContentDto {
+    /// Encrypted Yjs snapshot bytes
+    pub content: Vec<u8>,
+    /// Nonce for decryption
+    pub nonce: Option<Vec<u8>>,
+    /// Sequence number at which the snapshot was taken
+    pub seq_at_snapshot: Option<i64>,
+    /// Pending encrypted updates since the snapshot
+    pub updates: Option<Vec<ContentUpdateEntry>>,
+}
diff --git a/api/crates/application/src/documents/dtos/keys.rs b/api/crates/application/src/documents/dtos/keys.rs
new file mode 100644
index 00000000..95be3011
--- /dev/null
+++ b/api/crates/application/src/documents/dtos/keys.rs
@@ -0,0 +1,32 @@
+use uuid::Uuid;
+
+use domain::identity::keys::KdfParams;
+
+#[derive(Debug, Clone)]
+pub struct DocumentEncryptedKeyDto {
+    pub document_id: Uuid,
+    pub encrypted_dek: Vec<u8>,
+    pub nonce: Vec<u8>,
+    pub key_version: i32,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[derive(Debug, Clone)]
+pub struct ShareEncryptedKeyDto {
+    pub share_id: Uuid,
+    pub encrypted_dek: Vec<u8>,
+    pub salt: Option<Vec<u8>>,
+    pub kdf_params: Option<KdfParams>,
+    /// Share key encrypted with creator's KEK (for URL recovery by creator)
+    pub creator_encrypted_share_key: Option<Vec<u8>>,
+    /// Nonce for creator_encrypted_share_key
+    pub creator_share_key_nonce: Option<Vec<u8>>,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
+impl ShareEncryptedKeyDto {
+    pub fn is_password_protected(&self) -> bool {
+        self.salt.is_some()
+    }
+}
diff --git a/api/crates/application/src/documents/dtos/mod.rs b/api/crates/application/src/documents/dtos/mod.rs
index 480b3893..451bd66c 100644
--- a/api/crates/application/src/documents/dtos/mod.rs
+++ b/api/crates/application/src/documents/dtos/mod.rs
@@ -1,11 +1,11 @@
-mod document_export;
 mod documents;
+mod keys;
 mod public;
 mod shares;
 mod tags;

-pub use document_export::*;
 pub use documents::*;
+pub use keys::*;
 pub use public::*;
 pub use shares::*;
 pub use tags::*;
diff --git a/api/crates/application/src/documents/dtos/shares.rs b/api/crates/application/src/documents/dtos/shares.rs
index 9de659cc..a0f58596 100644
--- a/api/crates/application/src/documents/dtos/shares.rs
+++ b/api/crates/application/src/documents/dtos/shares.rs
@@ -23,6 +23,10 @@ pub struct ShareItemDto {
     pub document_id: Uuid,
     pub document_type: String,
     pub parent_share_id: Option<Uuid>,
+    /// Share key encrypted with creator's KEK (for URL recovery)
+    pub creator_encrypted_share_key: Option<Vec<u8>>,
+    /// Nonce for creator_encrypted_share_key
+    pub creator_share_key_nonce: Option<Vec<u8>>,
 }

 #[derive(Debug, Clone)]
@@ -61,6 +65,10 @@ pub struct ShareBrowseTreeItemDto {
     pub r#type: String,
     pub created_at: chrono::DateTime<chrono::Utc>,
     pub updated_at: chrono::DateTime<chrono::Utc>,
+    /// Child share token for documents within a folder share
+    pub share_token: Option<String>,
+    /// Encrypted DEK for this document (base64, nonce prepended)
+    pub encrypted_dek: Option<String>,
 }

 #[derive(Debug, Clone)]
@@ -70,6 +78,7 @@ pub struct ShareBrowseResponseDto {
 }

 #[derive(Debug, Clone)]
 pub struct CreatedShareDto {
+    pub share_id: Uuid,
     pub token: String,
     pub document_id: Uuid,
     pub document_type: String,
diff --git a/api/crates/application/src/documents/dtos/tags.rs b/api/crates/application/src/documents/dtos/tags.rs
index f76f86fa..21440bf3 100644
--- a/api/crates/application/src/documents/dtos/tags.rs
+++ b/api/crates/application/src/documents/dtos/tags.rs
@@ -1,5 +1,23 @@
+use chrono::{DateTime, Utc};
+use uuid::Uuid;
+
 #[derive(Debug, Clone)]
 pub struct TagItemDto {
     pub name: String,
     pub count: i64,
 }
+
+/// Encrypted tag item with Base64-encoded tag
+#[derive(Debug, Clone)]
+pub struct EncryptedTagItemDto {
+    pub encrypted_tag: Vec<u8>,
+    pub count: i64,
+}
+
+/// Encrypted tag entry for a document
+#[derive(Debug, Clone)]
+pub struct EncryptedTagEntryDto {
+    pub id: Uuid,
+    pub encrypted_tag: Vec<u8>,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/api/crates/application/src/documents/ports/document_exporter.rs b/api/crates/application/src/documents/ports/document_exporter.rs
deleted file mode 100644
index 8e41a59b..00000000
--- a/api/crates/application/src/documents/ports/document_exporter.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use async_trait::async_trait;
-
-use crate::core::ports::errors::PortResult;
-use crate::documents::dtos::{DocumentDownload, DocumentDownloadFormat};
-
-#[derive(Debug, Clone)]
-pub struct DocumentExportAttachment {
-    pub relative_path: String,
-    pub bytes: Vec<u8>,
-}
-
-#[derive(Debug, Clone)]
-pub struct DocumentExportAssets {
-    pub safe_title: String,
-    pub display_title: Option<String>,
-    pub markdown: Vec<u8>,
-    pub attachments: Vec<DocumentExportAttachment>,
-}
-
-#[async_trait]
-pub trait DocumentExporter: Send + Sync {
-    async fn export(
-        &self,
-        assets: DocumentExportAssets,
-        format: DocumentDownloadFormat,
-    ) -> PortResult<DocumentDownload>;
-}
diff --git a/api/crates/application/src/documents/ports/document_keys_repository.rs b/api/crates/application/src/documents/ports/document_keys_repository.rs
new file mode 100644
index 00000000..37323f5f
--- /dev/null
+++ b/api/crates/application/src/documents/ports/document_keys_repository.rs
@@ -0,0 +1,35 @@
+use async_trait::async_trait;
+use uuid::Uuid;
+
+use crate::core::ports::errors::PortResult;
+
+#[derive(Debug, Clone)]
+pub struct DocumentEncryptedKeyRow {
+    pub document_id: Uuid,
+    pub encrypted_dek: Vec<u8>,
+    pub nonce: Vec<u8>,
+    pub key_version: i32,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[async_trait]
+pub trait DocumentKeysRepository: Send + Sync {
+    /// Get the encrypted DEK for a document
+    async fn get_encrypted_dek(
+        &self,
+        document_id: Uuid,
+    ) -> PortResult<Option<DocumentEncryptedKeyRow>>;
+
+    /// Store or update an encrypted DEK for a document
+    async fn upsert_encrypted_dek(
+        &self,
+        document_id: Uuid,
+        encrypted_dek: &[u8],
+        nonce: &[u8],
+        key_version: i32,
+    ) -> PortResult<()>;
+
+    /// Delete an encrypted DEK (when document is deleted)
+    async fn delete_encrypted_dek(&self, document_id: Uuid) -> PortResult<bool>;
+}
diff --git a/api/crates/application/src/documents/ports/document_repository.rs b/api/crates/application/src/documents/ports/document_repository.rs
index 403bb5a4..1f6a2fa9 100644
--- a/api/crates/application/src/documents/ports/document_repository.rs
+++ b/api/crates/application/src/documents/ports/document_repository.rs
@@ -4,7 +4,6 @@
 use domain::documents::doc_type::DocumentType;
 use domain::documents::document::Document as DomainDocument;
-use domain::documents::document::SearchHit;
 pub use domain::documents::meta::DocMeta;
 use domain::documents::path::{DesiredPath, Slug};
 use domain::documents::title::Title;
@@ -41,7 +40,6 @@
     async fn list_for_user(
         &self,
         workspace_id: Uuid,
-        query: Option<String>,
         tag: Option<String>,
         state: DocumentListState,
     ) -> DocumentRepoResult<Vec<DomainDocument>>;
@@ -55,13 +53,6 @@
     async fn get_by_id(&self, id: Uuid) -> DocumentRepoResult<Option<DomainDocument>>;

-    async fn search_for_user(
-        &self,
-        workspace_id: Uuid,
-        query: Option<String>,
-        limit: i64,
-    ) -> DocumentRepoResult<Vec<SearchHit>>;
-
     #[allow(clippy::too_many_arguments)]
     async fn create_for_user(
         &self,
@@ -119,6 +110,14 @@
         workspace_id: Uuid,
         root_id: Uuid,
     ) -> DocumentRepoResult<Vec<Uuid>>;
+
+    /// Update encrypted title fields for E2EE documents
+    async fn update_encrypted_title(
+        &self,
+        doc_id: Uuid,
+        encrypted_title: Vec<u8>,
+        encrypted_title_nonce: Vec<u8>,
+    ) -> DocumentRepoResult<()>;
 }

 #[async_trait]
diff --git a/api/crates/application/src/documents/ports/document_snapshot_archive_repository.rs b/api/crates/application/src/documents/ports/document_snapshot_archive_repository.rs
index 3640c30f..88524006 100644
--- a/api/crates/application/src/documents/ports/document_snapshot_archive_repository.rs
+++ b/api/crates/application/src/documents/ports/document_snapshot_archive_repository.rs
@@ -29,6 +29,9 @@ pub struct SnapshotArchiveRecord {
     pub created_by: Option<Uuid>,
     pub byte_size: i64,
     pub content_hash: String,
+    // E2EE fields
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
 }

 #[derive(Debug, Clone)]
diff --git a/api/crates/application/src/documents/ports/files/files_repository.rs b/api/crates/application/src/documents/ports/files/files_repository.rs
index ddede632..f1d53c89 100644
--- a/api/crates/application/src/documents/ports/files/files_repository.rs
+++ b/api/crates/application/src/documents/ports/files/files_repository.rs
@@ -6,15 +6,22 @@
 use crate::core::ports::errors::PortResult;

 #[derive(Debug, Clone)]
 pub struct FileMeta {
     pub storage_path: String,
-    pub content_type: Option<String>,
     pub document_id: Uuid,
     pub workspace_id: Uuid,
+    /// Encrypted file metadata (filename, content_type, etc.)
+    /// None for legacy files uploaded before E2EE
+    pub encrypted_metadata: Option<Vec<u8>>,
+    /// Nonce for encrypted metadata
+    /// None for legacy files uploaded before E2EE
+    pub encrypted_metadata_nonce: Option<Vec<u8>>,
+    /// Hash of encrypted content
+    /// None for legacy files uploaded before E2EE
+    pub encrypted_hash: Option<String>,
 }

 #[derive(Debug, Clone)]
 pub struct FilePathMeta {
     pub storage_path: String,
-    pub content_type: Option<String>,
 }

 #[derive(Debug, Clone)]
@@ -27,21 +34,11 @@ pub struct StoredFileScope {

 #[async_trait]
 pub trait FilesRepository: Send + Sync {
     async fn is_workspace_document(&self, doc_id: Uuid, workspace_id: Uuid) -> PortResult<bool>;
-    async fn insert_file(
-        &self,
-        doc_id: Uuid,
-        filename: &str,
-        content_type: Option<&str>,
-        size: i64,
-        storage_path: &str,
-        content_hash: &str,
-    ) -> PortResult<Uuid>;
+
+    /// Insert a file record
+    async fn insert_file(&self, input: FileInsert<'_>) -> PortResult<Uuid>;
+
     async fn get_file_meta(&self, file_id: Uuid) -> PortResult<Option<FileMeta>>;
-    async fn get_file_path_by_doc_and_name(
-        &self,
-        doc_id: Uuid,
-        filename: &str,
-    ) -> PortResult<Option<FilePathMeta>>;

     async fn list_storage_paths_for_document(&self, doc_id: Uuid) -> PortResult<Vec<String>>;
@@ -55,11 +52,11 @@
     async fn update_storage_path(&self, file_id: Uuid, storage_path: &str) -> PortResult<()>;

-    async fn update_hash_and_size(
+    async fn update_size_and_hash(
         &self,
         file_id: Uuid,
         size: i64,
-        content_hash: &str,
+        encrypted_hash: &str,
     ) -> PortResult<()>;

     async fn delete_by_id(&self, file_id: Uuid) -> PortResult<()>;
@@ -73,9 +70,29 @@ pub trait FilesRepositoryTx: Send {

 #[derive(Debug, Clone)]
 pub struct FileRecord {
     pub id: Uuid,
-    pub filename: String,
-    pub content_type: Option<String>,
     pub size: i64,
     pub storage_path: String,
-    pub content_hash: String,
+    /// Encrypted file metadata (filename, content_type, etc.)
+    /// None for legacy files uploaded before E2EE
+    pub encrypted_metadata: Option<Vec<u8>>,
+    /// Nonce for encrypted metadata
+    /// None for legacy files uploaded before E2EE
+    pub encrypted_metadata_nonce: Option<Vec<u8>>,
+    /// Hash of encrypted content
+    /// None for legacy files uploaded before E2EE
+    pub encrypted_hash: Option<String>,
+}
+
+/// Input for file insert
+#[derive(Debug, Clone)]
+pub struct FileInsert<'a> {
+    pub doc_id: Uuid,
+    pub size: i64,
+    pub storage_path: &'a str,
+    /// Encrypted file metadata (filename, content_type, etc.)
+    pub encrypted_metadata: &'a [u8],
+    /// Nonce for encrypted metadata
+    pub encrypted_metadata_nonce: &'a [u8],
+    /// Hash of encrypted content
+    pub encrypted_hash: &'a str,
+}
diff --git a/api/crates/application/src/documents/ports/mod.rs b/api/crates/application/src/documents/ports/mod.rs
index 09fd2b4b..f2befd98 100644
--- a/api/crates/application/src/documents/ports/mod.rs
+++ b/api/crates/application/src/documents/ports/mod.rs
@@ -1,6 +1,6 @@
 pub mod access_repository;
 pub mod doc_event_log;
-pub mod document_exporter;
+pub mod document_keys_repository;
 pub mod document_path_repository;
 pub mod document_repository;
 pub mod document_snapshot_archive_repository;
@@ -8,6 +8,7 @@
 pub mod files;
 pub mod linkgraph_repository;
 pub mod publishing;
 pub mod realtime;
+pub mod share_keys_repository;
 pub mod sharing;
 pub mod tagging;
 pub mod tx_runner;
diff --git a/api/crates/application/src/documents/ports/publishing/public_repository.rs b/api/crates/application/src/documents/ports/publishing/public_repository.rs
index 5c8d8cfc..8440d793 100644
--- a/api/crates/application/src/documents/ports/publishing/public_repository.rs
+++ b/api/crates/application/src/documents/ports/publishing/public_repository.rs
@@ -14,6 +14,7 @@ pub struct WorkspaceTitleAndSlug {
 pub struct PublishStatusRow {
     pub slug: String,
     pub workspace_slug: String,
+    pub noindex: bool,
 }

 #[derive(Debug, Clone)]
@@ -31,7 +32,8 @@ pub trait PublicRepository: Send + Sync {
         doc_id: Uuid,
         workspace_id: Uuid,
     ) -> PortResult<Option<PublishStatusRow>>;
-    async fn upsert_public_document(&self, doc_id: Uuid, slug: &str) -> PortResult<()>;
+    async fn upsert_public_document(&self, doc_id: Uuid, slug: &str, noindex: bool) -> PortResult<()>;
+    async fn update_noindex(&self, doc_id: Uuid, noindex: bool) -> PortResult<bool>;
     async fn slug_exists(&self, slug: &str) -> PortResult<bool>;
     async fn is_workspace_document(&self, doc_id: Uuid, workspace_id: Uuid) -> PortResult<bool>;
     async fn delete_public_document(&self, doc_id: Uuid) -> PortResult<bool>;
@@ -54,4 +56,91 @@
         workspace_slug: &str,
         doc_id: Uuid,
     ) -> PortResult<bool>;
+
+    /// Get noindex flag for a published document
+    async fn get_noindex_by_workspace_and_id(
+        &self,
+        workspace_slug: &str,
+        doc_id: Uuid,
+    ) -> PortResult<Option<bool>>;
+
+    /// Store or update plaintext content for a published document (for E2EE mode)
+    async fn store_public_content(
+        &self,
+        doc_id: Uuid,
+        title: &str,
+        content: &str,
+        content_hash: &str,
+    ) -> PortResult<()>;
+
+    /// Get stored plaintext content for a published document
+    async fn get_public_content(&self, doc_id: Uuid) -> PortResult<Option<PublicContentRow>>;
+
+    /// Delete stored public content when unpublishing
+    async fn delete_public_content(&self, doc_id: Uuid) -> PortResult<()>;
+
+    // --- Public file methods (for E2EE attachments) ---
+
+    /// Store metadata for a public file
+    async fn store_public_file(&self, input: StorePublicFileInput) -> PortResult<()>;
+
+    /// Get all public files for a document
+    async fn get_public_files(&self, doc_id: Uuid) -> PortResult<Vec<PublicFileRow>>;
+
+    /// Get a single public file by document and file ID
+    async fn get_public_file(
+        &self,
+        doc_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<Option<PublicFileRow>>;
+
+    /// Get a single public file by document and logical filename
+    async fn get_public_file_by_logical_filename(
+        &self,
+        doc_id: Uuid,
+        logical_filename: &str,
+    ) -> PortResult<Option<PublicFileRow>>;
+
+    /// Delete all public files for a document (when unpublishing)
+    async fn delete_public_files(&self, doc_id: Uuid) -> PortResult<u64>;
+}
+
+/// Stored plaintext content for published document
+#[derive(Debug, Clone)]
+pub struct PublicContentRow {
+    pub document_id: Uuid,
+    pub title: String,
+    pub content: String,
+    pub content_hash: String,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+/// Metadata for a public file (decrypted attachment)
+#[derive(Debug, Clone)]
+pub struct PublicFileRow {
+    pub id: Uuid,
+    pub document_id: Uuid,
+    pub workspace_id: Uuid,
+    pub file_id: Uuid,
+    pub original_filename: String,
+    pub logical_filename: String,
+    pub mime_type: String,
+    pub size: i64,
+    pub storage_path: String,
+    pub content_hash: String,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
+/// Input for storing a public file
+#[derive(Debug, Clone)]
+pub struct StorePublicFileInput {
+    pub document_id: Uuid,
+    pub workspace_id: Uuid,
+    pub file_id: Uuid,
+    pub original_filename: String,
+    pub logical_filename: String,
+    pub mime_type: String,
+    pub size: i64,
+    pub storage_path: String,
+    pub content_hash: String,
+}
diff --git a/api/crates/application/src/documents/ports/realtime/realtime_persistence_port.rs b/api/crates/application/src/documents/ports/realtime/realtime_persistence_port.rs
index 86bb797b..63d2c83a 100644
--- a/api/crates/application/src/documents/ports/realtime/realtime_persistence_port.rs
+++ b/api/crates/application/src/documents/ports/realtime/realtime_persistence_port.rs
@@ -21,6 +21,38 @@ pub struct PersistenceTask {
 pub struct SnapshotEntry {
     pub version: i64,
     pub bytes: Vec<u8>,
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    /// The seq number at the time this snapshot was created (for E2EE sync)
+    pub seq_at_snapshot: Option<i64>,
+}
+
+/// Encryption metadata for E2EE content
+#[derive(Debug, Clone, Default)]
+pub struct ContentEncryptionMeta {
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    /// The seq number at the time this snapshot was created (for E2EE sync)
+    pub seq_at_snapshot: Option<i64>,
+}
+
+/// Encrypted update data for E2EE documents
+#[derive(Debug, Clone)]
+pub struct EncryptedUpdateData {
+    pub data: Vec<u8>,
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    pub public_key: Option<Vec<u8>>,
+}
+
+/// Encrypted update entry with sequence number (for retrieval)
+#[derive(Debug, Clone)]
+pub struct EncryptedUpdateEntry {
+    pub seq: i64,
+    pub data: Vec<u8>,
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    pub public_key: Option<Vec<u8>>,
 }

 #[async_trait]
@@ -32,6 +64,14 @@
         update: &[u8],
     ) -> PortResult<()>;

+    /// Append encrypted update with E2EE metadata
+    async fn append_encrypted_update_with_seq(
+        &self,
+        doc_id: &Uuid,
+        seq: i64,
+        update: &EncryptedUpdateData,
+    ) -> PortResult<()>;
+
     async fn latest_update_seq(&self, doc_id: &Uuid) -> PortResult<Option<i64>>;

     async fn persist_snapshot(
         &self,
         doc_id: &Uuid,
         version: i64,
         snapshot: &[u8],
+        encryption_meta: Option<&ContentEncryptionMeta>,
     ) -> PortResult<()>;

     async fn latest_snapshot_entry(&self, doc_id: &Uuid) -> PortResult<Option<SnapshotEntry>>;
@@ -50,6 +91,13 @@
     async fn prune_updates_before(&self, doc_id: &Uuid, seq_inclusive: i64) -> PortResult<()>;

     async fn clear_updates(&self, doc_id: &Uuid) -> PortResult<()>;
+
+    /// Get encrypted updates since a given sequence number (for E2EE sync)
+    async fn get_updates_since(
+        &self,
+        doc_id: &Uuid,
+        since_seq: i64,
+    ) -> PortResult<Vec<EncryptedUpdateEntry>>;
 }

 #[async_trait]
diff --git a/api/crates/application/src/documents/ports/realtime/realtime_port.rs b/api/crates/application/src/documents/ports/realtime/realtime_port.rs
index d41a0feb..ee52b877 100644
--- a/api/crates/application/src/documents/ports/realtime/realtime_port.rs
+++ b/api/crates/application/src/documents/ports/realtime/realtime_port.rs
@@ -37,6 +37,10 @@
     async fn get_content(&self, doc_id: &str) -> PortResult<Option<String>>;

+    /// Get Yjs snapshot with E2EE metadata (nonce, signature)
+    /// Returns snapshot data including nonce for decryption
+    async fn get_snapshot(&self, doc_id: &str) -> PortResult<Option<SnapshotData>>;
+
     async fn force_persist(&self, doc_id: &str) -> PortResult<()>;

     async fn force_save_to_fs(&self, doc_id: &str) -> PortResult<()> {
@@ -45,7 +49,67 @@
     }

     async fn apply_snapshot(&self, doc_id: &str, snapshot: &[u8]) -> PortResult<()>;

+    /// Apply encrypted snapshot with E2EE metadata
+    async fn apply_encrypted_snapshot(
+        &self,
+        doc_id: &str,
+        snapshot: &[u8],
+        _nonce: Option<&[u8]>,
+        _signature: Option<&[u8]>,
+    ) -> PortResult<()> {
+        // Default implementation ignores encryption metadata
+        self.apply_snapshot(doc_id, snapshot).await
+    }
+
+    /// Apply encrypted updates (delta) for E2EE documents
+    /// This appends encrypted Yjs updates without processing them
+    async fn apply_encrypted_updates(
+        &self,
+        doc_id: &str,
+        updates: &[EncryptedUpdate],
+    ) -> PortResult<()>;
+
     async fn set_document_editable(&self, _doc_id: &str, _editable: bool) -> PortResult<()> {
         Ok(())
     }
+
+    /// Get encrypted updates since a given sequence number (for E2EE content retrieval)
+    /// Returns empty vector if no updates exist
+    async fn get_updates_since(
+        &self,
+        doc_id: &str,
+        since_seq: i64,
+    ) -> PortResult<Vec<EncryptedUpdateEntry>>;
+}
+
+/// Encrypted update entry with sequence number (for retrieval via REST API)
+#[derive(Debug, Clone)]
+pub struct EncryptedUpdateEntry {
+    pub seq: i64,
+    pub data: Vec<u8>,
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    pub public_key: Option<Vec<u8>>,
+}
+
+/// Encrypted Yjs update for E2EE documents
+#[derive(Debug, Clone)]
+pub struct EncryptedUpdate {
+    pub data: Vec<u8>,
+    pub nonce: Option<Vec<u8>>,
+    pub signature: Option<Vec<u8>>,
+    pub public_key: Option<Vec<u8>>,
+}
+
+/// Snapshot data with E2EE metadata
+#[derive(Debug, Clone)]
+pub struct SnapshotData {
+    /// Yjs snapshot bytes
+    pub data: Vec<u8>,
+    /// Nonce for decryption
+    pub nonce: Option<Vec<u8>>,
+    /// Signature for verification
+    pub signature: Option<Vec<u8>>,
+    /// The seq number at the time this snapshot was created (for E2EE sync)
+    pub seq_at_snapshot: Option<i64>,
 }
diff --git a/api/crates/application/src/documents/ports/realtime/realtime_types.rs b/api/crates/application/src/documents/ports/realtime/realtime_types.rs
index d974de89..5329d3ab 100644
--- a/api/crates/application/src/documents/ports/realtime/realtime_types.rs
+++ b/api/crates/application/src/documents/ports/realtime/realtime_types.rs
@@ -1,6 +1,7 @@
 use std::pin::Pin;

 use futures_util::{Sink, Stream};
+use serde::{Deserialize, Serialize};

 use super::realtime_port::RealtimeError;

@@ -8,3 +9,125 @@
 pub type DynRealtimeSink =
     Pin<Box<dyn Sink<Vec<u8>, Error = RealtimeError> + Send + Sync + 'static>>;
 pub type DynRealtimeStream =
     Pin<Box<dyn Stream<Item = Result<Vec<u8>, RealtimeError>> + Send + Sync + 'static>>;
+
+// ============================================================================
+// E2EE Message Types (secsync-compatible)
+// ============================================================================
+
+/// Signature domains for E2EE messages (domain separation)
+pub mod signature_domains {
+    pub const SNAPSHOT: &str = "refmd_snapshot";
+    pub const UPDATE: &str = "refmd_update";
+    pub const EPHEMERAL: &str = "refmd_ephemeral";
+}
+
+/// E2EE realtime message types
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum MessageType {
+    Update,
+    Snapshot,
+    Awareness,
+}
+
+/// E2EE realtime message (JSON format over WebSocket)
+/// Field names follow secsync specification (camelCase)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct RealtimeMessage {
+    /// Message type
+    #[serde(rename = "type")]
+    pub msg_type: MessageType,
+    /// Base64-encoded ciphertext (XChaCha20-Poly1305)
+    pub ciphertext: String,
+    /// Base64-encoded nonce (24 bytes for XChaCha20-Poly1305)
+    pub nonce: String,
+    /// Base64-encoded Ed25519 signature
+    pub signature: String,
+    /// Public metadata (not encrypted, but authenticated via signature)
+    /// This is a Base64-encoded canonicalized JSON string
+    pub public_data: String,
+}
+
+/// Update public data structure (for parsing publicData field)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct UpdatePublicData {
+    /// Document ID
+    pub doc_id: String,
+    /// Ed25519 public key (Base64-encoded, 32 bytes)
+    pub pub_key: String,
+    /// Reference snapshot ID
+    pub ref_snapshot_id: String,
+    /// Logical clock for ordering (per client)
+    pub clock: u64,
+}
+
+/// Snapshot public data structure (for parsing publicData field)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnapshotPublicData {
+    /// Document ID
+    pub doc_id: String,
+    /// Ed25519 public key (Base64-encoded, 32 bytes)
+    pub pub_key: String,
+    /// Snapshot ID
+    pub snapshot_id: String,
+    /// Parent snapshot ID
+    pub parent_snapshot_id: String,
+    /// Parent snapshot proof
+    pub parent_snapshot_proof: String,
+    /// Update clocks at the time of snapshot (pubKey -> clock)
+    pub parent_snapshot_update_clocks: std::collections::HashMap<String, u64>,
+}
+
+/// Ephemeral message public data structure (for Awareness)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct EphemeralPublicData {
+    /// Document ID
+    pub doc_id: String,
+    /// Ed25519 public key (Base64-encoded, 32 bytes)
+    pub pub_key: String,
+}
+
+impl RealtimeMessage {
+    /// Get the signature domain for this message type
+    pub fn signature_domain(&self) -> &'static str {
+        match self.msg_type {
+            MessageType::Update => signature_domains::UPDATE,
+            MessageType::Snapshot => signature_domains::SNAPSHOT,
+            MessageType::Awareness => signature_domains::EPHEMERAL,
+        }
+    }
+
+    /// Parse the publicData field as UpdatePublicData
+    pub fn parse_update_public_data(&self) -> anyhow::Result<UpdatePublicData> {
+        let decoded = base64::Engine::decode(
+            &base64::engine::general_purpose::STANDARD,
+            &self.public_data,
+        )?;
+        let json_str = String::from_utf8(decoded)?;
+        Ok(serde_json::from_str(&json_str)?)
+    }
+
+    /// Parse the publicData field as SnapshotPublicData
+    pub fn parse_snapshot_public_data(&self) -> anyhow::Result<SnapshotPublicData> {
+        let decoded = base64::Engine::decode(
+            &base64::engine::general_purpose::STANDARD,
+            &self.public_data,
+        )?;
+        let json_str = String::from_utf8(decoded)?;
+        Ok(serde_json::from_str(&json_str)?)
+    }
+
+    /// Parse the publicData field as EphemeralPublicData
+    pub fn parse_ephemeral_public_data(&self) -> anyhow::Result<EphemeralPublicData> {
+        let decoded = base64::Engine::decode(
+            &base64::engine::general_purpose::STANDARD,
+            &self.public_data,
+        )?;
+        let json_str = String::from_utf8(decoded)?;
+        Ok(serde_json::from_str(&json_str)?)
+    }
+}
diff --git a/api/crates/application/src/documents/ports/share_keys_repository.rs b/api/crates/application/src/documents/ports/share_keys_repository.rs
new file mode 100644
index 00000000..07b7a31f
--- /dev/null
+++ b/api/crates/application/src/documents/ports/share_keys_repository.rs
@@ -0,0 +1,50 @@
+use async_trait::async_trait;
+use uuid::Uuid;
+
+use crate::core::ports::errors::PortResult;
+use domain::identity::keys::KdfParams;
+
+#[derive(Debug, Clone)]
+pub struct ShareEncryptedKeyRow {
+    pub share_id: Uuid,
+    pub encrypted_dek: Vec<u8>,
+    pub salt: Option<Vec<u8>>,
+    pub kdf_params: Option<KdfParams>,
+    /// Share key encrypted with creator's KEK (for URL recovery by creator)
+    pub creator_encrypted_share_key: Option<Vec<u8>>,
+    /// Nonce for creator_encrypted_share_key
+    pub creator_share_key_nonce: Option<Vec<u8>>,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[async_trait]
+pub trait ShareKeysRepository: Send + Sync {
+    /// Get the encrypted DEK for a share
+    async fn get_encrypted_dek(&self, share_id: Uuid) -> PortResult<Option<ShareEncryptedKeyRow>>;
+
+    /// Get the salt for a password-protected share (for client-side KDF)
+    async fn get_salt(&self, share_id: Uuid) -> PortResult<Option<Vec<u8>>>;
+
+    /// Store an encrypted DEK for a share (URL fragment based, no password)
+    async fn store_encrypted_dek(
+        &self,
+        share_id: Uuid,
+        encrypted_dek: &[u8],
+        creator_encrypted_share_key: Option<&[u8]>,
+        creator_share_key_nonce: Option<&[u8]>,
+    ) -> PortResult<()>;
+
+    /// Store an encrypted DEK for a password-protected share
+    async fn store_password_protected_dek(
+        &self,
+        share_id: Uuid,
+        encrypted_dek: &[u8],
+        salt: &[u8],
+        kdf_params: &KdfParams,
+        creator_encrypted_share_key: Option<&[u8]>,
+        creator_share_key_nonce: Option<&[u8]>,
+    ) -> PortResult<()>;
+
+    /// Delete an encrypted DEK (when share is deleted)
+    async fn delete_encrypted_dek(&self, share_id: Uuid) -> PortResult<bool>;
+}
diff --git a/api/crates/application/src/documents/ports/sharing/shares_repository.rs b/api/crates/application/src/documents/ports/sharing/shares_repository.rs
index 03108e54..6c0d29e5 100644
--- a/api/crates/application/src/documents/ports/sharing/shares_repository.rs
+++ b/api/crates/application/src/documents/ports/sharing/shares_repository.rs
@@ -18,6 +18,10 @@ pub struct ShareRow {
     pub document_type: DocumentType,
     pub document_title: Title,
     pub created_at: chrono::DateTime<chrono::Utc>,
+    /// Share key encrypted with creator's KEK (for URL recovery)
+    pub creator_encrypted_share_key: Option<Vec<u8>>,
+    /// Nonce for creator_encrypted_share_key
+    pub creator_share_key_nonce: Option<Vec<u8>>,
 }

 #[derive(Debug, Clone)]
@@ -71,6 +75,16 @@ pub struct ShareSubtreeNode {
     pub updated_at: DateTime<Utc>,
 }

+/// Child share info for a document within a folder share
+#[derive(Debug, Clone)]
+pub struct ChildShareInfo {
+    pub share_id: Uuid,
+    pub document_id: Uuid,
+    pub token: String,
+    /// Encrypted DEK stored for this child share (raw bytes, nonce prepended)
+    pub encrypted_dek: Option<Vec<u8>>,
+}
+
 #[async_trait]
 pub trait SharesRepository: Send + Sync {
     async fn create_share(
         &self,
@@ -125,6 +139,9 @@
     async fn list_materialized_children(&self, parent_share_id: Uuid) -> PortResult<Vec<ShareSubtreeNode>>;

+    /// Get child share info (token + encrypted DEK) for documents in a folder share
+    async fn list_child_share_info(&self, parent_share_id: Uuid) -> PortResult<Vec<ChildShareInfo>>;
+
     async fn materialize_folder_share(
         &self,
         workspace_id: Uuid,
diff --git a/api/crates/application/src/documents/ports/tagging/encrypted_tag_repository.rs b/api/crates/application/src/documents/ports/tagging/encrypted_tag_repository.rs
new file mode 100644
index 00000000..2a7dee66
--- /dev/null
+++ b/api/crates/application/src/documents/ports/tagging/encrypted_tag_repository.rs
@@ -0,0 +1,62 @@
+use async_trait::async_trait;
+use chrono::{DateTime, Utc};
+use uuid::Uuid;
+
+use crate::core::ports::errors::PortResult;
+
+/// Encrypted tag entry for a document
+#[derive(Debug, Clone)]
+pub struct EncryptedTagEntry {
+    pub id: Uuid,
+    pub workspace_id: Uuid,
+    pub document_id: Uuid,
+    pub encrypted_tag: Vec<u8>,
+    pub created_at: DateTime<Utc>,
+}
+
+/// Summary of encrypted tags with occurrence count
+#[derive(Debug, Clone)]
+pub struct EncryptedTagSummary {
+    pub encrypted_tag: Vec<u8>,
+    pub count: i64,
+}
+
+#[async_trait]
+pub trait EncryptedTagRepository: Send + Sync {
+    /// List all unique encrypted tags in a workspace with their counts
+    async fn list_encrypted_tags(
+        &self,
+        workspace_id: Uuid,
+    ) -> PortResult<Vec<EncryptedTagSummary>>;
+
+    /// List encrypted tags for a specific document
+    async fn list_document_encrypted_tags(
+        &self,
+        document_id: Uuid,
+    ) -> PortResult<Vec<EncryptedTagEntry>>;
+
+    /// Replace all encrypted tags for a document
+    async fn replace_document_encrypted_tags(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        encrypted_tags: &[Vec<u8>],
+    ) -> PortResult<Vec<EncryptedTagEntry>>;
+
+    /// Find documents by encrypted tag (deterministic encryption allows exact match)
+    async fn find_documents_by_encrypted_tag(
+        &self,
+        workspace_id: Uuid,
+        encrypted_tag: &[u8],
+    ) -> PortResult<Vec<Uuid>>;
+
+    /// Find a specific encrypted tag with its count (for filtering)
+    async fn find_encrypted_tag(
+        &self,
+        workspace_id: Uuid,
+        encrypted_tag: &[u8],
+    ) -> PortResult<Option<EncryptedTagSummary>>;
+
+    /// Delete all encrypted tags for a document
+    async fn delete_document_encrypted_tags(&self, document_id: Uuid) -> PortResult<()>;
+}
diff --git a/api/crates/application/src/documents/ports/tagging/mod.rs b/api/crates/application/src/documents/ports/tagging/mod.rs
index 5a5457fe..0b7f5e24 100644
--- a/api/crates/application/src/documents/ports/tagging/mod.rs
+++ b/api/crates/application/src/documents/ports/tagging/mod.rs
@@ -1,2 +1 @@
-pub mod tag_repository;
-pub mod tagging_repository;
+pub mod encrypted_tag_repository;
diff --git a/api/crates/application/src/documents/ports/tagging/tag_repository.rs b/api/crates/application/src/documents/ports/tagging/tag_repository.rs
deleted file mode 100644
index 18dbd37c..00000000
--- a/api/crates/application/src/documents/ports/tagging/tag_repository.rs
+++ /dev/null
@@ -1,19 +0,0 @@
-use async_trait::async_trait;
-use uuid::Uuid;
-
-use crate::core::ports::errors::PortResult;
-
-#[derive(Debug, Clone)]
-pub struct TagSummary {
-    pub name: String,
-    pub count: i64,
-}
-
-#[async_trait]
-pub trait TagRepository: Send + Sync {
-    async fn list_tags(
-        &self,
-        owner_id: Uuid,
-        filter: Option<String>,
-    ) -> PortResult<Vec<TagSummary>>;
-}
diff --git a/api/crates/application/src/documents/ports/tagging/tagging_repository.rs b/api/crates/application/src/documents/ports/tagging/tagging_repository.rs
deleted file mode 100644
index 4748355f..00000000
--- a/api/crates/application/src/documents/ports/tagging/tagging_repository.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-use async_trait::async_trait;
-use uuid::Uuid;
-
-use crate::core::ports::errors::PortResult;
-
-#[async_trait]
-pub trait TaggingRepository: Send + Sync {
-    async fn clear_document_tags(&self, doc_id: Uuid) -> PortResult<()>;
-    async fn upsert_tag_return_id(&self, name: &str) -> PortResult<i64>;
-    async fn owner_doc_exists(&self, doc_id: Uuid, owner_id: Uuid) -> PortResult<bool>;
-    async fn associate_document_tag(&self, doc_id: Uuid, tag_id: i64) -> PortResult<()>;
-}
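The `EncryptedTagRepository` above relies on deterministic encryption: equal plaintext tags must produce byte-identical ciphertexts so that `find_documents_by_encrypted_tag` can match on raw bytes without the server ever seeing the tag. A minimal client-side sketch of that idea, assuming the DEK is a 32-byte XChaCha20-Poly1305 key (via the `chacha20poly1305` crate this PR adds) and a hypothetical nonce-derivation scheme (SHA-256 of the tag); the actual client-side scheme is not part of this diff, so the function name and nonce derivation here are illustrative only:

```rust
use chacha20poly1305::{
    aead::{Aead, KeyInit},
    Key, XChaCha20Poly1305, XNonce,
};
use sha2::{Digest, Sha256};

/// Deterministically encrypt a normalized tag: the nonce is derived from the
/// tag itself, so the same (key, tag) pair always yields the same bytes.
/// Hypothetical sketch - not the scheme shipped in this PR.
fn encrypt_tag_deterministic(key: &Key, tag: &str) -> Vec<u8> {
    // Derive a 24-byte XChaCha20-Poly1305 nonce from the tag (assumed scheme).
    let digest = Sha256::digest(tag.as_bytes());
    let nonce = XNonce::from_slice(&digest[..24]);
    let cipher = XChaCha20Poly1305::new(key);
    let ciphertext = cipher.encrypt(nonce, tag.as_bytes()).expect("encrypt");
    // Prepend the nonce so the stored value is self-contained.
    let mut out = nonce.to_vec();
    out.extend_from_slice(&ciphertext);
    out
}

fn main() {
    let key = Key::from_slice(&[7u8; 32]);
    let a = encrypt_tag_deterministic(key, "rust");
    let b = encrypt_tag_deterministic(key, "rust");
    assert_eq!(a, b); // determinism is what enables server-side exact match
}
```

Deterministic nonces deliberately leak plaintext equality within a workspace; that leak is exactly the property the exact-match index needs, traded against the usual unique-nonce guarantee.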
diff --git a/api/crates/application/src/documents/services/attachments.rs b/api/crates/application/src/documents/services/attachments.rs index 2b97c0c6..81423573 100644 --- a/api/crates/application/src/documents/services/attachments.rs +++ b/api/crates/application/src/documents/services/attachments.rs @@ -10,12 +10,18 @@ use crate::core::services::utils::hash::sha256_hex; use super::DocumentService; +/// Snapshot of an encrypted attachment for document duplication. +/// All fields contain encrypted data from the original file. #[derive(Debug, Clone)] pub(super) struct AttachmentSnapshot { - filename: String, - content_type: Option, + /// Encrypted file bytes (.rme format) bytes: Vec, - content_hash: String, + /// Encrypted file metadata + encrypted_metadata: Vec, + /// Nonce for encrypted metadata + encrypted_metadata_nonce: Vec, + /// Hash of encrypted content + encrypted_hash: String, } impl DocumentService { @@ -30,6 +36,17 @@ impl DocumentService { .map_err(ServiceError::from)?; let mut snapshots = Vec::new(); for file in files { + // Skip legacy files without E2EE metadata + let (Some(encrypted_metadata), Some(encrypted_metadata_nonce), Some(encrypted_hash)) = + (file.encrypted_metadata, file.encrypted_metadata_nonce, file.encrypted_hash) + else { + warn!( + document_id = %doc_id, + storage_path = %file.storage_path, + "duplicate_attachment_skipped_legacy" + ); + continue; + }; let abs_path = self.storage.absolute_from_relative(&file.storage_path); let exists = self .storage @@ -49,12 +66,11 @@ impl DocumentService { .read_bytes(&abs_path) .await .map_err(ServiceError::from)?; - let content_hash = hash_bytes(&bytes); snapshots.push(AttachmentSnapshot { - filename: file.filename, - content_type: file.content_type, bytes, - content_hash, + encrypted_metadata, + encrypted_metadata_nonce, + encrypted_hash, }); } Ok(snapshots) @@ -75,13 +91,9 @@ impl DocumentService { .await .map_err(ServiceError::from)?; for attachment in attachments { - let filename = std::path::Path::new(&attachment.filename) - .file_name() - .and_then(|f| f.to_str()) - .map(str::to_string) - .filter(|f| !f.is_empty()) - .unwrap_or_else(|| attachment.filename.clone()); - let target_path = base_dir.join("attachments").join(&filename); + // Use UUID for storage filename (E2EE - no plaintext filename) + let file_uuid = Uuid::new_v4(); + let target_path = base_dir.join("attachments").join(file_uuid.to_string()); self.storage .write_bytes(&target_path, &attachment.bytes) .await @@ -91,14 +103,14 @@ impl DocumentService { .relative_from_uploads(&target_path) .replace('\\', "/"); self.files_repo - .insert_file( - target_doc.id(), - &filename, - attachment.content_type.as_deref(), - attachment.bytes.len() as i64, - &storage_path, - &attachment.content_hash, - ) + .insert_file(crate::documents::ports::files::files_repository::FileInsert { + doc_id: target_doc.id(), + size: attachment.bytes.len() as i64, + storage_path: &storage_path, + encrypted_metadata: &attachment.encrypted_metadata, + encrypted_metadata_nonce: &attachment.encrypted_metadata_nonce, + encrypted_hash: &attachment.encrypted_hash, + }) .await .map_err(ServiceError::from)?; if let Some(repo_path) = @@ -109,7 +121,7 @@ impl DocumentService { "storage_path": storage_path, "backend": "api", "size": attachment.bytes.len() as i64, - "content_hash": attachment.content_hash, + "encrypted_hash": attachment.encrypted_hash, "workspace_id": target_doc.workspace_id().to_string(), "actor_id": actor_id.to_string(), }); @@ -126,6 +138,6 @@ impl DocumentService { } } -fn 
hash_bytes(bytes: &[u8]) -> String { +fn _hash_bytes(bytes: &[u8]) -> String { sha256_hex(bytes) } diff --git a/api/crates/application/src/documents/services/content.rs b/api/crates/application/src/documents/services/content.rs index a9aebf3a..0fc0ef6a 100644 --- a/api/crates/application/src/documents/services/content.rs +++ b/api/crates/application/src/documents/services/content.rs @@ -6,6 +6,8 @@ use domain::documents::document::Document as DomainDocument; use crate::core::services::access::{self, Actor}; use crate::core::services::errors::ServiceError; +use crate::documents::dtos::{ContentDto, ContentUpdateEntry}; +use crate::documents::ports::realtime::realtime_port::EncryptedUpdate; use crate::documents::ports::tx_runner::run_in_tx; use crate::documents::services::realtime::snapshot::snapshot_from_markdown; @@ -14,7 +16,9 @@ use super::patch::{DocumentPatchOperation, apply_patch_operations}; use super::util::map_tx_error; impl DocumentService { - pub async fn get_content(&self, actor: &Actor, doc_id: Uuid) -> Result { + /// Get document content as Yjs snapshot bytes. + /// Returns ContentDto with content bytes, optional nonce, and pending updates (for E2EE documents). + pub async fn get_content(&self, actor: &Actor, doc_id: Uuid) -> Result { access::require_view( self.access_repo.as_ref(), self.share_access.as_ref(), @@ -27,20 +31,64 @@ impl DocumentService { other => other, })?; - let content = self + let snapshot = self .realtime - .get_content(&doc_id.to_string()) + .get_snapshot(&doc_id.to_string()) .await - .map_err(ServiceError::from)? - .unwrap_or_default(); - Ok(content) + .map_err(ServiceError::from)?; + + // All documents are E2EE + let (snapshot_content, snapshot_nonce, base_seq) = match &snapshot { + Some(data) => ( + data.data.clone(), + data.nonce.clone(), + data.seq_at_snapshot.unwrap_or(0), + ), + None => (Vec::new(), None, 0), + }; + + // Fetch pending updates since snapshot + let update_entries = self + .realtime + .get_updates_since(&doc_id.to_string(), base_seq) + .await + .map_err(ServiceError::from)?; + + let updates = if update_entries.is_empty() { + None + } else { + Some( + update_entries + .into_iter() + .map(|u| ContentUpdateEntry { + seq: u.seq, + data: u.data, + nonce: u.nonce, + signature: u.signature, + public_key: u.public_key, + }) + .collect(), + ) + }; + + Ok(ContentDto { + content: snapshot_content, + nonce: snapshot_nonce, + seq_at_snapshot: Some(base_seq), + updates, + }) } + /// Update document content. 
+ /// - For plaintext mode: pass content bytes (Yjs state), nonce and signature as None + /// - For E2EE mode: pass encrypted content bytes with nonce and optional signature pub async fn update_content( &self, actor: &Actor, doc_id: Uuid, - content: &str, + content: &[u8], + nonce: Option<&[u8]>, + signature: Option<&[u8]>, ) -> Result { access::require_edit( self.access_repo.as_ref(), @@ -54,9 +102,9 @@ impl DocumentService { other => other, })?; - let snapshot_bytes = snapshot_from_markdown(content); + // Apply snapshot with optional E2EE metadata self.realtime - .apply_snapshot(&doc_id.to_string(), snapshot_bytes.as_slice()) + .apply_encrypted_snapshot(&doc_id.to_string(), content, nonce, signature) .await .map_err(ServiceError::from)?; @@ -86,6 +134,8 @@ impl DocumentService { }) .await .map_err(map_tx_error)?; + + let is_encrypted = nonce.is_some(); let repo_path = doc.desired_path().as_str().to_string(); let event_payload = json!({ "repo_path": repo_path, @@ -93,6 +143,7 @@ impl DocumentService { "slug": doc.slug().as_str(), "doc_type": doc.doc_type().as_str(), "owner_id": doc.workspace_id(), + "encrypted": is_encrypted, }); self.record_event( doc.workspace_id(), @@ -104,16 +155,27 @@ impl DocumentService { Ok(doc) } - pub async fn patch_content( + /// Update document content from markdown string (convenience method for plaintext mode). + pub async fn update_content_from_markdown( &self, actor: &Actor, doc_id: Uuid, - operations: &[DocumentPatchOperation], + content: &str, ) -> Result { - if operations.is_empty() { - return Err(ServiceError::BadRequest("patch_operations_required")); - } + let snapshot_bytes = snapshot_from_markdown(content); + self.update_content(actor, doc_id, &snapshot_bytes, None, None).await + } + /// Patch document content. + /// - For plaintext mode: pass DocumentPatchOperation with text + /// - For E2EE mode: pass EncryptedUpdate with encrypted data and nonce + pub async fn patch_content( + &self, + actor: &Actor, + doc_id: Uuid, + plaintext_operations: Option<&[DocumentPatchOperation]>, + encrypted_updates: Option<&[EncryptedUpdate]>, + ) -> Result { access::require_edit( self.access_repo.as_ref(), self.share_access.as_ref(), @@ -126,14 +188,82 @@ impl DocumentService { other => other, })?; - let current = self - .realtime - .get_content(&doc_id.to_string()) + let is_encrypted = encrypted_updates.is_some() && !encrypted_updates.unwrap().is_empty(); + + if let Some(updates) = encrypted_updates { + if !updates.is_empty() { + // E2EE mode: apply encrypted updates + self.realtime + .apply_encrypted_updates(&doc_id.to_string(), updates) + .await + .map_err(ServiceError::from)?; + } + } else if let Some(operations) = plaintext_operations { + if operations.is_empty() { + return Err(ServiceError::BadRequest("patch_operations_required")); + } + // Plaintext mode: get current content, apply operations, update + let current = self + .realtime + .get_content(&doc_id.to_string()) + .await + .map_err(ServiceError::from)? 
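With `get_content` now returning an encrypted snapshot plus the updates persisted after it, reconstruction happens entirely on the client: decrypt each blob, then replay the updates in ascending `seq` order. A sketch of that reassembly, assuming the blobs are sealed with XChaCha20-Poly1305 under the document DEK (the cipher choice and `dek` handling are assumptions; only the Yjs replay mirrors this patch):

```rust
use chacha20poly1305::aead::{Aead, KeyInit};
use chacha20poly1305::{Key, XChaCha20Poly1305, XNonce};
use yrs::updates::decoder::Decode;
use yrs::{Doc, Transact, Update};

/// Hypothetical client-side reassembly of a ContentDto: decrypt the snapshot,
/// then decrypt and apply each pending update in ascending `seq` order.
fn rebuild_doc(
    dek: &[u8; 32],
    snapshot: &[u8],
    snapshot_nonce: &[u8],
    updates: &[(Vec<u8>, Vec<u8>)], // (data, nonce) pairs, already sorted by seq
) -> anyhow::Result<Doc> {
    let cipher = XChaCha20Poly1305::new(Key::from_slice(dek));
    let doc = Doc::new();
    let plain = cipher
        .decrypt(XNonce::from_slice(snapshot_nonce), snapshot)
        .map_err(|_| anyhow::anyhow!("snapshot decryption failed"))?;
    doc.transact_mut().apply_update(Update::decode_v1(&plain)?)?;
    for (data, nonce) in updates {
        let plain = cipher
            .decrypt(XNonce::from_slice(nonce), data.as_slice())
            .map_err(|_| anyhow::anyhow!("update decryption failed"))?;
        doc.transact_mut().apply_update(Update::decode_v1(&plain)?)?;
    }
    Ok(doc)
}
```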
+ .unwrap_or_default(); + let updated = apply_patch_operations(¤t, operations)?; + let snapshot_bytes = snapshot_from_markdown(&updated); + self.realtime + .apply_snapshot(&doc_id.to_string(), &snapshot_bytes) + .await + .map_err(ServiceError::from)?; + } else { + return Err(ServiceError::BadRequest("patch_operations_required")); + } + + if let Err(err) = self.realtime.force_persist(&doc_id.to_string()).await { + warn!(document_id = %doc_id, error = ?err, "document_force_persist_after_patch_failed"); + } + + let doc = self + .document_repo + .get_by_id(doc_id) .await .map_err(ServiceError::from)? - .unwrap_or_default(); - let updated = apply_patch_operations(¤t, operations)?; + .ok_or(ServiceError::NotFound)?; + + let workspace_id = doc.workspace_id(); + let doc_id = doc.id(); + run_in_tx(self.tx_runner.as_ref(), move |tx| { + Box::pin(async move { + Self::enqueue_doc_sync_tx( + tx.storage_jobs(), + workspace_id, + doc_id, + "patch_content", + ) + .await?; + Ok(()) + }) + }) + .await + .map_err(map_tx_error)?; - self.update_content(actor, doc_id, &updated).await + let repo_path = doc.desired_path().as_str().to_string(); + let event_payload = json!({ + "repo_path": repo_path, + "desired_path": doc.desired_path().as_str(), + "slug": doc.slug().as_str(), + "doc_type": doc.doc_type().as_str(), + "owner_id": doc.workspace_id(), + "encrypted": is_encrypted, + }); + self.record_event( + doc.workspace_id(), + doc.id(), + "document.content_patched", + Some(event_payload), + ) + .await; + + Ok(doc) } } diff --git a/api/crates/application/src/documents/services/crud.rs b/api/crates/application/src/documents/services/crud.rs index 6f0f7725..31596edb 100644 --- a/api/crates/application/src/documents/services/crud.rs +++ b/api/crates/application/src/documents/services/crud.rs @@ -3,7 +3,7 @@ use tracing::{error, warn}; use uuid::Uuid; use domain::access::permissions::PermissionSet; -use domain::documents::document::{Document as DomainDocument, SearchHit}; +use domain::documents::document::Document as DomainDocument; use domain::documents::permissions as doc_permissions; use domain::documents::policy::DocumentState; use domain::documents::{hierarchy, path as doc_path, policy as doc_policy, title}; @@ -16,7 +16,6 @@ use crate::documents::use_cases::create_document::CreateDocument; use crate::documents::use_cases::delete_document::DeleteDocument; use crate::documents::use_cases::get_document::GetDocument; use crate::documents::use_cases::list_documents::ListDocuments; -use crate::documents::use_cases::search_documents::SearchDocuments; use crate::documents::use_cases::update_document::UpdateDocument; use super::DocumentService; @@ -26,14 +25,13 @@ impl DocumentService { pub async fn list_for_user( &self, workspace_id: Uuid, - query: Option, tag: Option, state: DocumentListFilter, ) -> Result, ServiceError> { let uc = ListDocuments { repo: self.document_repo.as_ref(), }; - uc.execute(workspace_id, query, tag, to_repo_state(state)) + uc.execute(workspace_id, tag, to_repo_state(state)) .await .map_err(ServiceError::from) } @@ -181,7 +179,7 @@ impl DocumentService { let result = async { let updated_doc = self - .update_content(&actor, new_doc.id(), &source_content) + .update_content_from_markdown(&actor, new_doc.id(), &source_content) .await?; self.copy_attachments(&updated_doc, &attachments, actor_id) @@ -414,16 +412,15 @@ impl DocumentService { Ok(doc) } - pub async fn search_for_user( + /// Update encrypted title fields for E2EE documents + pub async fn update_encrypted_title( &self, - workspace_id: Uuid, - 
query: Option, - limit: i64, - ) -> Result, ServiceError> { - let uc = SearchDocuments { - repo: self.document_repo.as_ref(), - }; - uc.execute(workspace_id, query, limit) + doc_id: Uuid, + encrypted_title: Vec, + encrypted_title_nonce: Vec, + ) -> Result<(), ServiceError> { + self.document_repo + .update_encrypted_title(doc_id, encrypted_title, encrypted_title_nonce) .await .map_err(ServiceError::from) } diff --git a/api/crates/application/src/documents/services/downloads.rs b/api/crates/application/src/documents/services/downloads.rs deleted file mode 100644 index 71f4dfc2..00000000 --- a/api/crates/application/src/documents/services/downloads.rs +++ /dev/null @@ -1,73 +0,0 @@ -use uuid::Uuid; - -use crate::core::services::access::Actor; -use crate::core::services::errors::ServiceError; -use crate::documents::dtos::{DocumentDownload, DocumentDownloadFormat}; -use crate::documents::use_cases::download_document::{ - DownloadDocument as DownloadDocumentUseCase, FolderDownloadUnsupportedFormat, -}; - -use super::DocumentService; - -impl DocumentService { - pub async fn download_document( - &self, - actor: &Actor, - doc_id: Uuid, - format: DocumentDownloadFormat, - ) -> Result { - let uc = DownloadDocumentUseCase { - documents: self.document_repo.as_ref(), - files: self.files_repo.as_ref(), - storage: self.storage.as_ref(), - access: self.access_repo.as_ref(), - shares: self.share_access.as_ref(), - snapshot: self.snapshot_service.as_ref(), - exporter: self.exporter.as_ref(), - }; - uc.execute(actor, doc_id, format) - .await - .map_err(|err| { - if err - .downcast_ref::() - .is_some() - { - ServiceError::BadRequest("folder_archive_only") - } else { - ServiceError::from(err) - } - })? - .ok_or(ServiceError::NotFound) - } - - pub async fn download_workspace_root( - &self, - actor: &Actor, - workspace_id: Uuid, - workspace_name: &str, - format: DocumentDownloadFormat, - ) -> Result { - let uc = DownloadDocumentUseCase { - documents: self.document_repo.as_ref(), - files: self.files_repo.as_ref(), - storage: self.storage.as_ref(), - access: self.access_repo.as_ref(), - shares: self.share_access.as_ref(), - snapshot: self.snapshot_service.as_ref(), - exporter: self.exporter.as_ref(), - }; - uc.download_workspace_root(actor, workspace_id, workspace_name, format) - .await - .map_err(|err| { - if err - .downcast_ref::() - .is_some() - { - ServiceError::BadRequest("folder_archive_only") - } else { - ServiceError::from(err) - } - })? 
- .ok_or(ServiceError::NotFound) - } -} diff --git a/api/crates/application/src/documents/services/files/mod.rs b/api/crates/application/src/documents/services/files/mod.rs index dc65039e..7b1ba1a3 100644 --- a/api/crates/application/src/documents/services/files/mod.rs +++ b/api/crates/application/src/documents/services/files/mod.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use mime_guess::MimeGuess; use serde_json::json; use tracing::warn; use uuid::Uuid; @@ -10,31 +9,42 @@ use crate::core::services::access::{self, Actor}; use crate::core::services::errors::ServiceError; use crate::documents::ports::access_repository::AccessRepository; use crate::documents::ports::doc_event_log::DocEventLog; +pub use crate::documents::ports::files::files_repository::FileRecord; use crate::documents::ports::files::files_repository::FilesRepository; use crate::documents::ports::sharing::share_access_port::ShareAccessPort; -use crate::documents::use_cases::files::upload_file::{UploadFile, UploadedFile}; +use crate::documents::use_cases::files::upload_file::{FileUploadInput, UploadFile, UploadedFile}; use async_trait::async_trait; use domain::documents::path as doc_path; +/// File payload with optional E2EE metadata pub struct FilePayload { + /// File bytes (.rme format for E2EE files, raw bytes for legacy files) pub bytes: Vec, - pub content_type: Option, + /// Encrypted file metadata (filename, content_type, etc.) + /// None for legacy files uploaded before E2EE + pub encrypted_metadata: Option>, + /// Nonce for encrypted metadata + /// None for legacy files uploaded before E2EE + pub encrypted_metadata_nonce: Option>, + /// Hash of encrypted content + /// None for legacy files uploaded before E2EE + pub encrypted_hash: Option, } #[async_trait] pub trait FileServiceFacade: Send + Sync { + /// Upload an E2EE encrypted file. #[allow(clippy::too_many_arguments)] async fn upload_file( &self, workspace_id: Uuid, actor_id: Uuid, doc_id: Uuid, - bytes: Vec, - orig_filename: Option, - content_type: Option, + input: FileUploadInput, public_base_url: Option, ) -> Result; + /// Download file with E2EE metadata. async fn download_owned_file( &self, actor: &Actor, @@ -42,18 +52,35 @@ pub trait FileServiceFacade: Send + Sync { file_id: Uuid, ) -> Result; - async fn get_file_by_name( + /// Serve file by storage path (for backwards compatibility with existing URLs). + async fn serve_upload( &self, actor: &Actor, doc_id: Uuid, - filename: &str, + attachment_path: &str, ) -> Result; - async fn serve_upload( + /// List files for a document (for building file map on client). + async fn list_files_for_document( + &self, + workspace_id: Uuid, + doc_id: Uuid, + ) -> Result, ServiceError>; + + /// List files for a document with actor-based authorization. + /// Used for share token access where workspace_id is not directly available. + async fn list_files_for_actor( &self, actor: &Actor, doc_id: Uuid, - attachment_path: &str, + ) -> Result, ServiceError>; + + /// Download file with actor-based authorization. + /// Used for share token access where workspace_id is not directly available. 
+ async fn download_file_for_actor( + &self, + actor: &Actor, + file_id: Uuid, ) -> Result; } @@ -65,21 +92,11 @@ impl FileServiceFacade for FileService { workspace_id: Uuid, actor_id: Uuid, doc_id: Uuid, - bytes: Vec, - orig_filename: Option, - content_type: Option, + input: FileUploadInput, public_base_url: Option, ) -> Result { - self.upload_file( - workspace_id, - actor_id, - doc_id, - bytes, - orig_filename, - content_type, - public_base_url, - ) - .await + self.upload_file(workspace_id, actor_id, doc_id, input, public_base_url) + .await } async fn download_owned_file( @@ -91,22 +108,37 @@ impl FileServiceFacade for FileService { self.download_owned_file(actor, workspace_id, file_id).await } - async fn get_file_by_name( + async fn serve_upload( &self, actor: &Actor, doc_id: Uuid, - filename: &str, + attachment_path: &str, ) -> Result { - self.get_file_by_name(actor, doc_id, filename).await + self.serve_upload(actor, doc_id, attachment_path).await } - async fn serve_upload( + async fn list_files_for_document( + &self, + workspace_id: Uuid, + doc_id: Uuid, + ) -> Result, ServiceError> { + self.list_files_for_document(workspace_id, doc_id).await + } + + async fn list_files_for_actor( &self, actor: &Actor, doc_id: Uuid, - attachment_path: &str, + ) -> Result, ServiceError> { + self.list_files_for_actor(actor, doc_id).await + } + + async fn download_file_for_actor( + &self, + actor: &Actor, + file_id: Uuid, ) -> Result { - self.serve_upload(actor, doc_id, attachment_path).await + self.download_file_for_actor(actor, file_id).await } } @@ -135,15 +167,14 @@ impl FileService { } } + /// Upload a file with optional E2EE metadata. #[allow(clippy::too_many_arguments)] pub async fn upload_file( &self, workspace_id: Uuid, actor_id: Uuid, doc_id: Uuid, - bytes: Vec, - orig_filename: Option, - content_type: Option, + input: FileUploadInput, public_base_url: Option, ) -> Result { let uc = UploadFile { @@ -152,7 +183,7 @@ impl FileService { public_base_url, }; let uploaded = uc - .execute(workspace_id, doc_id, bytes, orig_filename, content_type) + .execute(workspace_id, doc_id, input) .await .map_err(ServiceError::from)? .ok_or(ServiceError::Forbidden)?; @@ -161,6 +192,7 @@ impl FileService { Ok(uploaded) } + /// Download file with E2EE metadata. pub async fn download_owned_file( &self, actor: &Actor, @@ -191,15 +223,21 @@ impl FileService { .map_err(ServiceError::from)?; Ok(FilePayload { bytes, - content_type: meta.content_type, + encrypted_metadata: meta.encrypted_metadata, + encrypted_metadata_nonce: meta.encrypted_metadata_nonce, + encrypted_hash: meta.encrypted_hash, }) } - pub async fn get_file_by_name( + /// Serve file by storage path. + /// For E2EE files, returns encrypted bytes with metadata headers. + /// For legacy files, returns raw bytes with None for E2EE fields. + /// Returns encrypted file with E2EE metadata. + pub async fn serve_upload( &self, actor: &Actor, doc_id: Uuid, - filename: &str, + attachment_path: &str, ) -> Result { access::require_view( self.access_repo.as_ref(), @@ -209,30 +247,76 @@ impl FileService { ) .await?; + let file_path = self + .storage + .resolve_upload_path(doc_id, attachment_path) + .await + .map_err(ServiceError::from)?; + + // Get the relative path to look up file record + let relative_path = self.storage.relative_from_uploads(&file_path); + + // Look up file record by storage path to get encrypted metadata + let scope = self + .files_repo + .find_by_storage_path(&relative_path) + .await + .map_err(ServiceError::from)? 
+ .ok_or(ServiceError::NotFound)?; + + // Get full file metadata let meta = self .files_repo - .get_file_path_by_doc_and_name(doc_id, filename) + .get_file_meta(scope.file_id) .await .map_err(ServiceError::from)? .ok_or(ServiceError::NotFound)?; - let abs_path = self.storage.absolute_from_relative(&meta.storage_path); + let bytes = self .storage - .read_bytes(&abs_path) + .read_bytes(&file_path) .await .map_err(ServiceError::from)?; + Ok(FilePayload { bytes, - content_type: meta.content_type, + encrypted_metadata: meta.encrypted_metadata, + encrypted_metadata_nonce: meta.encrypted_metadata_nonce, + encrypted_hash: meta.encrypted_hash, }) } - pub async fn serve_upload( + /// List files for a document (for building file map on client). + pub async fn list_files_for_document( + &self, + workspace_id: Uuid, + doc_id: Uuid, + ) -> Result, ServiceError> { + // Verify document belongs to workspace + let is_workspace_doc = self + .files_repo + .is_workspace_document(doc_id, workspace_id) + .await + .map_err(ServiceError::from)?; + if !is_workspace_doc { + return Err(ServiceError::Forbidden); + } + let files = self + .files_repo + .list_files_for_document(doc_id) + .await + .map_err(ServiceError::from)?; + Ok(files) + } + + /// List files for a document with actor-based authorization. + /// Used for share token access where workspace_id is not directly available. + pub async fn list_files_for_actor( &self, actor: &Actor, doc_id: Uuid, - attachment_path: &str, - ) -> Result { + ) -> Result, ServiceError> { + // Verify actor has view access to the document access::require_view( self.access_repo.as_ref(), self.share_access.as_ref(), @@ -241,21 +325,48 @@ impl FileService { ) .await?; - let file_path = self - .storage - .resolve_upload_path(doc_id, attachment_path) + let files = self + .files_repo + .list_files_for_document(doc_id) .await .map_err(ServiceError::from)?; + Ok(files) + } + + /// Download file with actor-based authorization. + /// Used for share token access where workspace_id is not directly available. + pub async fn download_file_for_actor( + &self, + actor: &Actor, + file_id: Uuid, + ) -> Result { + let meta = self + .files_repo + .get_file_meta(file_id) + .await + .map_err(ServiceError::from)? 
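Because filenames and MIME types now live only inside `encrypted_metadata`, the client rebuilds its file map by opening that blob for each record returned by `list_files_for_document`. A sketch, assuming the metadata is a small JSON object sealed with XChaCha20-Poly1305 under the document DEK (the field set and the cipher are assumptions; this patch only transports the bytes):

```rust
use chacha20poly1305::aead::{Aead, KeyInit};
use chacha20poly1305::{Key, XChaCha20Poly1305, XNonce};
use serde::Deserialize;

/// Assumed shape of the sealed per-file metadata; the real field set is
/// defined by the client and is not part of this patch.
#[derive(Deserialize)]
struct FileMetadata {
    filename: String,
    content_type: Option<String>,
}

fn decrypt_file_metadata(
    dek: &[u8; 32],
    encrypted_metadata: &[u8],
    nonce: &[u8],
) -> anyhow::Result<FileMetadata> {
    let cipher = XChaCha20Poly1305::new(Key::from_slice(dek));
    let plain = cipher
        .decrypt(XNonce::from_slice(nonce), encrypted_metadata)
        .map_err(|_| anyhow::anyhow!("metadata decryption failed"))?;
    Ok(serde_json::from_slice(&plain)?)
}
```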
+ .ok_or(ServiceError::NotFound)?; + + // Verify actor has view access to the document + access::require_view( + self.access_repo.as_ref(), + self.share_access.as_ref(), + actor, + meta.document_id, + ) + .await?; + + let abs_path = self.storage.absolute_from_relative(&meta.storage_path); let bytes = self .storage - .read_bytes(&file_path) + .read_bytes(&abs_path) .await .map_err(ServiceError::from)?; - let guess = MimeGuess::from_path(&file_path); - let content_type = Some(guess.first_or_octet_stream().essence_str().to_string()); Ok(FilePayload { bytes, - content_type, + encrypted_metadata: meta.encrypted_metadata, + encrypted_metadata_nonce: meta.encrypted_metadata_nonce, + encrypted_hash: meta.encrypted_hash, }) } @@ -282,7 +393,7 @@ impl FileService { "storage_path": file.storage_path, "backend": "api", "size": file.size, - "content_hash": file.content_hash, + "encrypted_hash": file.encrypted_hash, "workspace_id": workspace_id.to_string(), "actor_id": actor_id.to_string(), })), diff --git a/api/crates/application/src/documents/services/keys.rs b/api/crates/application/src/documents/services/keys.rs new file mode 100644 index 00000000..ec86f7a7 --- /dev/null +++ b/api/crates/application/src/documents/services/keys.rs @@ -0,0 +1,237 @@ +use std::sync::Arc; + +use async_trait::async_trait; +use uuid::Uuid; + +use crate::core::services::errors::ServiceError; +use crate::documents::dtos::{DocumentEncryptedKeyDto, ShareEncryptedKeyDto}; +use crate::documents::ports::document_keys_repository::DocumentKeysRepository; +use crate::documents::ports::share_keys_repository::ShareKeysRepository; +use domain::identity::keys::KdfParams; + +pub struct DocumentKeysService { + document_keys_repo: Arc, + share_keys_repo: Arc, +} + +#[async_trait] +pub trait DocumentKeysServiceFacade: Send + Sync { + // Document keys + async fn get_document_key( + &self, + document_id: Uuid, + ) -> Result, ServiceError>; + + async fn store_document_key( + &self, + document_id: Uuid, + encrypted_dek: Vec, + nonce: Vec, + key_version: i32, + ) -> Result; + + // Share keys + async fn get_share_key( + &self, + share_id: Uuid, + ) -> Result, ServiceError>; + + async fn get_share_salt(&self, share_id: Uuid) -> Result>, ServiceError>; + + async fn store_share_key( + &self, + share_id: Uuid, + encrypted_dek: Vec, + creator_encrypted_share_key: Option>, + creator_share_key_nonce: Option>, + ) -> Result; + + async fn store_password_protected_share_key( + &self, + share_id: Uuid, + encrypted_dek: Vec, + salt: Vec, + kdf_params: KdfParams, + creator_encrypted_share_key: Option>, + creator_share_key_nonce: Option>, + ) -> Result; + + /// Rotate document DEK + /// Returns the new key version + async fn rotate_document_key( + &self, + document_id: Uuid, + encrypted_dek: Vec, + nonce: Vec, + ) -> Result; +} + +impl DocumentKeysService { + pub fn new( + document_keys_repo: Arc, + share_keys_repo: Arc, + ) -> Self { + Self { + document_keys_repo, + share_keys_repo, + } + } +} + +#[async_trait] +impl DocumentKeysServiceFacade for DocumentKeysService { + async fn get_document_key( + &self, + document_id: Uuid, + ) -> Result, ServiceError> { + let row = self + .document_keys_repo + .get_encrypted_dek(document_id) + .await + .map_err(ServiceError::from)?; + Ok(row.map(|r| DocumentEncryptedKeyDto { + document_id: r.document_id, + encrypted_dek: r.encrypted_dek, + nonce: r.nonce, + key_version: r.key_version, + created_at: r.created_at, + updated_at: r.updated_at, + })) + } + + async fn store_document_key( + &self, + document_id: Uuid, + 
encrypted_dek: Vec, + nonce: Vec, + key_version: i32, + ) -> Result { + let row = self + .document_keys_repo + .upsert_encrypted_dek(document_id, &encrypted_dek, &nonce, key_version) + .await + .map_err(ServiceError::from)?; + Ok(DocumentEncryptedKeyDto { + document_id: row.document_id, + encrypted_dek: row.encrypted_dek, + nonce: row.nonce, + key_version: row.key_version, + created_at: row.created_at, + updated_at: row.updated_at, + }) + } + + async fn get_share_key( + &self, + share_id: Uuid, + ) -> Result, ServiceError> { + let row = self + .share_keys_repo + .get_encrypted_dek(share_id) + .await + .map_err(ServiceError::from)?; + Ok(row.map(|r| ShareEncryptedKeyDto { + share_id: r.share_id, + encrypted_dek: r.encrypted_dek, + salt: r.salt, + kdf_params: r.kdf_params, + creator_encrypted_share_key: r.creator_encrypted_share_key, + creator_share_key_nonce: r.creator_share_key_nonce, + created_at: r.created_at, + })) + } + + async fn get_share_salt(&self, share_id: Uuid) -> Result>, ServiceError> { + self.share_keys_repo + .get_salt(share_id) + .await + .map_err(ServiceError::from) + } + + async fn store_share_key( + &self, + share_id: Uuid, + encrypted_dek: Vec, + creator_encrypted_share_key: Option>, + creator_share_key_nonce: Option>, + ) -> Result { + let row = self + .share_keys_repo + .store_encrypted_dek( + share_id, + &encrypted_dek, + creator_encrypted_share_key.as_deref(), + creator_share_key_nonce.as_deref(), + ) + .await + .map_err(ServiceError::from)?; + Ok(ShareEncryptedKeyDto { + share_id: row.share_id, + encrypted_dek: row.encrypted_dek, + salt: row.salt, + kdf_params: row.kdf_params, + creator_encrypted_share_key: row.creator_encrypted_share_key, + creator_share_key_nonce: row.creator_share_key_nonce, + created_at: row.created_at, + }) + } + + async fn store_password_protected_share_key( + &self, + share_id: Uuid, + encrypted_dek: Vec, + salt: Vec, + kdf_params: KdfParams, + creator_encrypted_share_key: Option>, + creator_share_key_nonce: Option>, + ) -> Result { + let row = self + .share_keys_repo + .store_password_protected_dek( + share_id, + &encrypted_dek, + &salt, + &kdf_params, + creator_encrypted_share_key.as_deref(), + creator_share_key_nonce.as_deref(), + ) + .await + .map_err(ServiceError::from)?; + Ok(ShareEncryptedKeyDto { + share_id: row.share_id, + encrypted_dek: row.encrypted_dek, + salt: row.salt, + kdf_params: row.kdf_params, + creator_encrypted_share_key: row.creator_encrypted_share_key, + creator_share_key_nonce: row.creator_share_key_nonce, + created_at: row.created_at, + }) + } + + async fn rotate_document_key( + &self, + document_id: Uuid, + encrypted_dek: Vec, + nonce: Vec, + ) -> Result { + // Get current key version + let current_version = self + .document_keys_repo + .get_encrypted_dek(document_id) + .await + .map_err(ServiceError::from)? 
+ .map(|r| r.key_version) + .unwrap_or(0); + + // Increment version + let new_version = current_version + 1; + + // Store new key with incremented version + self.document_keys_repo + .upsert_encrypted_dek(document_id, &encrypted_dek, &nonce, new_version) + .await + .map_err(ServiceError::from)?; + + Ok(new_version) + } +} diff --git a/api/crates/application/src/documents/services/mod.rs b/api/crates/application/src/documents/services/mod.rs index 74d21ea7..4d79a480 100644 --- a/api/crates/application/src/documents/services/mod.rs +++ b/api/crates/application/src/documents/services/mod.rs @@ -4,10 +4,9 @@ use uuid::Uuid; use crate::core::ports::storage::storage_port::StorageResolverPort; use crate::core::services::errors::ServiceError; -use crate::documents::dtos::{DocumentDownload, DocumentDownloadFormat, DocumentListFilter}; +use crate::documents::dtos::DocumentListFilter; use crate::documents::ports::access_repository::AccessRepository; use crate::documents::ports::doc_event_log::DocEventLog; -use crate::documents::ports::document_exporter::DocumentExporter; use crate::documents::ports::document_repository::{DocMeta, DocumentRepository}; use crate::documents::ports::files::files_repository::FilesRepository; use crate::documents::ports::linkgraph_repository::LinkGraphRepository; @@ -19,18 +18,16 @@ use async_trait::async_trait; use domain::access::permissions::PermissionSet; use domain::documents::doc_type::DocumentType; use domain::documents::document::Document as DomainDocument; -use domain::documents::document::{ - BacklinkInfo as DomainBacklink, OutgoingLink as DomainOutgoingLink, SearchHit, -}; +use domain::documents::document::{BacklinkInfo as DomainBacklink, OutgoingLink as DomainOutgoingLink}; mod attachments; mod content; mod crud; mod deletion; -mod downloads; mod events; pub mod files; mod jobs; +pub mod keys; mod lifecycle; pub mod linkgraph; mod links; @@ -50,18 +47,10 @@ pub trait DocumentServiceFacade: Send + Sync { async fn list_for_user( &self, workspace_id: Uuid, - query: Option, tag: Option, state: DocumentListFilter, ) -> Result, ServiceError>; - async fn search_for_user( - &self, - workspace_id: Uuid, - query: Option, - limit: i64, - ) -> Result, ServiceError>; - #[allow(clippy::too_many_arguments)] async fn create_for_user( &self, @@ -124,40 +113,44 @@ pub trait DocumentServiceFacade: Send + Sync { permissions: &PermissionSet, ) -> Result; + /// Get document content as Yjs snapshot bytes. + /// Returns ContentDto with content bytes and optional nonce (for E2EE documents). async fn get_content( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - ) -> Result; + ) -> Result; + /// Update document content. + /// - For plaintext mode: pass content bytes (Yjs state), nonce and signature as None + /// - For E2EE mode: pass encrypted content bytes with nonce and optional signature async fn update_content( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - content: &str, + content: &[u8], + nonce: Option<&[u8]>, + signature: Option<&[u8]>, ) -> Result; - async fn patch_content( + /// Update document content from markdown string (convenience method for plaintext mode). + async fn update_content_from_markdown( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - operations: &[DocumentPatchOperation], + content: &str, ) -> Result; - async fn download_document( + /// Patch document content. 
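`DocumentKeysService` never sees key material in the clear: `encrypted_dek` arrives pre-wrapped, and rotation only bumps `key_version`. A sketch of the wrapping step a client would run before calling `store_document_key` or `rotate_document_key`, assuming XChaCha20-Poly1305 and a 32-byte master key (both assumptions; the client code is not in this diff):

```rust
use chacha20poly1305::aead::{Aead, AeadCore, KeyInit, OsRng};
use chacha20poly1305::{Key, XChaCha20Poly1305};

/// Hypothetical client-side helper: wrap a document DEK under the user's
/// master key before it is sent to the server. The server only ever stores
/// the resulting (ciphertext, nonce) pair.
fn wrap_dek(master_key: &[u8; 32], dek: &[u8; 32]) -> (Vec<u8>, Vec<u8>) {
    let cipher = XChaCha20Poly1305::new(Key::from_slice(master_key));
    // Random 24-byte nonce; stored alongside the wrapped key.
    let nonce = XChaCha20Poly1305::generate_nonce(&mut OsRng);
    let encrypted_dek = cipher
        .encrypt(&nonce, dek.as_slice())
        .expect("sealing a 32-byte key cannot fail");
    (encrypted_dek, nonce.to_vec())
}
```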
+ /// - For plaintext mode: pass DocumentPatchOperation with text + /// - For E2EE mode: pass EncryptedUpdate with encrypted data and nonce + async fn patch_content( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - format: DocumentDownloadFormat, - ) -> Result; - - async fn download_workspace_root( - &self, - actor: &crate::core::services::access::Actor, - workspace_id: Uuid, - workspace_name: &str, - format: DocumentDownloadFormat, - ) -> Result; + plaintext_operations: Option<&[DocumentPatchOperation]>, + encrypted_updates: Option<&[crate::documents::ports::realtime::realtime_port::EncryptedUpdate]>, + ) -> Result; async fn list_snapshots( &self, @@ -183,12 +176,13 @@ pub trait DocumentServiceFacade: Send + Sync { snapshot_id: Uuid, ) -> Result; - async fn download_snapshot( + /// Get a single snapshot with its encrypted content (E2EE format) + async fn get_snapshot( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, snapshot_id: Uuid, - ) -> Result; + ) -> Result; async fn backlinks( &self, @@ -203,6 +197,14 @@ pub trait DocumentServiceFacade: Send + Sync { workspace_id: Uuid, doc_id: Uuid, ) -> Result, ServiceError>; + + /// Update encrypted title fields for E2EE documents + async fn update_encrypted_title( + &self, + doc_id: Uuid, + encrypted_title: Vec, + encrypted_title_nonce: Vec, + ) -> Result<(), ServiceError>; } #[async_trait] @@ -210,20 +212,10 @@ impl DocumentServiceFacade for DocumentService { async fn list_for_user( &self, workspace_id: Uuid, - query: Option, tag: Option, state: DocumentListFilter, ) -> Result, ServiceError> { - self.list_for_user(workspace_id, query, tag, state).await - } - - async fn search_for_user( - &self, - workspace_id: Uuid, - query: Option, - limit: i64, - ) -> Result, ServiceError> { - self.search_for_user(workspace_id, query, limit).await + self.list_for_user(workspace_id, tag, state).await } async fn create_for_user( @@ -333,7 +325,7 @@ impl DocumentServiceFacade for DocumentService { &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - ) -> Result { + ) -> Result { self.get_content(actor, doc_id).await } @@ -341,38 +333,30 @@ impl DocumentServiceFacade for DocumentService { &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - content: &str, + content: &[u8], + nonce: Option<&[u8]>, + signature: Option<&[u8]>, ) -> Result { - self.update_content(actor, doc_id, content).await + self.update_content(actor, doc_id, content, nonce, signature).await } - async fn patch_content( + async fn update_content_from_markdown( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - operations: &[DocumentPatchOperation], + content: &str, ) -> Result { - self.patch_content(actor, doc_id, operations).await + self.update_content_from_markdown(actor, doc_id, content).await } - async fn download_document( + async fn patch_content( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, - format: DocumentDownloadFormat, - ) -> Result { - self.download_document(actor, doc_id, format).await - } - - async fn download_workspace_root( - &self, - actor: &crate::core::services::access::Actor, - workspace_id: Uuid, - workspace_name: &str, - format: DocumentDownloadFormat, - ) -> Result { - self.download_workspace_root(actor, workspace_id, workspace_name, format) - .await + plaintext_operations: Option<&[DocumentPatchOperation]>, + encrypted_updates: Option<&[crate::documents::ports::realtime::realtime_port::EncryptedUpdate]>, + ) -> Result { + self.patch_content(actor, doc_id, 
plaintext_operations, encrypted_updates).await } async fn list_snapshots( @@ -406,14 +390,13 @@ impl DocumentServiceFacade for DocumentService { self.restore_snapshot(actor, doc_id, snapshot_id).await } - async fn download_snapshot( + async fn get_snapshot( &self, actor: &crate::core::services::access::Actor, doc_id: Uuid, snapshot_id: Uuid, - ) -> Result - { - self.download_snapshot(actor, doc_id, snapshot_id).await + ) -> Result { + self.get_snapshot(actor, doc_id, snapshot_id).await } async fn backlinks( @@ -433,6 +416,16 @@ impl DocumentServiceFacade for DocumentService { ) -> Result, ServiceError> { self.outgoing_links(actor, workspace_id, doc_id).await } + + async fn update_encrypted_title( + &self, + doc_id: Uuid, + encrypted_title: Vec, + encrypted_title_nonce: Vec, + ) -> Result<(), ServiceError> { + self.update_encrypted_title(doc_id, encrypted_title, encrypted_title_nonce) + .await + } } pub struct DocumentService { @@ -446,7 +439,6 @@ pub struct DocumentService { events: Arc, realtime: Arc, snapshot_service: Arc, - exporter: Arc, } impl DocumentService { @@ -462,7 +454,6 @@ impl DocumentService { events: Arc, realtime: Arc, snapshot_service: Arc, - exporter: Arc, ) -> Self { Self { tx_runner, @@ -475,7 +466,6 @@ impl DocumentService { events, realtime, snapshot_service, - exporter, } } diff --git a/api/crates/application/src/documents/services/publishing/mod.rs b/api/crates/application/src/documents/services/publishing/mod.rs index 6aeae1f2..cd7e74c6 100644 --- a/api/crates/application/src/documents/services/publishing/mod.rs +++ b/api/crates/application/src/documents/services/publishing/mod.rs @@ -2,9 +2,12 @@ use std::sync::Arc; use uuid::Uuid; +use crate::core::ports::storage::storage_port::StorageResolverPort; use crate::core::services::errors::ServiceError; use crate::documents::dtos::PublicDocumentSummaryDto; -use crate::documents::ports::publishing::public_repository::PublicRepository; +use crate::documents::ports::publishing::public_repository::{ + PublicFileRow, PublicRepository, StorePublicFileInput, +}; use crate::documents::ports::realtime::realtime_port::RealtimeEngine; use crate::documents::use_cases::publishing::get_public::GetPublicByWorkspaceAndId; use crate::documents::use_cases::publishing::get_status::{GetPublishStatus, PublishStatusDto}; @@ -19,15 +22,23 @@ use domain::documents::public_policy; pub struct PublicService { repo: Arc, realtime: Arc, + storage: Arc, } #[async_trait] pub trait PublicServiceFacade: Send + Sync { + /// Publish document. 
+ /// For E2EE mode: pass plaintext_title and plaintext_content + /// For non-E2EE mode: pass None for both + /// noindex: if true, adds noindex meta tag to prevent search engine indexing (default: true) async fn publish_document( &self, workspace_id: Uuid, permissions: &PermissionSet, doc_id: Uuid, + plaintext_title: Option<&str>, + plaintext_content: Option<&str>, + noindex: bool, ) -> Result; async fn unpublish_document( @@ -44,6 +55,15 @@ pub trait PublicServiceFacade: Send + Sync { doc_id: Uuid, ) -> Result; + /// Update noindex setting for a published document + async fn update_noindex( + &self, + workspace_id: Uuid, + permissions: &PermissionSet, + doc_id: Uuid, + noindex: bool, + ) -> Result; + async fn list_workspace_public_documents( &self, workspace_slug: &str, @@ -59,7 +79,45 @@ pub trait PublicServiceFacade: Send + Sync { &self, workspace_slug: &str, doc_id: Uuid, - ) -> Result; + ) -> Result<(String, bool), ServiceError>; + + // --- Public file methods --- + + /// Store a decrypted file for public access + async fn store_public_file( + &self, + workspace_id: Uuid, + permissions: &PermissionSet, + doc_id: Uuid, + file_id: Uuid, + original_filename: &str, + logical_filename: &str, + mime_type: &str, + bytes: &[u8], + ) -> Result<(), ServiceError>; + + /// Get list of public files for a document + async fn get_public_files( + &self, + workspace_slug: &str, + doc_id: Uuid, + ) -> Result, ServiceError>; + + /// Read public file bytes by file_id + async fn read_public_file( + &self, + workspace_slug: &str, + doc_id: Uuid, + file_id: Uuid, + ) -> Result<(Vec, PublicFileRow), ServiceError>; + + /// Read public file bytes by logical filename + async fn read_public_file_by_logical_filename( + &self, + workspace_slug: &str, + doc_id: Uuid, + logical_filename: &str, + ) -> Result<(Vec, PublicFileRow), ServiceError>; } #[async_trait] @@ -69,8 +127,11 @@ impl PublicServiceFacade for PublicService { workspace_id: Uuid, permissions: &PermissionSet, doc_id: Uuid, + plaintext_title: Option<&str>, + plaintext_content: Option<&str>, + noindex: bool, ) -> Result { - self.publish_document(workspace_id, permissions, doc_id) + self.publish_document(workspace_id, permissions, doc_id, plaintext_title, plaintext_content, noindex) .await } @@ -94,6 +155,17 @@ impl PublicServiceFacade for PublicService { .await } + async fn update_noindex( + &self, + workspace_id: Uuid, + permissions: &PermissionSet, + doc_id: Uuid, + noindex: bool, + ) -> Result { + self.update_noindex(workspace_id, permissions, doc_id, noindex) + .await + } + async fn list_workspace_public_documents( &self, workspace_slug: &str, @@ -114,32 +186,116 @@ impl PublicServiceFacade for PublicService { &self, workspace_slug: &str, doc_id: Uuid, - ) -> Result { + ) -> Result<(String, bool), ServiceError> { self.get_public_content_by_workspace_and_id(workspace_slug, doc_id) .await } + + async fn store_public_file( + &self, + workspace_id: Uuid, + permissions: &PermissionSet, + doc_id: Uuid, + file_id: Uuid, + original_filename: &str, + logical_filename: &str, + mime_type: &str, + bytes: &[u8], + ) -> Result<(), ServiceError> { + self.store_public_file( + workspace_id, + permissions, + doc_id, + file_id, + original_filename, + logical_filename, + mime_type, + bytes, + ) + .await + } + + async fn get_public_files( + &self, + workspace_slug: &str, + doc_id: Uuid, + ) -> Result, ServiceError> { + self.get_public_files(workspace_slug, doc_id).await + } + + async fn read_public_file( + &self, + workspace_slug: &str, + doc_id: Uuid, + file_id: Uuid, + 
) -> Result<(Vec, PublicFileRow), ServiceError> { + self.read_public_file(workspace_slug, doc_id, file_id).await + } + + async fn read_public_file_by_logical_filename( + &self, + workspace_slug: &str, + doc_id: Uuid, + logical_filename: &str, + ) -> Result<(Vec, PublicFileRow), ServiceError> { + self.read_public_file_by_logical_filename(workspace_slug, doc_id, logical_filename) + .await + } } impl PublicService { - pub fn new(repo: Arc, realtime: Arc) -> Self { - Self { repo, realtime } + pub fn new( + repo: Arc, + realtime: Arc, + storage: Arc, + ) -> Self { + Self { + repo, + realtime, + storage, + } } + /// Publish document. + /// For E2EE mode: pass plaintext_title and plaintext_content + /// For non-E2EE mode: pass None for both + /// noindex: if true, adds noindex meta tag to prevent search engine indexing (default: true) pub async fn publish_document( &self, workspace_id: Uuid, permissions: &PermissionSet, doc_id: Uuid, + plaintext_title: Option<&str>, + plaintext_content: Option<&str>, + noindex: bool, ) -> Result { public_policy::ensure_public_publish_allowed(permissions) .map_err(|_| ServiceError::Forbidden)?; + let uc = PublishDocument { repo: self.repo.as_ref(), }; - uc.execute(workspace_id, doc_id) + let publish_result = uc + .execute(workspace_id, doc_id, noindex) .await .map_err(ServiceError::from)? - .ok_or(ServiceError::NotFound) + .ok_or(ServiceError::NotFound)?; + + // For E2EE mode: store plaintext content for public access + if let (Some(title), Some(content)) = (plaintext_title, plaintext_content) { + use sha2::{Digest, Sha256}; + let mut hasher = Sha256::new(); + hasher.update(title); + hasher.update(content); + let content_hash = hex::encode(hasher.finalize()); + + self.repo + .store_public_content(doc_id, title, content, &content_hash) + .await + .map_err(ServiceError::from)?; + } + + Ok(publish_result) } pub async fn unpublish_document( @@ -150,6 +306,25 @@ impl PublicService { ) -> Result { public_policy::ensure_public_unpublish_allowed(permissions) .map_err(|_| ServiceError::Forbidden)?; + + // Delete stored public content (E2EE mode) + self.repo + .delete_public_content(doc_id) + .await + .map_err(ServiceError::from)?; + + // Delete public files from storage + self.storage + .delete_public_files_for_document(workspace_id, doc_id) + .await + .map_err(ServiceError::from)?; + + // Delete public file metadata from database + self.repo + .delete_public_files(doc_id) + .await + .map_err(ServiceError::from)?; + let uc = UnpublishDocument { repo: self.repo.as_ref(), }; @@ -177,9 +352,36 @@ impl PublicService { Ok(PublishResponseDto { slug: status.slug, public_url: status.public_url, + noindex: status.noindex, }) } + pub async fn update_noindex( + &self, + workspace_id: Uuid, + permissions: &PermissionSet, + doc_id: Uuid, + noindex: bool, + ) -> Result { + public_policy::ensure_public_publish_allowed(permissions) + .map_err(|_| ServiceError::Forbidden)?; + + // Verify document belongs to workspace + let is_workspace_doc = self + .repo + .is_workspace_document(doc_id, workspace_id) + .await + .map_err(ServiceError::from)?; + if !is_workspace_doc { + return Err(ServiceError::NotFound); + } + + self.repo + .update_noindex(doc_id, noindex) + .await + .map_err(ServiceError::from) + } + pub async fn list_workspace_public_documents( &self, workspace_slug: &str, @@ -208,7 +410,98 @@ impl PublicService { &self, workspace_slug: &str, doc_id: Uuid, - ) -> Result { + ) -> Result<(String, bool), ServiceError> { + // Get noindex setting (also verifies document is published) + let 
noindex = self + .repo + .get_noindex_by_workspace_and_id(workspace_slug, doc_id) + .await + .map_err(ServiceError::from)? + .ok_or(ServiceError::NotFound)?; + + // Prefer stored plaintext content (E2EE mode) over realtime + if let Some(stored) = self + .repo + .get_public_content(doc_id) + .await + .map_err(ServiceError::from)? + { + return Ok((stored.content, noindex)); + } + + // Fall back to realtime content for non-E2EE documents + let content = self + .realtime + .get_content(&doc_id.to_string()) + .await + .map_err(ServiceError::from)? + .unwrap_or_default(); + Ok((content, noindex)) + } + + // --- Public file methods --- + + pub async fn store_public_file( + &self, + workspace_id: Uuid, + permissions: &PermissionSet, + doc_id: Uuid, + file_id: Uuid, + original_filename: &str, + logical_filename: &str, + mime_type: &str, + bytes: &[u8], + ) -> Result<(), ServiceError> { + // Verify permission to publish + public_policy::ensure_public_publish_allowed(permissions) + .map_err(|_| ServiceError::Forbidden)?; + + // Verify document belongs to workspace + let is_workspace_doc = self + .repo + .is_workspace_document(doc_id, workspace_id) + .await + .map_err(ServiceError::from)?; + if !is_workspace_doc { + return Err(ServiceError::NotFound); + } + + // Store the file in storage + let storage_path = self + .storage + .store_public_file(workspace_id, doc_id, file_id, bytes) + .await + .map_err(ServiceError::from)?; + + // Calculate content hash + use sha2::{Digest, Sha256}; + let content_hash = hex::encode(Sha256::digest(bytes)); + + // Store metadata in database + self.repo + .store_public_file(StorePublicFileInput { + document_id: doc_id, + workspace_id, + file_id, + original_filename: original_filename.to_string(), + logical_filename: logical_filename.to_string(), + mime_type: mime_type.to_string(), + size: bytes.len() as i64, + storage_path, + content_hash, + }) + .await + .map_err(ServiceError::from)?; + + Ok(()) + } + + pub async fn get_public_files( + &self, + workspace_slug: &str, + doc_id: Uuid, + ) -> Result, ServiceError> { + // Verify document is published let exists = self .repo .public_exists_by_workspace_and_id(workspace_slug, doc_id) @@ -217,12 +510,78 @@ impl PublicService { if !exists { return Err(ServiceError::NotFound); } - let content = self - .realtime - .get_content(&doc_id.to_string()) + + self.repo + .get_public_files(doc_id) + .await + .map_err(ServiceError::from) + } + + pub async fn read_public_file( + &self, + workspace_slug: &str, + doc_id: Uuid, + file_id: Uuid, + ) -> Result<(Vec, PublicFileRow), ServiceError> { + // Verify document is published + let exists = self + .repo + .public_exists_by_workspace_and_id(workspace_slug, doc_id) + .await + .map_err(ServiceError::from)?; + if !exists { + return Err(ServiceError::NotFound); + } + + // Get file metadata + let file = self + .repo + .get_public_file(doc_id, file_id) .await .map_err(ServiceError::from)? 
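Publishing is the one deliberate exit from E2EE: the owner decrypts locally and hands the server plaintext title and content so the public page can render without keys. An illustrative call site (the service handle, literal values, and the exact return type are assumptions, not code from this patch):

```rust
use domain::access::permissions::PermissionSet;
use uuid::Uuid;

/// Illustrative only: the client decrypts the title and markdown locally,
/// then opts the document into public rendering.
async fn publish_decrypted(
    svc: &dyn PublicServiceFacade,
    workspace_id: Uuid,
    perms: &PermissionSet,
    doc_id: Uuid,
) -> Result<PublishResponseDto, ServiceError> {
    svc.publish_document(
        workspace_id,
        perms,
        doc_id,
        Some("Meeting notes"),        // plaintext title, decrypted client-side
        Some("# Agenda\n- item one"), // plaintext markdown, decrypted client-side
        true,                         // noindex: keep the page out of search engines
    )
    .await
}
```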
- .unwrap_or_default(); - Ok(content) + .ok_or(ServiceError::NotFound)?; + + // Read file bytes + let bytes = self + .storage + .read_public_file(file.workspace_id, doc_id, file_id) + .await + .map_err(ServiceError::from)?; + + Ok((bytes, file)) + } + + pub async fn read_public_file_by_logical_filename( + &self, + workspace_slug: &str, + doc_id: Uuid, + logical_filename: &str, + ) -> Result<(Vec, PublicFileRow), ServiceError> { + // Verify document is published + let exists = self + .repo + .public_exists_by_workspace_and_id(workspace_slug, doc_id) + .await + .map_err(ServiceError::from)?; + if !exists { + return Err(ServiceError::NotFound); + } + + // Get file metadata by logical filename + let file = self + .repo + .get_public_file_by_logical_filename(doc_id, logical_filename) + .await + .map_err(ServiceError::from)? + .ok_or(ServiceError::NotFound)?; + + // Read file bytes + let bytes = self + .storage + .read_public_file(file.workspace_id, doc_id, file.file_id) + .await + .map_err(ServiceError::from)?; + + Ok((bytes, file)) } } diff --git a/api/crates/application/src/documents/services/realtime/doc_hydration.rs b/api/crates/application/src/documents/services/realtime/doc_hydration.rs index 51b459be..deedbc6c 100644 --- a/api/crates/application/src/documents/services/realtime/doc_hydration.rs +++ b/api/crates/application/src/documents/services/realtime/doc_hydration.rs @@ -71,9 +71,22 @@ impl DocHydrationService { for update in updates { if update.seq > last_seq { - apply_update_bytes(&doc, &update.update)?; - last_seq = update.seq; - applied_any = true; + match apply_update_bytes(&doc, &update.update) { + Ok(()) => { + last_seq = update.seq; + applied_any = true; + } + Err(e) => { + tracing::warn!( + document_id = %doc_id, + seq = update.seq, + error = ?e, + "hydration_skipping_corrupted_update" + ); + // Skip corrupted update but continue with the rest + last_seq = update.seq; + } + } } } @@ -83,13 +96,32 @@ impl DocHydrationService { .read_update_backlog(&doc_id_str, options.update_start_id) .await?; for entry in backlog { - let updates = extract_updates(&entry.payload)?; - for update in updates { - let mut txn = doc.transact_mut(); - txn.apply_update(update)?; + match extract_updates(&entry.payload) { + Ok(updates) => { + for update in updates { + let mut txn = doc.transact_mut(); + if let Err(e) = txn.apply_update(update) { + tracing::warn!( + document_id = %doc_id, + entry_id = %entry.id, + error = ?e, + "hydration_skipping_corrupted_backlog_update" + ); + } + } + last_update_stream_id = Some(entry.id); + applied_any = true; + } + Err(e) => { + tracing::warn!( + document_id = %doc_id, + entry_id = %entry.id, + error = ?e, + "hydration_skipping_corrupted_backlog_entry" + ); + last_update_stream_id = Some(entry.id); + } } - last_update_stream_id = Some(entry.id); - applied_any = true; } let awareness_entries = self diff --git a/api/crates/application/src/documents/services/realtime/snapshot.rs b/api/crates/application/src/documents/services/realtime/snapshot.rs index b0dd7b18..d9673af6 100644 --- a/api/crates/application/src/documents/services/realtime/snapshot.rs +++ b/api/crates/application/src/documents/services/realtime/snapshot.rs @@ -9,16 +9,16 @@ use yrs::{Doc, GetString, ReadTxn, StateVector, Text, Transact, Update}; use crate::core::ports::storage::storage_projection_queue::{ StorageProjectionJobKind, StorageProjectionQueue, }; -use crate::core::services::tagging; use crate::core::services::utils::hash::sha256_hex; use 
crate::documents::ports::document_snapshot_archive_repository::{ - DocumentSnapshotArchiveRepository, SnapshotArchiveInsert, SnapshotArchiveRecord, + DocumentSnapshotArchiveRepository, SnapshotArchiveEntry, SnapshotArchiveInsert, + SnapshotArchiveRecord, }; use crate::documents::ports::linkgraph_repository::LinkGraphRepository; use crate::documents::ports::realtime::realtime_hydration_port::DocStateReader; -use crate::documents::ports::realtime::realtime_persistence_port::DocPersistencePort; -use crate::documents::ports::realtime::realtime_persistence_port::SnapshotEntry; -use crate::documents::ports::tagging::tagging_repository::TaggingRepository; +use crate::documents::ports::realtime::realtime_persistence_port::{ + DocPersistencePort, EncryptedUpdateEntry, SnapshotEntry, +}; use crate::documents::services::linkgraph; use domain::documents::doc_type::DocumentType; @@ -26,7 +26,6 @@ pub struct SnapshotService { state_reader: Arc, persistence: Arc, linkgraph_repo: Arc, - tagging_repo: Arc, archive_repo: Arc, storage_jobs: Arc, } @@ -101,12 +100,10 @@ pub struct SnapshotArchiveOptions<'a> { } impl SnapshotService { - #[allow(clippy::too_many_arguments)] pub fn new( state_reader: Arc, persistence: Arc, linkgraph_repo: Arc, - tagging_repo: Arc, archive_repo: Arc, storage_jobs: Arc, ) -> Self { @@ -114,7 +111,6 @@ impl SnapshotService { state_reader, persistence, linkgraph_repo, - tagging_repo, archive_repo, storage_jobs, } @@ -129,7 +125,7 @@ impl SnapshotService { let snapshot_bin = encode_doc_snapshot(doc); let (current_version, previous_snapshot) = if options.skip_if_unchanged { match self.persistence.latest_snapshot_entry(doc_id).await? { - Some(SnapshotEntry { version, bytes }) => (version, Some(bytes)), + Some(SnapshotEntry { version, bytes, .. }) => (version, Some(bytes)), None => (0, None), } } else { @@ -165,7 +161,7 @@ impl SnapshotService { } let next_version = current_version + 1; self.persistence - .persist_snapshot(doc_id, next_version, &snapshot_bin) + .persist_snapshot(doc_id, next_version, &snapshot_bin, None) .await?; if options.clear_updates { self.persistence.clear_updates(doc_id).await?; @@ -218,13 +214,8 @@ impl SnapshotService { &contents, ) .await; - let _ = tagging::update_document_tags( - self.tagging_repo.as_ref(), - *doc_id, - owner_id, - &contents, - ) - .await; + // Note: Automatic tag extraction removed (Phase 14 E2EE) + // Tags are now extracted and encrypted on the client side } Ok(MarkdownPersistResult { written: true }) } @@ -239,14 +230,21 @@ impl SnapshotService { if record.doc_type == DocumentType::Folder { return Ok(None); } - let doc = self.hydrate_doc_from_state(doc_id).await?; - let contents = extract_markdown(&doc); - let bytes = render_markdown_bytes(doc_id, &record.title, &contents); + + // E2EE: Export encrypted snapshot + updates directly (no Yjs hydration) + let snapshot = self.persistence.latest_snapshot_entry(doc_id).await?; + let since_seq = snapshot + .as_ref() + .and_then(|s| s.seq_at_snapshot) + .unwrap_or(0); + let updates = self.persistence.get_updates_since(doc_id, since_seq).await?; + + let bytes = serialize_encrypted_backup(&snapshot, &updates); let content_hash = sha256_hex(&bytes); - let repo_path = repo_path_from_record(&record); + Ok(Some(MarkdownExport { bytes, - repo_path, + repo_path: None, // E2EE: No repo path (doc_id based path) owner_id: record.owner_id, workspace_id: record.workspace_id, content_hash, @@ -327,6 +325,17 @@ impl SnapshotService { } Ok(None) } + + /// Get a snapshot entry (record + bytes) by ID + pub async fn 
get_snapshot_entry( + &self, + snapshot_id: Uuid, + ) -> anyhow::Result> { + self.archive_repo + .get_by_id(snapshot_id) + .await + .map_err(Into::into) + } } fn extract_markdown(doc: &Doc) -> String { @@ -335,14 +344,6 @@ fn extract_markdown(doc: &Doc) -> String { txt.get_string(&txn) } -fn render_markdown_bytes(doc_id: &Uuid, title: &str, contents: &str) -> Vec { - let mut formatted = format!("---\nid: {}\ntitle: {}\n---\n\n{}", doc_id, title, contents); - if !formatted.ends_with('\n') { - formatted.push('\n'); - } - formatted.into_bytes() -} - fn apply_update_bytes(doc: &Doc, bytes: &[u8]) -> anyhow::Result<()> { let update = Update::decode_v1(bytes)?; let mut txn = doc.transact_mut(); @@ -376,52 +377,69 @@ pub fn doc_from_snapshot_bytes(bytes: &[u8]) -> anyhow::Result { Ok(doc) } -impl SnapshotService { - async fn hydrate_doc_from_state(&self, doc_id: &Uuid) -> anyhow::Result { - let doc = Doc::new(); - let mut last_seq = 0i64; - if let Some(snapshot) = self.state_reader.latest_snapshot(doc_id).await? { - apply_update_bytes(&doc, &snapshot.snapshot)?; - last_seq = snapshot.version; - } - let updates = self.state_reader.updates_since(doc_id, last_seq).await?; - for update in updates { - apply_update_bytes(&doc, &update.update)?; - } - Ok(doc) +/// Serialize encrypted snapshot and updates to a binary backup format. +/// Format: +/// - Magic: "RMBK" (4 bytes) +/// - Version: 1 (1 byte) +/// - Has snapshot: 1 byte (0 or 1) +/// - If has snapshot: +/// - snapshot_version: i64 +/// - snapshot_bytes_len: u32, snapshot_bytes +/// - nonce_len: u32, nonce_bytes (0 if None) +/// - signature_len: u32, signature_bytes (0 if None) +/// - seq_at_snapshot: i64 (-1 if None) +/// - update_count: u32 +/// - For each update: +/// - seq: i64 +/// - data_len: u32, data_bytes +/// - nonce_len: u32, nonce_bytes (0 if None) +/// - signature_len: u32, signature_bytes (0 if None) +/// - public_key_len: u32, public_key_bytes (0 if None) +fn serialize_encrypted_backup( + snapshot: &Option, + updates: &[EncryptedUpdateEntry], +) -> Vec { + let mut buf = Vec::new(); + + // Magic and version + buf.extend_from_slice(b"RMBK"); + buf.push(1u8); + + // Snapshot + if let Some(snap) = snapshot { + buf.push(1u8); // has snapshot + buf.extend_from_slice(&snap.version.to_le_bytes()); + buf.extend_from_slice(&(snap.bytes.len() as u32).to_le_bytes()); + buf.extend_from_slice(&snap.bytes); + write_optional_bytes(&mut buf, snap.nonce.as_deref()); + write_optional_bytes(&mut buf, snap.signature.as_deref()); + buf.extend_from_slice(&snap.seq_at_snapshot.unwrap_or(-1).to_le_bytes()); + } else { + buf.push(0u8); // no snapshot } -} -fn repo_path_from_record( - record: &crate::documents::ports::realtime::realtime_hydration_port::DocumentRecord, -) -> Option { - if let Some(path) = record.desired_path.as_deref() { - return Some(normalize_repo_path(path)); - } - if let Some(path) = record.path.as_deref() { - return strip_workspace_prefix(record.workspace_id, path); + // Updates + buf.extend_from_slice(&(updates.len() as u32).to_le_bytes()); + for update in updates { + buf.extend_from_slice(&update.seq.to_le_bytes()); + buf.extend_from_slice(&(update.data.len() as u32).to_le_bytes()); + buf.extend_from_slice(&update.data); + write_optional_bytes(&mut buf, update.nonce.as_deref()); + write_optional_bytes(&mut buf, update.signature.as_deref()); + write_optional_bytes(&mut buf, update.public_key.as_deref()); } - None -} -fn strip_workspace_prefix(workspace_id: Uuid, relative: &str) -> Option { - let trimmed = 
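The backup writer in `serialize_encrypted_backup` has a natural inverse on the restore path. A sketch of the decoding conventions (the magic/version check and the length-prefixed optional-bytes rule); this decoder is illustrative and not part of the patch:

```rust
/// Verify the "RMBK" magic and version byte before decoding a backup.
fn check_backup_header(buf: &[u8]) -> anyhow::Result<()> {
    anyhow::ensure!(buf.len() >= 5, "backup too short");
    anyhow::ensure!(&buf[..4] == b"RMBK", "bad magic");
    anyhow::ensure!(buf[4] == 1, "unsupported backup version");
    Ok(())
}

/// Inverse of `write_optional_bytes`: a u32 little-endian length followed by
/// that many bytes, with length 0 meaning None. Advances `pos` past the field.
fn read_optional_bytes(buf: &[u8], pos: &mut usize) -> anyhow::Result<Option<Vec<u8>>> {
    let end = *pos + 4;
    anyhow::ensure!(buf.len() >= end, "truncated length field");
    let len = u32::from_le_bytes(buf[*pos..end].try_into()?) as usize;
    *pos = end;
    anyhow::ensure!(buf.len() >= *pos + len, "truncated payload");
    let bytes = if len == 0 {
        None
    } else {
        Some(buf[*pos..*pos + len].to_vec())
    };
    *pos += len;
    Ok(bytes)
}
```

A full reader would apply the same pattern to the snapshot block and each update record, in the order the format comment above lays out.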
relative.trim_start_matches('/'); - let mut parts = trimmed.splitn(2, '/'); - let owner = parts.next()?; - if owner != workspace_id.to_string() { - return None; - } - parts - .next() - .map(|rest| rest.trim_start_matches('/').to_string()) - .filter(|s| !s.is_empty()) + buf } -fn normalize_repo_path(path: &str) -> String { - let trimmed = path.trim_start_matches('/'); - if trimmed.is_empty() { - "".into() - } else { - trimmed.replace('\\', "/") +fn write_optional_bytes(buf: &mut Vec, data: Option<&[u8]>) { + match data { + Some(bytes) => { + buf.extend_from_slice(&(bytes.len() as u32).to_le_bytes()); + buf.extend_from_slice(bytes); + } + None => { + buf.extend_from_slice(&0u32.to_le_bytes()); + } } } diff --git a/api/crates/application/src/documents/services/sharing/crud.rs b/api/crates/application/src/documents/services/sharing/crud.rs index 0c9ae731..fbd62f55 100644 --- a/api/crates/application/src/documents/services/sharing/crud.rs +++ b/api/crates/application/src/documents/services/sharing/crud.rs @@ -36,6 +36,7 @@ impl ShareService { uc.execute(workspace_id, actor_id, document_id, permission, expires_at) .await .map(|res| CreatedShareDto { + share_id: res.share_id, token: res.token, document_id: res.document_id, document_type: res.document_type.as_str().to_string(), diff --git a/api/crates/application/src/documents/services/sharing/mod.rs b/api/crates/application/src/documents/services/sharing/mod.rs index c0566e06..11b06b2f 100644 --- a/api/crates/application/src/documents/services/sharing/mod.rs +++ b/api/crates/application/src/documents/services/sharing/mod.rs @@ -7,7 +7,7 @@ use crate::documents::dtos::{ ActiveShareItemDto, ApplicableShareDto, CreatedShareDto, ShareBrowseResponseDto, ShareDocumentDto, ShareItemDto, ShareMountDto, }; -use crate::documents::ports::sharing::shares_repository::SharesRepository; +use crate::documents::ports::sharing::shares_repository::{ChildShareInfo, SharesRepository}; use async_trait::async_trait; use domain::access::permissions::PermissionSet; use domain::documents::share; @@ -113,6 +113,12 @@ pub trait ShareServiceFacade: Send + Sync { permissions: &PermissionSet, token: &str, ) -> Result; + + /// Get child share info (token, share_id, encrypted_dek) for documents in a folder share + async fn list_child_share_info( + &self, + parent_share_id: Uuid, + ) -> Result, ServiceError>; } #[async_trait] @@ -239,6 +245,16 @@ impl ShareServiceFacade for ShareService { self.materialize_folder_share(workspace_id, actor_id, permissions, token) .await } + + async fn list_child_share_info( + &self, + parent_share_id: Uuid, + ) -> Result, ServiceError> { + self.repo + .list_child_share_info(parent_share_id) + .await + .map_err(|e| ServiceError::Unexpected(e.into())) + } } impl ShareService { diff --git a/api/crates/application/src/documents/services/snapshots.rs b/api/crates/application/src/documents/services/snapshots.rs index 72364519..d3429c4e 100644 --- a/api/crates/application/src/documents/services/snapshots.rs +++ b/api/crates/application/src/documents/services/snapshots.rs @@ -2,11 +2,12 @@ use uuid::Uuid; use crate::core::services::access::{self, Actor}; use crate::core::services::errors::ServiceError; -use crate::documents::dtos::{SnapshotDiffBaseMode, SnapshotDiffDto, SnapshotSummaryDto}; +use crate::documents::dtos::{ + SnapshotDetailDto, SnapshotDiffBaseMode, SnapshotDiffDto, SnapshotSummaryDto, +}; use crate::documents::use_cases::list_snapshots::ListSnapshots; use crate::documents::use_cases::restore_snapshot::RestoreSnapshot; use 
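// A minimal sketch of a consumer for the "RMBK" backup layout documented on
// serialize_encrypted_backup above. `ParsedSnapshot`, `ParsedUpdate`, and the
// read helpers are hypothetical names, not part of this codebase; the field
// order and the 0-length / -1 sentinels follow the doc comment exactly.

struct ParsedSnapshot {
    version: i64,
    bytes: Vec<u8>,
    nonce: Option<Vec<u8>>,
    signature: Option<Vec<u8>>,
    seq_at_snapshot: Option<i64>,
}

struct ParsedUpdate {
    seq: i64,
    data: Vec<u8>,
    nonce: Option<Vec<u8>>,
    signature: Option<Vec<u8>>,
    public_key: Option<Vec<u8>>,
}

// Bounds-checked cursor over the buffer.
fn take<'a>(buf: &'a [u8], pos: &mut usize, n: usize) -> anyhow::Result<&'a [u8]> {
    let end = pos.checked_add(n).ok_or_else(|| anyhow::anyhow!("length overflow"))?;
    let slice = buf.get(*pos..end).ok_or_else(|| anyhow::anyhow!("truncated backup"))?;
    *pos = end;
    Ok(slice)
}

fn read_u32(buf: &[u8], pos: &mut usize) -> anyhow::Result<u32> {
    Ok(u32::from_le_bytes(take(buf, pos, 4)?.try_into()?))
}

fn read_i64(buf: &[u8], pos: &mut usize) -> anyhow::Result<i64> {
    Ok(i64::from_le_bytes(take(buf, pos, 8)?.try_into()?))
}

// Length-prefixed optional field: a zero length means None.
fn read_opt(buf: &[u8], pos: &mut usize) -> anyhow::Result<Option<Vec<u8>>> {
    let len = read_u32(buf, pos)? as usize;
    if len == 0 { Ok(None) } else { Ok(Some(take(buf, pos, len)?.to_vec())) }
}

fn parse_backup(buf: &[u8]) -> anyhow::Result<(Option<ParsedSnapshot>, Vec<ParsedUpdate>)> {
    let mut pos = 0usize;
    anyhow::ensure!(take(buf, &mut pos, 4)? == b"RMBK".as_slice(), "bad magic");
    anyhow::ensure!(take(buf, &mut pos, 1)?[0] == 1, "unsupported backup version");
    let snapshot = if take(buf, &mut pos, 1)?[0] == 1 {
        let version = read_i64(buf, &mut pos)?;
        let len = read_u32(buf, &mut pos)? as usize;
        let bytes = take(buf, &mut pos, len)?.to_vec();
        let nonce = read_opt(buf, &mut pos)?;
        let signature = read_opt(buf, &mut pos)?;
        let seq = read_i64(buf, &mut pos)?;
        Some(ParsedSnapshot {
            version,
            bytes,
            nonce,
            signature,
            seq_at_snapshot: (seq >= 0).then_some(seq),
        })
    } else {
        None
    };
    let count = read_u32(buf, &mut pos)?;
    // A hardened parser would cap this allocation against malicious counts.
    let mut updates = Vec::with_capacity(count as usize);
    for _ in 0..count {
        let seq = read_i64(buf, &mut pos)?;
        let len = read_u32(buf, &mut pos)? as usize;
        let data = take(buf, &mut pos, len)?.to_vec();
        updates.push(ParsedUpdate {
            seq,
            data,
            nonce: read_opt(buf, &mut pos)?,
            signature: read_opt(buf, &mut pos)?,
            public_key: read_opt(buf, &mut pos)?,
        });
    }
    Ok((snapshot, updates))
}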
crate::documents::use_cases::snapshot_diff::SnapshotDiff; -use crate::documents::use_cases::snapshot_download::{DownloadSnapshot, SnapshotDownload}; use super::DocumentService; use super::snapshot_dto::snapshot_diff_dto_from_result; @@ -110,12 +111,13 @@ impl DocumentService { Ok(SnapshotSummaryDto::from(record)) } - pub async fn download_snapshot( + /// Get a single snapshot with its encrypted content (E2EE format) + pub async fn get_snapshot( &self, actor: &Actor, doc_id: Uuid, snapshot_id: Uuid, - ) -> Result { + ) -> Result { access::require_view( self.access_repo.as_ref(), self.share_access.as_ref(), @@ -128,14 +130,23 @@ impl DocumentService { other => other, })?; - let uc = DownloadSnapshot { - files: self.files_repo.as_ref(), - storage: self.storage.as_ref(), - snapshots: self.snapshot_service.as_ref(), - }; - uc.execute(doc_id, snapshot_id) + let entry = self + .snapshot_service + .get_snapshot_entry(snapshot_id) .await .map_err(ServiceError::from)? - .ok_or(ServiceError::NotFound) + .ok_or(ServiceError::NotFound)?; + + // Verify the snapshot belongs to the requested document + if entry.record.document_id != doc_id { + return Err(ServiceError::NotFound); + } + + Ok(SnapshotDetailDto { + id: entry.record.id, + content: entry.bytes, + nonce: entry.record.nonce, + created_at: entry.record.created_at, + }) } } diff --git a/api/crates/application/src/documents/services/tagging/mod.rs b/api/crates/application/src/documents/services/tagging/mod.rs index 9ac28a88..9ff75616 100644 --- a/api/crates/application/src/documents/services/tagging/mod.rs +++ b/api/crates/application/src/documents/services/tagging/mod.rs @@ -3,50 +3,189 @@ use std::sync::Arc; use uuid::Uuid; use crate::core::services::errors::ServiceError; -use crate::documents::dtos::TagItemDto; -use crate::documents::ports::tagging::tag_repository::TagRepository; -use crate::documents::use_cases::tagging::list_tags::ListTags; +use crate::documents::dtos::{EncryptedTagEntryDto, EncryptedTagItemDto}; +use crate::documents::ports::tagging::encrypted_tag_repository::EncryptedTagRepository; use async_trait::async_trait; pub struct TagService { - repo: Arc, + encrypted_tag_repo: Arc, } #[async_trait] pub trait TagServiceFacade: Send + Sync { - async fn list( + /// List all encrypted tags in a workspace + async fn list_encrypted_tags( &self, workspace_id: Uuid, - filter: Option, - ) -> Result, ServiceError>; + ) -> Result, ServiceError>; + + /// List encrypted tags for a specific document + async fn list_document_encrypted_tags( + &self, + document_id: Uuid, + ) -> Result, ServiceError>; + + /// Replace all encrypted tags for a document + async fn replace_document_encrypted_tags( + &self, + workspace_id: Uuid, + document_id: Uuid, + encrypted_tags: Vec>, + ) -> Result, ServiceError>; + + /// Find documents by encrypted tag + async fn find_documents_by_encrypted_tag( + &self, + workspace_id: Uuid, + encrypted_tag: Vec, + ) -> Result, ServiceError>; + + /// Find a specific encrypted tag (for filtering) + async fn find_encrypted_tag( + &self, + workspace_id: Uuid, + encrypted_tag: Vec, + ) -> Result, ServiceError>; } #[async_trait] impl TagServiceFacade for TagService { - async fn list( + async fn list_encrypted_tags( + &self, + workspace_id: Uuid, + ) -> Result, ServiceError> { + self.list_encrypted_tags(workspace_id).await + } + + async fn list_document_encrypted_tags( + &self, + document_id: Uuid, + ) -> Result, ServiceError> { + self.list_document_encrypted_tags(document_id).await + } + + async fn replace_document_encrypted_tags( 
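// A sketch of the client-side counterpart to get_snapshot above: decrypting
// SnapshotDetailDto.content with its stored nonce. This assumes the content
// was sealed with XChaCha20-Poly1305 under a 32-byte document DEK; the AEAD
// choice and the key-delivery path are assumptions, not fixed by this diff.
use chacha20poly1305::aead::{Aead, KeyInit};
use chacha20poly1305::{Key, XChaCha20Poly1305, XNonce};

fn decrypt_snapshot_content(dek: &[u8; 32], nonce: &[u8], content: &[u8]) -> anyhow::Result<Vec<u8>> {
    anyhow::ensure!(nonce.len() == 24, "XChaCha20-Poly1305 uses a 24-byte nonce");
    let cipher = XChaCha20Poly1305::new(Key::from_slice(dek));
    cipher
        .decrypt(XNonce::from_slice(nonce), content)
        .map_err(|_| anyhow::anyhow!("snapshot decryption failed: wrong key or corrupted data"))
}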
&self, workspace_id: Uuid, - filter: Option, - ) -> Result, ServiceError> { - self.list(workspace_id, filter).await + document_id: Uuid, + encrypted_tags: Vec>, + ) -> Result, ServiceError> { + self.replace_document_encrypted_tags(workspace_id, document_id, encrypted_tags) + .await + } + + async fn find_documents_by_encrypted_tag( + &self, + workspace_id: Uuid, + encrypted_tag: Vec, + ) -> Result, ServiceError> { + self.find_documents_by_encrypted_tag(workspace_id, encrypted_tag) + .await + } + + async fn find_encrypted_tag( + &self, + workspace_id: Uuid, + encrypted_tag: Vec, + ) -> Result, ServiceError> { + self.find_encrypted_tag(workspace_id, encrypted_tag).await } } impl TagService { - pub fn new(repo: Arc) -> Self { - Self { repo } + pub fn new(encrypted_tag_repo: Arc) -> Self { + Self { encrypted_tag_repo } + } + + pub async fn list_encrypted_tags( + &self, + workspace_id: Uuid, + ) -> Result, ServiceError> { + let summaries = self + .encrypted_tag_repo + .list_encrypted_tags(workspace_id) + .await + .map_err(ServiceError::from)?; + + Ok(summaries + .into_iter() + .map(|s| EncryptedTagItemDto { + encrypted_tag: s.encrypted_tag, + count: s.count, + }) + .collect()) + } + + pub async fn list_document_encrypted_tags( + &self, + document_id: Uuid, + ) -> Result, ServiceError> { + let entries = self + .encrypted_tag_repo + .list_document_encrypted_tags(document_id) + .await + .map_err(ServiceError::from)?; + + Ok(entries + .into_iter() + .map(|e| EncryptedTagEntryDto { + id: e.id, + encrypted_tag: e.encrypted_tag, + created_at: e.created_at, + }) + .collect()) } - pub async fn list( + pub async fn replace_document_encrypted_tags( &self, workspace_id: Uuid, - filter: Option, - ) -> Result, ServiceError> { - let uc = ListTags { - repo: self.repo.as_ref(), - }; - uc.execute(workspace_id, filter) + document_id: Uuid, + encrypted_tags: Vec>, + ) -> Result, ServiceError> { + let entries = self + .encrypted_tag_repo + .replace_document_encrypted_tags(workspace_id, document_id, &encrypted_tags) + .await + .map_err(ServiceError::from)?; + + Ok(entries + .into_iter() + .map(|e| EncryptedTagEntryDto { + id: e.id, + encrypted_tag: e.encrypted_tag, + created_at: e.created_at, + }) + .collect()) + } + + pub async fn find_documents_by_encrypted_tag( + &self, + workspace_id: Uuid, + encrypted_tag: Vec, + ) -> Result, ServiceError> { + self.encrypted_tag_repo + .find_documents_by_encrypted_tag(workspace_id, &encrypted_tag) .await .map_err(ServiceError::from) } + + pub async fn find_encrypted_tag( + &self, + workspace_id: Uuid, + encrypted_tag: Vec, + ) -> Result, ServiceError> { + let result = self + .encrypted_tag_repo + .find_encrypted_tag(workspace_id, &encrypted_tag) + .await + .map_err(ServiceError::from)?; + + Ok(result + .into_iter() + .map(|s| EncryptedTagItemDto { + encrypted_tag: s.encrypted_tag, + count: s.count, + }) + .collect()) + } } diff --git a/api/crates/application/src/documents/use_cases/download_document.rs b/api/crates/application/src/documents/use_cases/download_document.rs deleted file mode 100644 index 3f8d75e7..00000000 --- a/api/crates/application/src/documents/use_cases/download_document.rs +++ /dev/null @@ -1,346 +0,0 @@ -use std::collections::HashMap; -use std::io::{Cursor, Write}; -use std::path::{Component, Path}; - -use chrono::Utc; -use uuid::Uuid; - -use crate::core::ports::storage::storage_port::StorageResolverPort; -use crate::core::services::access::{self, Actor, Capability}; -use crate::documents::dtos::{DocumentDownload, DocumentDownloadFormat}; -use 
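// Because find_documents_by_encrypted_tag and find_encrypted_tag above match
// on exact ciphertext bytes, the client must derive `encrypted_tag`
// deterministically: a random-nonce AEAD would break equality lookups. One
// plausible scheme, shown here as an assumption rather than the project's
// confirmed design, is an HMAC-SHA256 blind index under a client-held key.
use hmac::{Hmac, Mac};
use sha2::Sha256;

fn tag_blind_index(tag_index_key: &[u8], tag: &str) -> Vec<u8> {
    let mut mac = Hmac::<Sha256>::new_from_slice(tag_index_key)
        .expect("HMAC-SHA256 accepts keys of any length");
    // Normalize so "Rust" and "rust " map to the same index bytes.
    mac.update(tag.trim().to_lowercase().as_bytes());
    mac.finalize().into_bytes().to_vec()
}
// The server only ever sees these opaque bytes; counting and grouping stay in
// EncryptedTagRepository, which never learns the plaintext tag.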
crate::documents::ports::access_repository::AccessRepository; -use crate::documents::ports::document_exporter::{ - DocumentExportAssets, DocumentExportAttachment, DocumentExporter, -}; -use crate::documents::ports::document_repository::DocumentRepository; -use crate::documents::ports::files::files_repository::FilesRepository; -use crate::documents::ports::sharing::share_access_port::ShareAccessPort; -use crate::documents::services::realtime::snapshot::SnapshotService; -use domain::documents::doc_type::DocumentType; -use domain::documents::document::Document as DomainDocument; -use thiserror::Error; -use zip::write::FileOptions; -use zip::{CompressionMethod, ZipWriter}; - -pub struct DownloadDocument<'a, D, F, S, A, SH> -where - D: DocumentRepository + ?Sized, - F: FilesRepository + ?Sized, - S: StorageResolverPort + ?Sized, - A: AccessRepository + ?Sized, - SH: ShareAccessPort + ?Sized, -{ - pub documents: &'a D, - pub files: &'a F, - pub storage: &'a S, - pub access: &'a A, - pub shares: &'a SH, - pub snapshot: &'a SnapshotService, - pub exporter: &'a dyn DocumentExporter, -} - -impl<'a, D, F, S, A, SH> DownloadDocument<'a, D, F, S, A, SH> -where - D: DocumentRepository + ?Sized, - F: FilesRepository + ?Sized, - S: StorageResolverPort + ?Sized, - A: AccessRepository + ?Sized, - SH: ShareAccessPort + ?Sized, -{ - #[allow(clippy::too_many_lines)] - pub async fn execute( - &self, - actor: &Actor, - doc_id: Uuid, - format: DocumentDownloadFormat, - ) -> anyhow::Result> { - let capability = access::resolve_document(self.access, self.shares, actor, doc_id).await?; - if capability < Capability::View { - return Ok(None); - } - - let document = match self.documents.get_by_id(doc_id).await? { - Some(doc) => doc, - None => return Ok(None), - }; - - if document.doc_type() == DocumentType::Folder { - return self.download_folder(actor, &document, format).await; - } - - let export_assets = match self.prepare_document_assets(&document).await? { - Some(assets) => assets, - None => return Ok(None), - }; - - let download = self.exporter.export(export_assets, format).await?; - Ok(Some(download)) - } - - async fn prepare_document_assets( - &self, - document: &DomainDocument, - ) -> anyhow::Result> { - if document.doc_type() == DocumentType::Folder { - return Ok(None); - } - let doc_id = document.id(); - let export = match self.snapshot.export_current_markdown(&doc_id).await? 
{ - Some(export) => export, - None => return Ok(None), - }; - let doc_dir = self.storage.build_doc_dir(doc_id).await?; - let attachments = self.collect_attachments(doc_id, &doc_dir).await?; - let safe_title = sanitize_filename(document.title().as_str()); - let display_title = document.title().as_str().trim(); - let display_title = if display_title.is_empty() { - None - } else { - Some(display_title.to_string()) - }; - Ok(Some(DocumentExportAssets { - safe_title, - display_title, - markdown: export.bytes, - attachments, - })) - } - - async fn collect_attachments( - &self, - doc_id: Uuid, - doc_dir: &Path, - ) -> anyhow::Result> { - let stored_attachments = self.files.list_storage_paths_for_document(doc_id).await?; - let mut attachments: Vec = Vec::new(); - for stored_path in stored_attachments { - let full_path = self.storage.absolute_from_relative(&stored_path); - if !full_path.starts_with(doc_dir) { - continue; - } - let relative = match full_path.strip_prefix(doc_dir) { - Ok(rel) => rel, - Err(_) => continue, - }; - if relative.as_os_str().is_empty() { - continue; - } - if relative - .components() - .any(|c| matches!(c, Component::ParentDir | Component::RootDir)) - { - continue; - } - let rel_str = relative.to_string_lossy().replace('\\', "/"); - let data = self.storage.read_bytes(full_path.as_path()).await?; - attachments.push(DocumentExportAttachment { - relative_path: rel_str, - bytes: data, - }); - } - Ok(attachments) - } - - async fn download_folder( - &self, - actor: &Actor, - folder: &DomainDocument, - format: DocumentDownloadFormat, - ) -> anyhow::Result> { - if format != DocumentDownloadFormat::Archive { - return Err(FolderDownloadUnsupportedFormat.into()); - } - - let mut nodes: HashMap = HashMap::new(); - nodes.insert(folder.id(), folder.clone()); - let subtree = self - .documents - .list_owned_subtree_documents(folder.workspace_id(), folder.id()) - .await?; - for entry in subtree { - if entry.id == folder.id() { - continue; - } - if let Some(doc) = self.documents.get_by_id(entry.id).await? 
{ - nodes.insert(doc.id(), doc); - } - } - - let root_name = sanitize_filename(folder.title().as_str()); - let entries = self - .build_archive_entries( - actor, - &nodes, - folder.id(), - Some(folder.desired_path().as_str()), - ) - .await?; - let bytes = build_folder_archive(&root_name, &entries)?; - Ok(Some(DocumentDownload { - filename: format.file_name(&root_name), - content_type: format.content_type().to_string(), - bytes, - })) - } - - pub async fn download_workspace_root( - &self, - actor: &Actor, - workspace_id: Uuid, - workspace_name: &str, - format: DocumentDownloadFormat, - ) -> anyhow::Result> { - if format != DocumentDownloadFormat::Archive { - return Err(FolderDownloadUnsupportedFormat.into()); - } - - let documents = self - .documents - .list_workspace_documents(workspace_id) - .await?; - let mut nodes: HashMap = HashMap::new(); - for doc in documents { - nodes.insert(doc.id(), doc); - } - - let root = DomainDocument::rehydrate( - workspace_id, - None, - workspace_id, - domain::documents::title::Title::new(workspace_name), - None, - DocumentType::Folder, - Utc::now(), - Utc::now(), - None, - domain::documents::path::Slug::new(sanitize_filename(workspace_name)) - .unwrap_or_else(|_| domain::documents::path::Slug::from_title(workspace_name)), - domain::documents::path::DesiredPath::root(), - None, - None, - None, - None, - None, - ); - nodes.insert(root.id(), root); - - let root_name = sanitize_filename(workspace_name); - let entries = self - .build_archive_entries(actor, &nodes, workspace_id, None) - .await?; - let bytes = build_folder_archive(&root_name, &entries)?; - Ok(Some(DocumentDownload { - filename: format.file_name(&root_name), - content_type: format.content_type().to_string(), - bytes, - })) - } - - async fn build_archive_entries( - &self, - actor: &Actor, - nodes: &HashMap, - root_id: Uuid, - base_prefix: Option<&str>, - ) -> anyhow::Result> { - let mut entries: Vec = Vec::new(); - for doc in nodes.values() { - if doc.id() == root_id || doc.doc_type() == DocumentType::Folder { - continue; - } - let capability = - access::resolve_document(self.access, self.shares, actor, doc.id()).await?; - if capability < Capability::View { - continue; - } - let Some(assets) = self.prepare_document_assets(doc).await? 
else { - continue; - }; - let relative_path = resolve_relative_path(doc, base_prefix); - entries.push(FolderDownloadEntry { - relative_path, - assets, - }); - } - entries.sort_by(|a, b| a.relative_path.cmp(&b.relative_path)); - Ok(entries) - } -} - -fn sanitize_filename(name: &str) -> String { - let mut s = name.trim().to_string(); - let invalid = ['/', '\\', ':', '*', '?', '"', '<', '>', '|', '\0']; - for ch in invalid { - s = s.replace(ch, "-"); - } - s = s.replace(' ', "_"); - if s.is_empty() { - s = "document".into(); - } - if s.len() > 100 { - s.truncate(100); - } - s -} - -fn resolve_relative_path(doc: &DomainDocument, base_prefix: Option<&str>) -> String { - let path = doc.desired_path().as_str().trim_start_matches('/'); - if let Some(base) = base_prefix - .map(|b| b.trim_start_matches('/')) - .filter(|b| !b.is_empty()) - && let Some(stripped) = path.strip_prefix(base) - && let trimmed = stripped.trim_start_matches('/') - && !trimmed.is_empty() - { - return trimmed.to_string(); - } - if path.is_empty() { - format!("{}.md", sanitize_filename(doc.title().as_str())) - } else { - path.to_string() - } -} - -struct FolderDownloadEntry { - relative_path: String, - assets: DocumentExportAssets, -} - -fn build_folder_archive( - root_name: &str, - entries: &[FolderDownloadEntry], -) -> anyhow::Result> { - let mut cursor = Cursor::new(Vec::new()); - { - let mut zip = ZipWriter::new(&mut cursor); - let options = FileOptions::default() - .compression_method(CompressionMethod::Deflated) - .unix_permissions(0o644); - zip.add_directory(format!("{root_name}/"), options)?; - for entry in entries { - let markdown_entry = format!("{}/{}", root_name, entry.relative_path); - zip.start_file(markdown_entry, options)?; - zip.write_all(&entry.assets.markdown)?; - let doc_parent = Path::new(&entry.relative_path) - .parent() - .map(|p| p.to_string_lossy().trim_start_matches('/').to_string()) - .unwrap_or_default(); - for attachment in &entry.assets.attachments { - let rel_path = attachment.relative_path.trim_start_matches('/'); - let attachment_entry = if doc_parent.is_empty() { - format!("{}/{}", root_name, rel_path) - } else { - format!("{}/{}/{}", root_name, doc_parent, rel_path) - }; - zip.start_file(attachment_entry, options)?; - zip.write_all(&attachment.bytes)?; - } - } - zip.finish()?; - } - Ok(cursor.into_inner()) -} - -#[derive(Debug, Error)] -#[error("folder downloads only support archive format")] -pub struct FolderDownloadUnsupportedFormat; diff --git a/api/crates/application/src/documents/use_cases/files/upload_file.rs b/api/crates/application/src/documents/use_cases/files/upload_file.rs index f589bcfc..0a86f834 100644 --- a/api/crates/application/src/documents/use_cases/files/upload_file.rs +++ b/api/crates/application/src/documents/use_cases/files/upload_file.rs @@ -1,7 +1,7 @@ use uuid::Uuid; use crate::core::ports::storage::storage_port::StorageResolverPort; -use crate::documents::ports::files::files_repository::FilesRepository; +use crate::documents::ports::files::files_repository::{FileInsert, FilesRepository}; pub struct UploadFile<'a, R, S> where @@ -16,11 +16,26 @@ where pub struct UploadedFile { pub id: Uuid, pub url: String, - pub filename: String, - pub content_type: Option, pub size: i64, pub storage_path: String, - pub content_hash: String, + /// Encrypted file metadata (filename, content_type, etc.) 
+ pub encrypted_metadata: Vec, + /// Nonce for encrypted metadata + pub encrypted_metadata_nonce: Vec, + /// Hash of encrypted content + pub encrypted_hash: String, +} + +/// Input for file upload (E2EE encrypted) +pub struct FileUploadInput { + /// Encrypted file bytes (.rme format) + pub bytes: Vec, + /// Encrypted file metadata (filename, content_type, etc.) + pub encrypted_metadata: Vec, + /// Nonce for encrypted metadata + pub encrypted_metadata_nonce: Vec, + /// Hash of encrypted content (for deduplication/verification) + pub encrypted_hash: String, } impl<'a, R, S> UploadFile<'a, R, S> @@ -28,13 +43,13 @@ where R: FilesRepository + ?Sized, S: StorageResolverPort + ?Sized, { + /// Upload an E2EE encrypted file. + /// All files are encrypted - filename and content_type are stored in encrypted_metadata. pub async fn execute( &self, workspace_id: Uuid, doc_id: Uuid, - bytes: Vec, - orig_filename: Option, - content_type: Option, + input: FileUploadInput, ) -> anyhow::Result> { if !self .repo @@ -43,9 +58,10 @@ where { return Ok(None); } + // Store with None for original_filename - we use UUID for storage path let stored = self .storage - .store_doc_attachment(doc_id, orig_filename.as_deref(), &bytes) + .store_doc_attachment(doc_id, None, &input.bytes) .await .map_err(|err| { tracing::error!(error = ?err, doc_id = %doc_id, "store_doc_attachment_failed"); @@ -53,35 +69,39 @@ where })?; let id = self .repo - .insert_file( + .insert_file(FileInsert { doc_id, - &stored.filename, - content_type.as_deref(), - stored.size, - &stored.relative_path, - &stored.content_hash, - ) + size: stored.size, + storage_path: &stored.relative_path, + encrypted_metadata: &input.encrypted_metadata, + encrypted_metadata_nonce: &input.encrypted_metadata_nonce, + encrypted_hash: &input.encrypted_hash, + }) .await .map_err(|err| { tracing::error!(error = ?err, doc_id = %doc_id, "insert_file_failed"); err })?; let storage_path = stored.relative_path.clone(); - let relative = stored.relative_path.trim_start_matches('/'); + // URL format: /api/uploads/{doc_id}/attachments/{filename} + // This matches what serve_upload expects (doc_id as first segment) let url = if let Some(base) = self.public_base_url.as_deref() { let origin = base.trim_end_matches('/'); - format!("{}/api/uploads/{}", origin, relative) + format!( + "{}/api/uploads/{}/attachments/{}", + origin, doc_id, stored.filename + ) } else { - format!("/api/uploads/{}", relative) + format!("/api/uploads/{}/attachments/{}", doc_id, stored.filename) }; Ok(Some(UploadedFile { id, url, - filename: stored.filename, - content_type, size: stored.size, storage_path, - content_hash: stored.content_hash, + encrypted_metadata: input.encrypted_metadata, + encrypted_metadata_nonce: input.encrypted_metadata_nonce, + encrypted_hash: input.encrypted_hash, })) } } diff --git a/api/crates/application/src/documents/use_cases/list_documents.rs b/api/crates/application/src/documents/use_cases/list_documents.rs index 3230c902..1f97922f 100644 --- a/api/crates/application/src/documents/use_cases/list_documents.rs +++ b/api/crates/application/src/documents/use_cases/list_documents.rs @@ -13,12 +13,9 @@ impl<'a, R: DocumentRepository + ?Sized> ListDocuments<'a, R> { pub async fn execute( &self, workspace_id: Uuid, - query: Option, tag: Option, state: DocumentListState, ) -> DocumentRepoResult> { - self.repo - .list_for_user(workspace_id, query, tag, state) - .await + self.repo.list_for_user(workspace_id, tag, state).await } } diff --git 
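// A sketch of how a client might assemble the FileUploadInput defined above.
// Assumptions not fixed by this diff: metadata is a small JSON object sealed
// with XChaCha20-Poly1305 under a per-file key, and encrypted_hash is the hex
// SHA-256 of the already-encrypted bytes.
use chacha20poly1305::aead::{Aead, AeadCore, KeyInit, OsRng};
use chacha20poly1305::{Key, XChaCha20Poly1305};
use sha2::{Digest, Sha256};

fn build_upload_input(
    file_key: &[u8; 32],
    encrypted_bytes: Vec<u8>, // the already-encrypted .rme payload
    filename: &str,
    content_type: &str,
) -> anyhow::Result<FileUploadInput> {
    let cipher = XChaCha20Poly1305::new(Key::from_slice(file_key));
    let nonce = XChaCha20Poly1305::generate_nonce(&mut OsRng);
    let metadata = serde_json::json!({
        "filename": filename,
        "content_type": content_type,
    })
    .to_string();
    let encrypted_metadata = cipher
        .encrypt(&nonce, metadata.as_bytes())
        .map_err(|_| anyhow::anyhow!("metadata encryption failed"))?;
    Ok(FileUploadInput {
        // Hash the ciphertext, not the plaintext, so the server can verify
        // integrity without learning anything about the file contents.
        encrypted_hash: hex::encode(Sha256::digest(&encrypted_bytes)),
        bytes: encrypted_bytes,
        encrypted_metadata,
        encrypted_metadata_nonce: nonce.to_vec(),
    })
}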
a/api/crates/application/src/documents/use_cases/mod.rs b/api/crates/application/src/documents/use_cases/mod.rs index ee4dcb18..6f2e82b3 100644 --- a/api/crates/application/src/documents/use_cases/mod.rs +++ b/api/crates/application/src/documents/use_cases/mod.rs @@ -1,6 +1,5 @@ pub mod create_document; pub mod delete_document; -pub mod download_document; pub mod files; pub mod get_backlinks; pub mod get_document; @@ -9,9 +8,6 @@ pub mod list_documents; pub mod list_snapshots; pub mod publishing; pub mod restore_snapshot; -pub mod search_documents; pub mod sharing; pub mod snapshot_diff; -pub mod snapshot_download; -pub mod tagging; pub mod update_document; diff --git a/api/crates/application/src/documents/use_cases/publishing/get_status.rs b/api/crates/application/src/documents/use_cases/publishing/get_status.rs index 55c9bcb7..d0aef555 100644 --- a/api/crates/application/src/documents/use_cases/publishing/get_status.rs +++ b/api/crates/application/src/documents/use_cases/publishing/get_status.rs @@ -5,6 +5,7 @@ use crate::documents::ports::publishing::public_repository::PublicRepository; pub struct PublishStatusDto { pub slug: String, pub public_url: String, + pub noindex: bool, } pub struct GetPublishStatus<'a, R: PublicRepository + ?Sized> { @@ -22,6 +23,7 @@ impl<'a, R: PublicRepository + ?Sized> GetPublishStatus<'a, R> { Ok(Some(PublishStatusDto { slug: status.slug, public_url, + noindex: status.noindex, })) } else { Ok(None) diff --git a/api/crates/application/src/documents/use_cases/publishing/publish.rs b/api/crates/application/src/documents/use_cases/publishing/publish.rs index 9cfc3d93..fa296f5f 100644 --- a/api/crates/application/src/documents/use_cases/publishing/publish.rs +++ b/api/crates/application/src/documents/use_cases/publishing/publish.rs @@ -5,6 +5,7 @@ use crate::documents::ports::publishing::public_repository::PublicRepository; pub struct PublishResponseDto { pub slug: String, pub public_url: String, + pub noindex: bool, } fn sanitize_title_local(name: &str) -> String { let mut s = name.trim().to_string(); @@ -28,6 +29,7 @@ impl<'a, R: PublicRepository + ?Sized> PublishDocument<'a, R> { &self, workspace_id: Uuid, doc_id: Uuid, + noindex: bool, ) -> anyhow::Result> { let ws = match self .repo @@ -47,8 +49,8 @@ impl<'a, R: PublicRepository + ?Sized> PublishDocument<'a, R> { slug = format!("{}-{}-{}", base_slug, &doc_id.to_string()[..8], i); i += 1; } - self.repo.upsert_public_document(doc_id, &slug).await?; + self.repo.upsert_public_document(doc_id, &slug, noindex).await?; let public_url = format!("/w/{}/{}", ws.workspace_slug, doc_id); - Ok(Some(PublishResponseDto { slug, public_url })) + Ok(Some(PublishResponseDto { slug, public_url, noindex })) } } diff --git a/api/crates/application/src/documents/use_cases/search_documents.rs b/api/crates/application/src/documents/use_cases/search_documents.rs deleted file mode 100644 index 2b2858c4..00000000 --- a/api/crates/application/src/documents/use_cases/search_documents.rs +++ /dev/null @@ -1,19 +0,0 @@ -use uuid::Uuid; - -use crate::documents::ports::document_repository::{DocumentRepoResult, DocumentRepository}; -use domain::documents::document::SearchHit; - -pub struct SearchDocuments<'a, R: DocumentRepository + ?Sized> { - pub repo: &'a R, -} - -impl<'a, R: DocumentRepository + ?Sized> SearchDocuments<'a, R> { - pub async fn execute( - &self, - workspace_id: Uuid, - q: Option, - limit: i64, - ) -> DocumentRepoResult> { - self.repo.search_for_user(workspace_id, q, limit).await - } -} diff --git 
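// A small sketch of how a public-page renderer might honor the `noindex` flag
// surfaced in PublishStatusDto/PublishResponseDto above; the handler and
// header wiring are hypothetical.
fn robots_directives(noindex: bool) -> Option<&'static str> {
    // The returned value can be emitted both as a <meta name="robots"> tag in
    // the rendered page and as an X-Robots-Tag response header.
    noindex.then_some("noindex, nofollow")
}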
a/api/crates/application/src/documents/use_cases/sharing/browse_share.rs b/api/crates/application/src/documents/use_cases/sharing/browse_share.rs index 1dbe993a..57f2f51f 100644 --- a/api/crates/application/src/documents/use_cases/sharing/browse_share.rs +++ b/api/crates/application/src/documents/use_cases/sharing/browse_share.rs @@ -1,3 +1,7 @@ +use std::collections::HashMap; + +use base64::Engine; + use crate::documents::dtos::{ShareBrowseResponseDto, ShareBrowseTreeItemDto}; use crate::documents::ports::sharing::shares_repository::SharesRepository; use domain::documents::doc_type::DocumentType; @@ -28,6 +32,8 @@ impl<'a, R: SharesRepository + ?Sized> BrowseShare<'a, R> { r#type: node.document_type.as_str().to_string(), created_at: node.created_at, updated_at: node.updated_at, + share_token: None, + encrypted_dek: None, }); } else { let fallback_title = self @@ -43,19 +49,43 @@ impl<'a, R: SharesRepository + ?Sized> BrowseShare<'a, R> { r#type: ctx.shared_type.as_str().to_string(), created_at: chrono::Utc::now(), updated_at: chrono::Utc::now(), + share_token: None, + encrypted_dek: None, }); } return Ok(Some(ShareBrowseResponseDto { tree })); } // Folder: list subtree and filter to materialized shares under this folder share let rows = self.repo.list_subtree_nodes(ctx.shared_id).await?; - let allowed = self.repo.list_materialized_children(ctx.share_id).await?; + + // Get child share info (token + encrypted DEK) for documents + let child_info = self.repo.list_child_share_info(ctx.share_id).await?; + let child_info_map: HashMap<_, _> = child_info + .into_iter() + .map(|info| (info.document_id, (info.token, info.encrypted_dek))) + .collect(); + let tree: Vec = rows .into_iter() .filter_map(|node| { - if node.document_type == DocumentType::Document && !allowed.contains(&node.id) { - return None; + // For documents, check if they have a materialized child share + if node.document_type == DocumentType::Document { + let child = child_info_map.get(&node.id)?; + let (child_token, encrypted_dek) = child; + return Some(ShareBrowseTreeItemDto { + id: node.id, + title: node.title.into_string(), + parent_id: node.parent_id, + r#type: node.document_type.as_str().to_string(), + created_at: node.created_at, + updated_at: node.updated_at, + share_token: Some(child_token.clone()), + encrypted_dek: encrypted_dek.as_ref().map(|dek| { + base64::engine::general_purpose::STANDARD.encode(dek) + }), + }); } + // For folders, include without child share info Some(ShareBrowseTreeItemDto { id: node.id, title: node.title.into_string(), @@ -63,6 +93,8 @@ impl<'a, R: SharesRepository + ?Sized> BrowseShare<'a, R> { r#type: node.document_type.as_str().to_string(), created_at: node.created_at, updated_at: node.updated_at, + share_token: None, + encrypted_dek: None, }) }) .collect(); diff --git a/api/crates/application/src/documents/use_cases/sharing/create_share.rs b/api/crates/application/src/documents/use_cases/sharing/create_share.rs index 968ef29e..c7a48c41 100644 --- a/api/crates/application/src/documents/use_cases/sharing/create_share.rs +++ b/api/crates/application/src/documents/use_cases/sharing/create_share.rs @@ -9,6 +9,7 @@ pub struct CreateShare<'a, R: SharesRepository + ?Sized> { } pub struct CreateShareResult { + pub share_id: Uuid, pub token: String, pub document_id: Uuid, pub document_type: DocumentType, @@ -28,6 +29,7 @@ impl<'a, R: SharesRepository + ?Sized> CreateShare<'a, R> { .create_share(workspace_id, actor_id, document_id, permission, expires_at) .await?; Ok(CreateShareResult { + share_id: 
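// The browse tree above ships each child's encrypted_dek base64-encoded with
// the STANDARD alphabet; the matching client-side step is a plain decode
// before the DEK is unwrapped with the share key (the unwrap itself depends
// on the client's key scheme and is out of scope here).
use base64::Engine;

fn decode_child_dek(encrypted_dek_b64: &str) -> anyhow::Result<Vec<u8>> {
    base64::engine::general_purpose::STANDARD
        .decode(encrypted_dek_b64)
        .map_err(|e| anyhow::anyhow!("invalid encrypted_dek base64: {e}"))
}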
created.share_id, token: created.token, document_id, document_type: created.document_type, diff --git a/api/crates/application/src/documents/use_cases/sharing/list_document_shares.rs b/api/crates/application/src/documents/use_cases/sharing/list_document_shares.rs index c78c3ac6..90295444 100644 --- a/api/crates/application/src/documents/use_cases/sharing/list_document_shares.rs +++ b/api/crates/application/src/documents/use_cases/sharing/list_document_shares.rs @@ -27,6 +27,8 @@ impl<'a, R: SharesRepository + ?Sized> ListDocumentShares<'a, R> { document_id: r.document_id, document_type: r.document_type.as_str().to_string(), parent_share_id: r.parent_share_id, + creator_encrypted_share_key: r.creator_encrypted_share_key, + creator_share_key_nonce: r.creator_share_key_nonce, }) .collect()) } diff --git a/api/crates/application/src/documents/use_cases/snapshot_download.rs b/api/crates/application/src/documents/use_cases/snapshot_download.rs deleted file mode 100644 index fa59de66..00000000 --- a/api/crates/application/src/documents/use_cases/snapshot_download.rs +++ /dev/null @@ -1,147 +0,0 @@ -use std::io::Write; -use std::path::Component; - -use anyhow::anyhow; -use async_trait::async_trait; -use uuid::Uuid; - -use crate::core::ports::storage::storage_port::StorageResolverPort; -use crate::documents::ports::document_snapshot_archive_repository::SnapshotArchiveRecord; -use crate::documents::ports::files::files_repository::FilesRepository; -use crate::documents::services::realtime::snapshot::SnapshotService; - -pub struct SnapshotDownload { - pub filename: String, - pub bytes: Vec, - pub snapshot: SnapshotArchiveRecord, -} - -pub struct DownloadSnapshot<'a, F, S, SNAP> -where - F: FilesRepository + ?Sized, - S: StorageResolverPort + ?Sized, - SNAP: SnapshotServiceProvider + ?Sized, -{ - pub files: &'a F, - pub storage: &'a S, - pub snapshots: &'a SNAP, -} - -#[async_trait] -pub trait SnapshotServiceProvider { - async fn load_markdown_with_record( - &self, - snapshot_id: Uuid, - ) -> anyhow::Result>; -} - -#[async_trait] -impl SnapshotServiceProvider for SnapshotService { - async fn load_markdown_with_record( - &self, - snapshot_id: Uuid, - ) -> anyhow::Result> { - self.load_archive_markdown(snapshot_id).await - } -} - -impl<'a, F, S, SNAP> DownloadSnapshot<'a, F, S, SNAP> -where - F: FilesRepository + ?Sized, - S: StorageResolverPort + ?Sized, - SNAP: SnapshotServiceProvider + ?Sized, -{ - pub async fn execute( - &self, - document_id: Uuid, - snapshot_id: Uuid, - ) -> anyhow::Result> { - let Some((snapshot_record, markdown)) = self - .snapshots - .load_markdown_with_record(snapshot_id) - .await? - else { - return Ok(None); - }; - if snapshot_record.document_id != document_id { - anyhow::bail!("snapshot_document_mismatch"); - } - - let markdown_bytes = markdown.into_bytes(); - let stored_attachments = self - .files - .list_storage_paths_for_document(document_id) - .await?; - let doc_dir = self.storage.build_doc_file_path(document_id).await?; - let doc_dir_parent = doc_dir - .parent() - .ok_or_else(|| anyhow!("document directory missing"))? 
- .to_path_buf(); - - let mut attachments: Vec<(String, Vec)> = Vec::new(); - for stored_path in stored_attachments { - let full_path = self.storage.absolute_from_relative(&stored_path); - if !full_path.starts_with(&doc_dir_parent) { - continue; - } - let relative = match full_path.strip_prefix(&doc_dir_parent) { - Ok(rel) => rel, - Err(_) => continue, - }; - if relative.as_os_str().is_empty() { - continue; - } - if relative - .components() - .any(|c| matches!(c, Component::ParentDir | Component::RootDir)) - { - continue; - } - let rel_str = relative.to_string_lossy().replace('\\', "/"); - let data = self.storage.read_bytes(full_path.as_path()).await?; - attachments.push((rel_str, data)); - } - - let safe_title = sanitize_filename(&snapshot_record.label); - let archive_name = format!("{}-snapshot.zip", safe_title); - let markdown_entry = format!("{}/{}.md", safe_title, safe_title); - let mut cursor = std::io::Cursor::new(Vec::new()); - { - let mut zip = zip::ZipWriter::new(&mut cursor); - let options = zip::write::FileOptions::default() - .compression_method(zip::CompressionMethod::Deflated) - .unix_permissions(0o644); - zip.start_file(markdown_entry, options)?; - zip.write_all(&markdown_bytes)?; - for (rel_path, data) in attachments { - let entry_path = format!("{}/{}", safe_title, rel_path.trim_start_matches('/')); - zip.start_file(entry_path, options)?; - zip.write_all(&data)?; - } - zip.finish()?; - } - let bytes = cursor.into_inner(); - - Ok(Some(SnapshotDownload { - filename: archive_name, - bytes, - snapshot: snapshot_record, - })) - } -} - -fn sanitize_filename(name: &str) -> String { - let mut s = name.trim().to_string(); - let invalid = ['/', '\\', ':', '*', '?', '"', '<', '>', '|', '\0']; - for ch in invalid { - s = s.replace(ch, "-"); - } - s = s.replace(' ', "_"); - if s.is_empty() { - s = "snapshot".into(); - } - if s.len() > 100 { - s.truncate(100); - } - s -} diff --git a/api/crates/application/src/documents/use_cases/tagging/list_tags.rs b/api/crates/application/src/documents/use_cases/tagging/list_tags.rs deleted file mode 100644 index 7a5df793..00000000 --- a/api/crates/application/src/documents/use_cases/tagging/list_tags.rs +++ /dev/null @@ -1,25 +0,0 @@ -use uuid::Uuid; - -use crate::documents::dtos::TagItemDto; -use crate::documents::ports::tagging::tag_repository::TagRepository; - -pub struct ListTags<'a, R: TagRepository + ?Sized> { - pub repo: &'a R, -} - -impl<'a, R: TagRepository + ?Sized> ListTags<'a, R> { - pub async fn execute( - &self, - owner_id: Uuid, - filter: Option, - ) -> anyhow::Result> { - let rows = self.repo.list_tags(owner_id, filter).await?; - Ok(rows - .into_iter() - .map(|row| TagItemDto { - name: row.name, - count: row.count, - }) - .collect()) - } -} diff --git a/api/crates/application/src/documents/use_cases/tagging/mod.rs b/api/crates/application/src/documents/use_cases/tagging/mod.rs deleted file mode 100644 index 8184e65c..00000000 --- a/api/crates/application/src/documents/use_cases/tagging/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod list_tags; diff --git a/api/crates/application/src/git/dtos/git.rs b/api/crates/application/src/git/dtos/git.rs index 3f8fc489..35ec721f 100644 --- a/api/crates/application/src/git/dtos/git.rs +++ b/api/crates/application/src/git/dtos/git.rs @@ -7,6 +7,8 @@ pub struct GitConfigDto { pub auto_sync: bool, pub created_at: chrono::DateTime, pub updated_at: chrono::DateTime, + /// E2EE encrypted auth data (only present for E2EE clients) + pub encrypted_auth_data: Option, } #[derive(Debug, Clone)] @@ -16,20 +18,6 
@@ pub struct GitRemoteCheckDto { pub reason: Option, } -#[derive(Debug, Clone)] -pub struct GitStatusDto { - pub repository_initialized: bool, - pub has_remote: bool, - pub current_branch: Option, - pub uncommitted_changes: u32, - pub untracked_files: u32, - pub last_sync: Option>, - pub last_sync_status: Option, - pub last_sync_message: Option, - pub last_sync_commit_hash: Option, - pub sync_enabled: bool, -} - #[derive(Debug, Clone)] pub struct UpsertGitConfigInput { pub repository_url: String, @@ -38,111 +26,3 @@ pub struct UpsertGitConfigInput { pub auth_data: serde_json::Value, pub auto_sync: Option, } - -#[derive(Debug, Clone)] -pub struct GitChangeItem { - pub path: String, - pub status: String, -} - -#[derive(Debug, Clone)] -pub struct GitCommitInfo { - pub hash: String, - pub message: String, - pub author_name: String, - pub author_email: String, - pub time: chrono::DateTime, -} - -#[derive(Debug, Clone)] -pub struct GitWorkspaceStatus { - pub repository_initialized: bool, - pub current_branch: Option, - pub uncommitted_changes: u32, - pub untracked_files: u32, -} - -#[derive(Debug, Clone)] -pub struct GitSyncRequestDto { - pub message: Option, - pub force: Option, - pub full_scan: Option, - pub skip_push: Option, -} - -#[derive(Debug, Clone)] -pub struct GitSyncResponseDto { - pub success: bool, - pub message: String, - pub commit_hash: Option, - pub files_changed: u32, -} - -#[derive(Debug, Clone)] -pub struct GitSyncOutcome { - pub files_changed: u32, - pub commit_hash: Option, - pub pushed: bool, - pub message: String, -} - -#[derive(Debug, Clone)] -pub struct GitImportOutcome { - pub files_changed: u32, - pub commit_hash: Option, - pub docs_created: u32, - pub attachments_created: u32, - pub message: String, -} - -#[derive(Debug, Clone)] -pub struct GitignoreUpdateDto { - pub added: usize, - pub patterns: Vec, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct GitPullResolutionDto { - pub path: String, - /// one of: ours, theirs, custom_text - pub choice: String, - pub content: Option, -} - -#[derive(Debug, Clone)] -pub struct GitPullRequestDto { - pub resolutions: Vec, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct GitPullConflictItemDto { - pub path: String, - pub is_binary: bool, - pub ours: Option, - pub theirs: Option, - pub base: Option, - pub document_id: Option, -} - -#[derive(Debug, Clone)] -pub struct GitPullResultDto { - pub success: bool, - pub message: String, - pub files_changed: u32, - pub commit_hash: Option, - pub conflicts: Option>, - pub base_commit: Option>, - pub remote_commit: Option>, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct GitPullSessionDto { - pub id: uuid::Uuid, - pub workspace_id: uuid::Uuid, - pub status: domain::git::pull_session::GitPullSessionStatus, - pub conflicts: Vec, - pub resolutions: Vec, - pub message: Option, - pub base_commit: Option>, - pub remote_commit: Option>, -} diff --git a/api/crates/application/src/git/ports/git_pull_session_repository.rs b/api/crates/application/src/git/ports/git_pull_session_repository.rs deleted file mode 100644 index 7fe142cf..00000000 --- a/api/crates/application/src/git/ports/git_pull_session_repository.rs +++ /dev/null @@ -1,11 +0,0 @@ -use async_trait::async_trait; -use uuid::Uuid; - -use crate::core::ports::errors::PortResult; -use crate::git::dtos::GitPullSessionDto; - -#[async_trait] -pub trait GitPullSessionRepository: Send + Sync { - async fn upsert(&self, session: GitPullSessionDto) -> 
PortResult<()>; - async fn get(&self, workspace_id: Uuid, id: Uuid) -> PortResult>; -} diff --git a/api/crates/application/src/git/ports/git_rebuild_job_queue.rs b/api/crates/application/src/git/ports/git_rebuild_job_queue.rs deleted file mode 100644 index c56d422a..00000000 --- a/api/crates/application/src/git/ports/git_rebuild_job_queue.rs +++ /dev/null @@ -1,26 +0,0 @@ -use async_trait::async_trait; -use uuid::Uuid; - -use crate::core::ports::errors::PortResult; - -#[derive(Debug, Clone)] -pub struct GitRebuildJob { - pub id: i64, - pub workspace_id: Uuid, - pub actor_id: Option, - pub attempts: i32, - pub permission_snapshot: Vec, -} - -#[async_trait] -pub trait GitRebuildJobQueue: Send + Sync { - async fn enqueue( - &self, - workspace_id: Uuid, - actor_id: Option, - permission_snapshot: &[String], - ) -> PortResult<()>; - async fn fetch_next(&self, lock_timeout_secs: i64) -> PortResult>; - async fn complete(&self, job_id: i64) -> PortResult<()>; - async fn fail(&self, job_id: i64, error: &str) -> PortResult<()>; -} diff --git a/api/crates/application/src/git/ports/git_repository.rs b/api/crates/application/src/git/ports/git_repository.rs index 19190d73..520f238a 100644 --- a/api/crates/application/src/git/ports/git_repository.rs +++ b/api/crates/application/src/git/ports/git_repository.rs @@ -1,7 +1,6 @@ use async_trait::async_trait; use chrono::{DateTime, Utc}; use domain::git::auth::GitAuthType; -use domain::git::sync_log::{GitSyncOperation, GitSyncStatus}; use serde_json::Value; use uuid::Uuid; @@ -16,23 +15,8 @@ pub struct GitConfigRecord { pub auto_sync: bool, pub created_at: DateTime, pub updated_at: DateTime, -} - -#[derive(Debug, Clone)] -pub struct GitLastSyncLog { - pub created_at: Option>, - pub status: Option, - pub message: Option, - pub commit_hash: Option, -} - -#[derive(Debug, Clone)] -pub struct UserGitCfg { - pub repository_url: String, - pub branch_name: String, - pub auth_type: Option, + /// Raw auth_data from database (E2EE encrypted data is stored as-is) pub auth_data: Option, - pub auto_sync: bool, } #[async_trait] @@ -48,20 +32,4 @@ pub trait GitRepository: Send + Sync { auto_sync: Option, ) -> PortResult; async fn delete_config(&self, workspace_id: Uuid) -> PortResult; - async fn load_user_git_cfg(&self, workspace_id: Uuid) -> PortResult>; - async fn get_last_sync_log(&self, workspace_id: Uuid) -> PortResult>; - async fn log_sync_operation( - &self, - workspace_id: Uuid, - operation: GitSyncOperation, - status: GitSyncStatus, - message: Option<&str>, - commit_hash: Option<&str>, - ) -> PortResult<()>; - - async fn delete_sync_logs(&self, workspace_id: Uuid) -> PortResult<()>; - - async fn delete_repository_state(&self, workspace_id: Uuid) -> PortResult<()>; - - async fn list_auto_sync_workspaces(&self) -> PortResult>; } diff --git a/api/crates/application/src/git/ports/git_storage.rs b/api/crates/application/src/git/ports/git_storage.rs deleted file mode 100644 index 13fe2324..00000000 --- a/api/crates/application/src/git/ports/git_storage.rs +++ /dev/null @@ -1,81 +0,0 @@ -use std::collections::HashMap; -use std::pin::Pin; - -use async_trait::async_trait; -use futures_core::Stream; -use uuid::Uuid; - -use crate::core::ports::errors::PortResult; - -pub type CommitId = Vec; - -#[derive(Debug, Clone)] -pub struct CommitMeta { - pub commit_id: CommitId, - pub parent_commit_id: Option, - pub message: Option, - pub author_name: Option, - pub author_email: Option, - pub committed_at: chrono::DateTime, - pub pack_key: String, - pub file_hash_index: HashMap, -} - 
-#[derive(Debug, Clone)] -pub struct PackBlob { - pub commit_id: CommitId, - pub bytes: Vec, - pub pack_key: String, -} - -#[derive(Debug, Clone)] -pub struct BlobKey { - pub path: String, -} - -pub type PackStream = Pin> + Send>>; - -#[async_trait] -pub trait GitStorage: Send + Sync { - async fn latest_commit(&self, user_id: Uuid) -> PortResult>; - async fn store_pack(&self, user_id: Uuid, pack: &[u8], meta: &CommitMeta) -> PortResult<()>; - async fn load_pack_chain(&self, user_id: Uuid, until: Option<&[u8]>) -> PortResult; - async fn put_blob(&self, key: &BlobKey, data: &[u8]) -> PortResult<()>; - async fn fetch_blob(&self, key: &BlobKey) -> PortResult>; - async fn commit_meta(&self, user_id: Uuid, commit_id: &[u8]) -> PortResult>; - async fn restore_commit_meta(&self, user_id: Uuid, meta: &CommitMeta) -> PortResult<()>; - async fn fetch_pack_for_commit( - &self, - user_id: Uuid, - commit_id: &[u8], - ) -> PortResult>>; - async fn delete_blob(&self, key: &BlobKey) -> PortResult<()>; - async fn delete_pack(&self, user_id: Uuid, commit_id: &[u8]) -> PortResult<()>; - async fn set_latest_commit(&self, user_id: Uuid, meta: Option<&CommitMeta>) -> PortResult<()>; - async fn delete_all(&self, user_id: Uuid) -> PortResult<()>; -} - -pub fn encode_commit_id(bytes: &[u8]) -> String { - bytes.iter().map(|b| format!("{:02x}", b)).collect() -} - -pub fn decode_commit_id(hex: &str) -> PortResult { - if !hex.len().is_multiple_of(2) { - return Err(anyhow::anyhow!("invalid commit id length").into()); - } - let mut out = Vec::with_capacity(hex.len() / 2); - let chars: Vec = hex.chars().collect(); - for chunk in chars.chunks(2) { - let [hi, lo] = chunk else { - return Err(anyhow::anyhow!("invalid commit id").into()); - }; - let hi = hi - .to_digit(16) - .ok_or_else(|| anyhow::anyhow!("invalid commit id"))?; - let lo = lo - .to_digit(16) - .ok_or_else(|| anyhow::anyhow!("invalid commit id"))?; - out.push(((hi << 4) | lo) as u8); - } - Ok(out) -} diff --git a/api/crates/application/src/git/ports/git_workspace.rs b/api/crates/application/src/git/ports/git_workspace.rs deleted file mode 100644 index 95b292b0..00000000 --- a/api/crates/application/src/git/ports/git_workspace.rs +++ /dev/null @@ -1,59 +0,0 @@ -use async_trait::async_trait; -use uuid::Uuid; - -use crate::core::dtos::TextDiffResult; -use crate::core::ports::errors::PortResult; -use crate::git::dtos::{ - GitChangeItem, GitCommitInfo, GitImportOutcome, GitPullRequestDto, GitPullResultDto, - GitRemoteCheckDto, GitSyncOutcome, GitSyncRequestDto, GitWorkspaceStatus, -}; -use crate::git::ports::git_repository::UserGitCfg; - -#[async_trait] -pub trait GitWorkspacePort: Send + Sync { - async fn ensure_repository(&self, workspace_id: Uuid, default_branch: &str) -> PortResult<()>; - async fn remove_repository(&self, workspace_id: Uuid) -> PortResult<()>; - async fn status(&self, workspace_id: Uuid) -> PortResult; - async fn list_changes(&self, workspace_id: Uuid) -> PortResult>; - async fn working_diff(&self, workspace_id: Uuid) -> PortResult>; - async fn commit_diff( - &self, - workspace_id: Uuid, - from: &str, - to: &str, - ) -> PortResult>; - async fn history(&self, workspace_id: Uuid) -> PortResult>; - async fn sync( - &self, - workspace_id: Uuid, - req: &GitSyncRequestDto, - cfg: Option<&UserGitCfg>, - ) -> PortResult; - async fn import_repository( - &self, - workspace_id: Uuid, - actor_id: Uuid, - cfg: &UserGitCfg, - ) -> PortResult; - async fn pull( - &self, - workspace_id: Uuid, - actor_id: Uuid, - req: &GitPullRequestDto, - cfg: &UserGitCfg, - ) 
-> PortResult; - async fn head_commit(&self, workspace_id: Uuid) -> PortResult>>; - async fn remote_head( - &self, - workspace_id: Uuid, - cfg: &UserGitCfg, - ) -> PortResult>>; - async fn has_pending_changes(&self, workspace_id: Uuid) -> PortResult; - async fn drift_since_commit(&self, workspace_id: Uuid, base_commit: &[u8]) -> PortResult; - - async fn check_remote( - &self, - workspace_id: Uuid, - cfg: &UserGitCfg, - ) -> PortResult; -} diff --git a/api/crates/application/src/git/ports/gitignore_port.rs b/api/crates/application/src/git/ports/gitignore_port.rs deleted file mode 100644 index efe70c79..00000000 --- a/api/crates/application/src/git/ports/gitignore_port.rs +++ /dev/null @@ -1,10 +0,0 @@ -use async_trait::async_trait; - -use crate::core::ports::errors::PortResult; - -#[async_trait] -pub trait GitignorePort: Send + Sync { - async fn ensure_gitignore(&self, dir: &str) -> PortResult; - async fn upsert_gitignore_patterns(&self, dir: &str, patterns: &[String]) -> PortResult; - async fn read_gitignore_patterns(&self, dir: &str) -> PortResult>; -} diff --git a/api/crates/application/src/git/ports/mod.rs b/api/crates/application/src/git/ports/mod.rs index ab43457c..cf243bc0 100644 --- a/api/crates/application/src/git/ports/mod.rs +++ b/api/crates/application/src/git/ports/mod.rs @@ -1,6 +1 @@ -pub mod git_pull_session_repository; -pub mod git_rebuild_job_queue; pub mod git_repository; -pub mod git_storage; -pub mod git_workspace; -pub mod gitignore_port; diff --git a/api/crates/application/src/git/services/mod.rs b/api/crates/application/src/git/services/mod.rs index 248dbba0..43294243 100644 --- a/api/crates/application/src/git/services/mod.rs +++ b/api/crates/application/src/git/services/mod.rs @@ -2,148 +2,27 @@ use std::sync::Arc; use uuid::Uuid; -use crate::core::dtos::TextDiffResult; -use crate::core::ports::storage::storage_port::StorageResolverPort; use crate::core::services::errors::ServiceError; -use crate::documents::ports::document_repository::DocumentRepository; -use crate::documents::ports::files::files_repository::FilesRepository; -use crate::git::dtos::{ - GitChangeItem, GitCommitInfo, GitConfigDto, GitImportOutcome, GitPullConflictItemDto, - GitPullRequestDto, GitPullResolutionDto, GitPullResultDto, GitPullSessionDto, - GitRemoteCheckDto, GitStatusDto, GitSyncRequestDto, GitSyncResponseDto, GitignoreUpdateDto, - UpsertGitConfigInput, -}; -use crate::git::ports::git_pull_session_repository::GitPullSessionRepository; +use crate::git::dtos::{GitConfigDto, UpsertGitConfigInput}; use crate::git::ports::git_repository::GitRepository; -use crate::git::ports::git_workspace::GitWorkspacePort; -use crate::git::ports::gitignore_port::GitignorePort; use crate::git::use_cases::delete_config::DeleteGitConfig; -use crate::git::use_cases::get_changes::GetChanges; -use crate::git::use_cases::get_commit_diff::GetCommitDiff; use crate::git::use_cases::get_config::GetGitConfig; -use crate::git::use_cases::get_history::GetHistory; -use crate::git::use_cases::get_status::GetGitStatus; -use crate::git::use_cases::get_working_diff::GetWorkingDiff; -use crate::git::use_cases::gitignore_patterns::{ - AddGitignorePatterns, CheckPathIgnored, GetGitignorePatterns, -}; -use crate::git::use_cases::ignore_document::IgnoreDocument; -use crate::git::use_cases::ignore_folder::IgnoreFolder; -use crate::git::use_cases::init_repo::{DeinitRepo, InitRepo}; -use crate::git::use_cases::pull::PullRepository; -use crate::git::use_cases::sync_now::SyncNow; use 
crate::git::use_cases::upsert_config::UpsertGitConfig; use async_trait::async_trait; -use domain::git::pull_session::GitPullSessionStatus; -use tracing::warn; - -pub mod rebuild; -pub mod rebuild_scheduler; pub struct GitService { repo: Arc, - storage: Arc, - files: Arc, - docs: Arc, - gitignore: Arc, - workspace: Arc, - pull_sessions: Arc, -} - -pub struct FinalizePullSessionResult { - pub session: GitPullSessionDto, - pub git_status: Option, } #[async_trait] pub trait GitServiceFacade: Send + Sync { async fn get_config(&self, workspace_id: Uuid) -> Result, ServiceError>; - async fn check_remote( - &self, - workspace_id: Uuid, - ) -> Result, ServiceError>; async fn upsert_config( &self, workspace_id: Uuid, input: &UpsertGitConfigInput, ) -> Result; async fn delete_config(&self, workspace_id: Uuid) -> Result<(), ServiceError>; - async fn get_status(&self, workspace_id: Uuid) -> Result; - async fn sync_now( - &self, - workspace_id: Uuid, - payload: GitSyncRequestDto, - ) -> Result; - async fn init_repository(&self, workspace_id: Uuid) -> Result<(), ServiceError>; - async fn deinit_repository(&self, workspace_id: Uuid) -> Result<(), ServiceError>; - async fn get_changes(&self, workspace_id: Uuid) -> Result, ServiceError>; - async fn get_history(&self, workspace_id: Uuid) -> Result, ServiceError>; - async fn get_working_diff( - &self, - workspace_id: Uuid, - ) -> Result, ServiceError>; - async fn get_commit_diff( - &self, - workspace_id: Uuid, - from: &str, - to: &str, - ) -> Result, ServiceError>; - async fn import_repository( - &self, - workspace_id: Uuid, - actor_id: Uuid, - input: &UpsertGitConfigInput, - ) -> Result; - async fn ignore_document( - &self, - workspace_id: Uuid, - doc_id: Uuid, - ) -> Result; - async fn ignore_folder( - &self, - workspace_id: Uuid, - folder_id: Uuid, - ) -> Result; - async fn add_gitignore_patterns( - &self, - workspace_id: Uuid, - patterns: Vec, - ) -> Result; - async fn get_gitignore_patterns(&self, workspace_id: Uuid) - -> Result, ServiceError>; - async fn check_path_ignored( - &self, - workspace_id: Uuid, - path: &str, - ) -> Result; - async fn pull_repository( - &self, - workspace_id: Uuid, - actor_id: Uuid, - req: GitPullRequestDto, - ) -> Result; - async fn start_pull_session_flow( - &self, - workspace_id: Uuid, - actor_id: Uuid, - ) -> Result; - async fn load_pull_session_with_stale_check( - &self, - workspace_id: Uuid, - session_id: Uuid, - ) -> Result, ServiceError>; - async fn resolve_pull_session_flow( - &self, - workspace_id: Uuid, - actor_id: Uuid, - session_id: Uuid, - resolutions: Vec, - ) -> Result; - async fn finalize_pull_session_flow( - &self, - workspace_id: Uuid, - session_id: Uuid, - ) -> Result; } #[async_trait] @@ -152,13 +31,6 @@ impl GitServiceFacade for GitService { self.get_config(workspace_id).await } - async fn check_remote( - &self, - workspace_id: Uuid, - ) -> Result, ServiceError> { - self.check_remote(workspace_id).await - } - async fn upsert_config( &self, workspace_id: Uuid, @@ -170,166 +42,11 @@ impl GitServiceFacade for GitService { async fn delete_config(&self, workspace_id: Uuid) -> Result<(), ServiceError> { self.delete_config(workspace_id).await } - - async fn get_status(&self, workspace_id: Uuid) -> Result { - self.get_status(workspace_id).await - } - - async fn sync_now( - &self, - workspace_id: Uuid, - payload: GitSyncRequestDto, - ) -> Result { - self.sync_now(workspace_id, payload).await - } - - async fn init_repository(&self, workspace_id: Uuid) -> Result<(), ServiceError> { - 
self.init_repository(workspace_id).await - } - - async fn deinit_repository(&self, workspace_id: Uuid) -> Result<(), ServiceError> { - self.deinit_repository(workspace_id).await - } - - async fn get_changes(&self, workspace_id: Uuid) -> Result, ServiceError> { - self.get_changes(workspace_id).await - } - - async fn get_history(&self, workspace_id: Uuid) -> Result, ServiceError> { - self.get_history(workspace_id).await - } - - async fn get_working_diff( - &self, - workspace_id: Uuid, - ) -> Result, ServiceError> { - self.get_working_diff(workspace_id).await - } - - async fn get_commit_diff( - &self, - workspace_id: Uuid, - from: &str, - to: &str, - ) -> Result, ServiceError> { - self.get_commit_diff(workspace_id, from, to).await - } - - async fn import_repository( - &self, - workspace_id: Uuid, - actor_id: Uuid, - input: &UpsertGitConfigInput, - ) -> Result { - self.import_repository(workspace_id, actor_id, input).await - } - - async fn ignore_document( - &self, - workspace_id: Uuid, - doc_id: Uuid, - ) -> Result { - self.ignore_document(workspace_id, doc_id).await - } - - async fn ignore_folder( - &self, - workspace_id: Uuid, - folder_id: Uuid, - ) -> Result { - self.ignore_folder(workspace_id, folder_id).await - } - - async fn add_gitignore_patterns( - &self, - workspace_id: Uuid, - patterns: Vec, - ) -> Result { - self.add_gitignore_patterns(workspace_id, patterns).await - } - - async fn get_gitignore_patterns( - &self, - workspace_id: Uuid, - ) -> Result, ServiceError> { - self.get_gitignore_patterns(workspace_id).await - } - - async fn check_path_ignored( - &self, - workspace_id: Uuid, - path: &str, - ) -> Result { - self.check_path_ignored(workspace_id, path).await - } - - async fn pull_repository( - &self, - workspace_id: Uuid, - actor_id: Uuid, - req: GitPullRequestDto, - ) -> Result { - self.pull_repository(workspace_id, actor_id, req).await - } - - async fn start_pull_session_flow( - &self, - workspace_id: Uuid, - actor_id: Uuid, - ) -> Result { - self.start_pull_session_flow(workspace_id, actor_id).await - } - - async fn load_pull_session_with_stale_check( - &self, - workspace_id: Uuid, - session_id: Uuid, - ) -> Result, ServiceError> { - self.load_pull_session_with_stale_check(workspace_id, session_id) - .await - } - - async fn resolve_pull_session_flow( - &self, - workspace_id: Uuid, - actor_id: Uuid, - session_id: Uuid, - resolutions: Vec, - ) -> Result { - self.resolve_pull_session_flow(workspace_id, actor_id, session_id, resolutions) - .await - } - - async fn finalize_pull_session_flow( - &self, - workspace_id: Uuid, - session_id: Uuid, - ) -> Result { - self.finalize_pull_session_flow(workspace_id, session_id) - .await - } } impl GitService { - #[allow(clippy::too_many_arguments)] - pub fn new( - repo: Arc, - storage: Arc, - files: Arc, - docs: Arc, - gitignore: Arc, - workspace: Arc, - pull_sessions: Arc, - ) -> Self { - Self { - repo, - storage, - files, - docs, - gitignore, - workspace, - pull_sessions, - } + pub fn new(repo: Arc) -> Self { + Self { repo } } pub async fn get_config( @@ -342,26 +59,6 @@ impl GitService { uc.execute(workspace_id).await.map_err(ServiceError::from) } - pub async fn check_remote( - &self, - workspace_id: Uuid, - ) -> Result, ServiceError> { - let cfg = self - .repo - .load_user_git_cfg(workspace_id) - .await - .map_err(ServiceError::from)?; - let Some(cfg) = cfg else { - return Ok(None); - }; - let res = self - .workspace - .check_remote(workspace_id, &cfg) - .await - .map_err(ServiceError::from)?; - Ok(Some(res)) - } - pub async fn 
     pub async fn upsert_config(
         &self,
         workspace_id: Uuid,
@@ -369,9 +66,6 @@ impl GitService {
     ) -> Result<GitConfigDto, ServiceError> {
         let uc = UpsertGitConfig {
             repo: self.repo.as_ref(),
-            storage: self.storage.as_ref(),
-            gitignore: self.gitignore.as_ref(),
-            workspace: self.workspace.as_ref(),
         };
         uc.execute(workspace_id, input)
             .await
@@ -387,610 +81,4 @@ impl GitService {
             .map(|_| ())
             .map_err(ServiceError::from)
     }
-
-    pub async fn get_status(&self, workspace_id: Uuid) -> Result<GitStatusDto, ServiceError> {
-        let uc = GetGitStatus {
-            repo: self.repo.as_ref(),
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)
-    }
-
-    pub async fn sync_now(
-        &self,
-        workspace_id: Uuid,
-        payload: GitSyncRequestDto,
-    ) -> Result<GitSyncResponseDto, ServiceError> {
-        let uc = SyncNow {
-            workspace: self.workspace.as_ref(),
-            repo: self.repo.as_ref(),
-        };
-        uc.execute(workspace_id, payload).await.map_err(|err| {
-            let msg_lower = err.to_string().to_lowercase();
-            if msg_lower.contains("git_http_auth_redirect")
-                || msg_lower.contains("too many redirects")
-                || msg_lower.contains("http (34)")
-            {
-                ServiceError::BadRequest("git_auth_redirect")
-            } else if msg_lower.contains("git_http_not_found")
-                || msg_lower.contains("status code: 404")
-            {
-                ServiceError::BadRequest("git_repo_not_found")
-            } else if msg_lower.contains("notfastforward")
-                || msg_lower.contains("not fast forward")
-                || msg_lower.contains("non-fast-forward")
-                || msg_lower.contains("non fast forward")
-                || msg_lower.contains("cannot push because a reference")
-                || msg_lower.contains("failed to push some refs")
-                || msg_lower.contains("updates were rejected")
-                || msg_lower.contains("rejected")
-            {
-                ServiceError::Conflict
-            } else {
-                ServiceError::from(err)
-            }
-        })
-    }
-
-    pub async fn get_changes(
-        &self,
-        workspace_id: Uuid,
-    ) -> Result<Vec<GitChangeItem>, ServiceError> {
-        let uc = GetChanges {
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)
-    }
-
-    pub async fn get_history(
-        &self,
-        workspace_id: Uuid,
-    ) -> Result<Vec<GitCommitInfo>, ServiceError> {
-        let uc = GetHistory {
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)
-    }
-
-    pub async fn get_working_diff(
-        &self,
-        workspace_id: Uuid,
-    ) -> Result<Vec<TextDiffResult>, ServiceError> {
-        let uc = GetWorkingDiff {
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)
-    }
-
-    pub async fn get_commit_diff(
-        &self,
-        workspace_id: Uuid,
-        from: &str,
-        to: &str,
-    ) -> Result<Vec<TextDiffResult>, ServiceError> {
-        let uc = GetCommitDiff {
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id, from.to_string(), to.to_string())
-            .await
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn init_repository(&self, workspace_id: Uuid) -> Result<(), ServiceError> {
-        let uc = InitRepo {
-            repo: self.repo.as_ref(),
-            storage: self.storage.as_ref(),
-            gitignore: self.gitignore.as_ref(),
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)
-    }
-
-    pub async fn deinit_repository(&self, workspace_id: Uuid) -> Result<(), ServiceError> {
-        let uc = DeinitRepo {
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)?;
-        self.repo
-            .delete_sync_logs(workspace_id)
-            .await
-            .map_err(ServiceError::from)?;
-        self.repo
-            .delete_repository_state(workspace_id)
-            .await
-            .map_err(ServiceError::from)?;
-        self.repo
-            .delete_config(workspace_id)
-            .await
-            .map(|_| ())
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn import_repository(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        input: &UpsertGitConfigInput,
-    ) -> Result<GitImportOutcome, ServiceError> {
-        // Save configuration first
-        let _ = self.upsert_config(workspace_id, input).await?;
-        let cfg = self
-            .repo
-            .load_user_git_cfg(workspace_id)
-            .await
-            .map_err(ServiceError::from)?
-            .ok_or(ServiceError::BadRequest("git_not_configured"))?;
-
-        self.workspace
-            .ensure_repository(workspace_id, &cfg.branch_name)
-            .await
-            .map_err(ServiceError::from)?;
-
-        self.workspace
-            .import_repository(workspace_id, actor_id, &cfg)
-            .await
-            .map_err(|err| {
-                let msg = err.to_string().to_lowercase();
-                if msg.contains("git_http_auth_redirect") || msg.contains("too many redirects") {
-                    ServiceError::BadRequest("git_auth_redirect")
-                } else if msg.contains("git_http_not_found") || msg.contains("status code: 404") {
-                    ServiceError::BadRequest("git_repo_not_found")
-                } else {
-                    ServiceError::from(err)
-                }
-            })
-    }
-
-    pub async fn ignore_document(
-        &self,
-        workspace_id: Uuid,
-        doc_id: Uuid,
-    ) -> Result<GitignoreUpdateDto, ServiceError> {
-        let uc = IgnoreDocument {
-            storage: self.storage.as_ref(),
-            files: self.files.as_ref(),
-            docs: self.docs.as_ref(),
-            gitignore: self.gitignore.as_ref(),
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id, doc_id)
-            .await
-            .map(|res| GitignoreUpdateDto {
-                added: res.added,
-                patterns: res.patterns,
-            })
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn ignore_folder(
-        &self,
-        workspace_id: Uuid,
-        folder_id: Uuid,
-    ) -> Result<GitignoreUpdateDto, ServiceError> {
-        let uc = IgnoreFolder {
-            storage: self.storage.as_ref(),
-            files: self.files.as_ref(),
-            docs: self.docs.as_ref(),
-            gitignore: self.gitignore.as_ref(),
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id, folder_id)
-            .await
-            .map(|res| GitignoreUpdateDto {
-                added: res.added,
-                patterns: res.patterns,
-            })
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn add_gitignore_patterns(
-        &self,
-        workspace_id: Uuid,
-        patterns: Vec<String>,
-    ) -> Result<i64, ServiceError> {
-        let uc = AddGitignorePatterns {
-            storage: self.storage.as_ref(),
-            gitignore: self.gitignore.as_ref(),
-            workspace: self.workspace.as_ref(),
-        };
-        uc.execute(workspace_id, patterns)
-            .await
-            .map(|count| count as i64)
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn get_gitignore_patterns(
-        &self,
-        workspace_id: Uuid,
-    ) -> Result<Vec<String>, ServiceError> {
-        let uc = GetGitignorePatterns {
-            storage: self.storage.as_ref(),
-            gitignore: self.gitignore.as_ref(),
-        };
-        uc.execute(workspace_id).await.map_err(ServiceError::from)
-    }
-
-    pub async fn check_path_ignored(
-        &self,
-        workspace_id: Uuid,
-        path: &str,
-    ) -> Result<bool, ServiceError> {
-        let uc = CheckPathIgnored {
-            gitignore: self.gitignore.as_ref(),
-            storage: self.storage.as_ref(),
-        };
-        uc.execute(workspace_id, path)
-            .await
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn pull_repository(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        req: GitPullRequestDto,
-    ) -> Result<GitPullResultDto, ServiceError> {
-        let uc = PullRepository {
-            workspace: self.workspace.as_ref(),
-            repo: self.repo.as_ref(),
-        };
-        let mut dto = uc
-            .execute(workspace_id, actor_id, req)
-            .await
-            .map_err(|err| {
-                let msg = err.to_string();
-                if msg.contains("pending changes") {
-                    ServiceError::BadRequest("workspace_has_pending_changes")
-                } else if msg.contains("not initialized") {
-                    ServiceError::BadRequest("repository_not_initialized")
-                } else if msg.contains("remote not configured")
-                    || msg.contains("git_not_configured")
-                {
-                    ServiceError::BadRequest("remote_not_configured")
-                } else if msg.contains("custom_text content required") {
ServiceError::BadRequest("resolution_content_required") - } else { - ServiceError::from(err) - } - })?; - - if let Some(conflicts) = dto.conflicts.take() { - dto.conflicts = Some( - self.attach_conflict_documents(workspace_id, conflicts) - .await?, - ); - } - - Ok(dto) - } - - pub async fn start_pull_session_flow( - &self, - workspace_id: Uuid, - actor_id: Uuid, - ) -> Result { - let mut dto = self - .pull_repository( - workspace_id, - actor_id, - GitPullRequestDto { - resolutions: Vec::new(), - }, - ) - .await?; - let conflicts = dto.conflicts.clone().unwrap_or_default(); - let session_id = Uuid::new_v4(); - // Align recorded base commit with the current head so stale detection does not flag a - // successfully merged session. - if let Some(head) = self.workspace.head_commit(workspace_id).await? { - dto.base_commit = Some(head); - } - let status = if !dto.success && conflicts.is_empty() { - GitPullSessionStatus::Error - } else if conflicts.is_empty() { - GitPullSessionStatus::Merged - } else { - GitPullSessionStatus::Pending - }; - let session = GitPullSessionDto { - id: session_id, - workspace_id, - status, - conflicts, - resolutions: Vec::new(), - message: Some(dto.message.clone()), - base_commit: dto.base_commit, - remote_commit: dto.remote_commit, - }; - self.save_pull_session(session.clone()).await?; - Ok(session) - } - - pub async fn resolve_pull_session_flow( - &self, - workspace_id: Uuid, - actor_id: Uuid, - session_id: Uuid, - resolutions: Vec, - ) -> Result { - let existing = self - .load_pull_session(workspace_id, session_id) - .await? - .ok_or(ServiceError::NotFound)?; - if self.pull_session_is_stale(workspace_id, &existing).await? { - let mut stale = existing.clone(); - stale.status = GitPullSessionStatus::Stale; - stale.message = Some("Pull session is stale".to_string()); - let _ = self.save_pull_session(stale.clone()).await; - return Ok(stale); - } - - let dto = self - .pull_repository( - workspace_id, - actor_id, - GitPullRequestDto { - resolutions: resolutions.clone(), - }, - ) - .await?; - let conflicts = dto.conflicts.clone().unwrap_or_default(); - let status = if !dto.success && conflicts.is_empty() { - GitPullSessionStatus::Error - } else if conflicts.is_empty() { - GitPullSessionStatus::Merged - } else { - GitPullSessionStatus::Resolving - }; - // When the pull completed (no conflicts), record the latest head as the session base so - // subsequent finalize calls don't treat the session as stale. - let mut base_commit = dto.base_commit.clone(); - if conflicts.is_empty() - && let Some(head) = self.workspace.head_commit(workspace_id).await? - { - base_commit = Some(head); - } - let session = GitPullSessionDto { - id: session_id, - workspace_id, - status, - conflicts, - resolutions, - message: Some(dto.message.clone()), - base_commit, - remote_commit: dto.remote_commit, - }; - self.save_pull_session(session.clone()).await?; - Ok(session) - } - - pub async fn finalize_pull_session_flow( - &self, - workspace_id: Uuid, - session_id: Uuid, - ) -> Result { - let existing = self - .load_pull_session(workspace_id, session_id) - .await? 
-            .ok_or(ServiceError::NotFound)?;
-        if existing.status == GitPullSessionStatus::Merged {
-            let git_status = self.get_status(workspace_id).await?;
-            return Ok(FinalizePullSessionResult {
-                session: existing,
-                git_status: Some(git_status),
-            });
-        }
-        if existing.status == GitPullSessionStatus::Stale {
-            let mut stale = existing.clone();
-            if stale.message.is_none() {
-                stale.message = Some("Pull session is stale".to_string());
-                let _ = self.save_pull_session(stale.clone()).await;
-            }
-            return Ok(FinalizePullSessionResult {
-                session: stale,
-                git_status: None,
-            });
-        }
-        if existing.status == GitPullSessionStatus::Error {
-            return Ok(FinalizePullSessionResult {
-                session: existing,
-                git_status: None,
-            });
-        }
-        if existing.status.is_in_progress()
-            && self.pull_session_is_stale(workspace_id, &existing).await?
-        {
-            let mut stale = existing.clone();
-            stale.status = GitPullSessionStatus::Stale;
-            if stale.message.is_none() {
-                stale.message = Some("Pull session is stale".to_string());
-            }
-            let _ = self.save_pull_session(stale.clone()).await;
-            return Ok(FinalizePullSessionResult {
-                session: stale,
-                git_status: None,
-            });
-        }
-        if !existing.conflicts.is_empty() {
-            return Ok(FinalizePullSessionResult {
-                session: existing,
-                git_status: None,
-            });
-        }
-        let git_status = self.get_status(workspace_id).await?;
-        let merged = GitPullSessionDto {
-            id: session_id,
-            workspace_id,
-            status: GitPullSessionStatus::Merged,
-            conflicts: Vec::new(),
-            resolutions: existing.resolutions.clone(),
-            message: Some("merge completed".to_string()),
-            base_commit: existing.base_commit.clone(),
-            remote_commit: existing.remote_commit.clone(),
-        };
-        let _ = self.save_pull_session(merged.clone()).await;
-        Ok(FinalizePullSessionResult {
-            session: merged,
-            git_status: Some(git_status),
-        })
-    }
-
-    pub async fn load_pull_session_with_stale_check(
-        &self,
-        workspace_id: Uuid,
-        id: Uuid,
-    ) -> Result<Option<GitPullSessionDto>, ServiceError> {
-        let mut session = match self.load_pull_session(workspace_id, id).await? {
-            Some(s) => s,
-            None => return Ok(None),
-        };
-        if session.status.is_in_progress()
-            && self.pull_session_is_stale(workspace_id, &session).await?
-        {
-            session.status = GitPullSessionStatus::Stale;
-            session.message = Some("Pull session is stale".to_string());
-            let _ = self.save_pull_session(session.clone()).await;
-        }
-        Ok(Some(session))
-    }
-
-    async fn attach_conflict_documents(
-        &self,
-        workspace_id: Uuid,
-        conflicts: Vec,
-    ) -> Result<Vec, ServiceError> {
-        let mut out = Vec::with_capacity(conflicts.len());
-        let docs = self
-            .docs
-            .list_workspace_documents(workspace_id)
-            .await
-            .map_err(ServiceError::from)?;
-
-        let normalize = |path: &str| {
-            path.trim_start_matches("./")
-                .trim_start_matches('/')
-                .to_string()
-        };
-
-        for mut conflict in conflicts {
-            if conflict.document_id.is_some() {
-                out.push(conflict);
-                continue;
-            }
-            let candidate = normalize(&conflict.path);
-
-            let mut matched = None;
-            for doc in docs.iter() {
-                let mut paths: Vec<String> = Vec::new();
-                if let Some(p) = doc.path() {
-                    let norm = normalize(p);
-                    if !norm.is_empty() {
-                        paths.push(norm);
-                    }
-                }
-                let desired = normalize(doc.desired_path().as_str());
-                if !desired.is_empty() {
-                    paths.push(desired);
-                }
-
-                if paths.iter().any(|p| {
-                    candidate == *p
-                        || candidate.ends_with(&format!("/{p}"))
-                        || p.ends_with(&candidate)
-                }) {
-                    matched = Some(doc.id());
-                    break;
-                }
-            }
-
-            conflict.document_id = matched;
-            if let Some(doc_id) = matched
-                && let Some(doc) = docs.iter().find(|d| d.id() == doc_id)
-            {
-                conflict.path = doc.desired_path().as_str().to_string();
-            }
-            out.push(conflict);
-        }
-
-        Ok(out)
-    }
-
-    pub async fn save_pull_session(&self, session: GitPullSessionDto) -> Result<(), ServiceError> {
-        self.pull_sessions
-            .upsert(session)
-            .await
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn load_pull_session(
-        &self,
-        workspace_id: Uuid,
-        id: Uuid,
-    ) -> Result<Option<GitPullSessionDto>, ServiceError> {
-        self.pull_sessions
-            .get(workspace_id, id)
-            .await
-            .map_err(ServiceError::from)
-    }
-
-    pub async fn pull_session_is_stale(
-        &self,
-        workspace_id: Uuid,
-        session: &GitPullSessionDto,
-    ) -> Result<bool, ServiceError> {
-        let cfg = self
-            .repo
-            .load_user_git_cfg(workspace_id)
-            .await
-            .map_err(ServiceError::from)?;
-        let Some(cfg) = cfg else {
-            return Err(ServiceError::BadRequest("remote_not_configured"));
-        };
-
-        if let Some(saved_base) = session.base_commit.as_ref() {
-            let current_head = self
-                .workspace
-                .head_commit(workspace_id)
-                .await
-                .map_err(ServiceError::from)?;
-            match current_head {
-                Some(head) if saved_base == &head => {}
-                Some(head)
-                    if session
-                        .remote_commit
-                        .as_ref()
-                        .is_some_and(|remote| remote == &head) => {}
-                Some(_) | None => return Ok(true),
-            }
-        }
-
-        if let Some(saved_remote) = session.remote_commit.as_ref() {
-            match self.workspace.remote_head(workspace_id, &cfg).await {
-                Ok(Some(current_remote)) => {
-                    if saved_remote != &current_remote {
-                        return Ok(true);
-                    }
-                }
-                Ok(None) => return Ok(true),
-                Err(err) => {
-                    let msg = err.to_string();
-                    let mapped = if msg.contains("not initialized") {
-                        ServiceError::BadRequest("repository_not_initialized")
-                    } else if msg.contains("remote not configured") {
-                        ServiceError::BadRequest("remote_not_configured")
-                    } else {
-                        ServiceError::from(err)
-                    };
-                    warn!(
-                        workspace_id = %workspace_id,
-                        error = %msg,
-                        "git_pull_remote_head_unavailable"
-                    );
-                    return Err(mapped);
-                }
-            }
-        }
-
-        Ok(false)
-    }
 }
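
The staleness rule that `pull_session_is_stale` encodes is easier to see stripped of the port calls and error mapping. A condensed sketch of the same decision (the `Commit` type parameter is an assumption; the real code compares whatever commit-id type the workspace port returns):

```rust
// Sketch: a session is stale when the local head left the session's base
// (and did not land on the session's recorded remote commit), or when the
// remote head moved since the session was created.
fn is_stale<Commit: PartialEq>(
    saved_base: Option<&Commit>,
    saved_remote: Option<&Commit>,
    current_head: Option<&Commit>,
    current_remote: Option<&Commit>,
) -> bool {
    if let Some(base) = saved_base {
        match current_head {
            Some(head) if head == base => {}
            Some(head) if saved_remote == Some(head) => {}
            _ => return true,
        }
    }
    if let Some(remote) = saved_remote {
        if current_remote != Some(remote) {
            return true;
        }
    }
    false
}

fn main() {
    let (base, moved) = ("abc", "def");
    // Head moved away from the base and not onto the recorded remote: stale.
    assert!(is_stale(Some(&base), None, Some(&moved), None));
    // Head still on base, remote unchanged: the session is still usable.
    assert!(!is_stale(Some(&base), Some(&moved), Some(&base), Some(&moved)));
}
```
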
diff --git a/api/crates/application/src/git/services/rebuild.rs b/api/crates/application/src/git/services/rebuild.rs
deleted file mode 100644
index 4b5d8147..00000000
--- a/api/crates/application/src/git/services/rebuild.rs
+++ /dev/null
@@ -1,630 +0,0 @@
-use std::sync::Arc;
-use std::time::Duration;
-
-use tracing::{error, info, warn};
-#[cfg(test)]
-use uuid::Uuid;
-
-use crate::core::services::metrics::MetricsRegistry;
-use crate::core::services::worker::WorkerTick;
-use crate::git::dtos::GitSyncRequestDto;
-use crate::git::ports::git_rebuild_job_queue::{GitRebuildJob, GitRebuildJobQueue};
-use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use crate::git::use_cases::helpers::needs_force_retry;
-use crate::workspaces::services::WorkspacePermissionResolver;
-use crate::workspaces::services::permission_snapshot::permission_set_from_snapshot;
-use domain::access::permissions::{PERM_GIT_SYNC, PermissionSet};
-use domain::git::policy;
-use domain::git::sync_log::{GitSyncOperation, GitSyncStatus};
-
-pub struct GitRebuildService {
-    jobs: Arc<dyn GitRebuildJobQueue>,
-    workspace: Arc<dyn GitWorkspacePort>,
-    git_repo: Arc<dyn GitRepository>,
-    metrics: Arc<MetricsRegistry>,
-    permission_resolver: Arc<dyn WorkspacePermissionResolver>,
-    idle_backoff: Duration,
-    lock_timeout_secs: i64,
-    max_attempts: i32,
-}
-
-impl GitRebuildService {
-    pub fn new(
-        jobs: Arc<dyn GitRebuildJobQueue>,
-        workspace: Arc<dyn GitWorkspacePort>,
-        git_repo: Arc<dyn GitRepository>,
-        metrics: Arc<MetricsRegistry>,
-        permission_resolver: Arc<dyn WorkspacePermissionResolver>,
-    ) -> Self {
-        Self {
-            jobs,
-            workspace,
-            git_repo,
-            metrics,
-            permission_resolver,
-            idle_backoff: Duration::from_secs(1),
-            lock_timeout_secs: 30,
-            max_attempts: 5,
-        }
-    }
-
-    pub fn with_idle_backoff(mut self, backoff: Duration) -> Self {
-        self.idle_backoff = backoff;
-        self
-    }
-
-    pub fn with_lock_timeout(mut self, secs: i64) -> Self {
-        self.lock_timeout_secs = secs.max(1);
-        self
-    }
-
-    pub fn with_max_attempts(mut self, attempts: i32) -> Self {
-        self.max_attempts = attempts.max(1);
-        self
-    }
-
-    pub async fn tick(&self) -> anyhow::Result<WorkerTick> {
-        match self.jobs.fetch_next(self.lock_timeout_secs).await {
-            Ok(Some(job)) => {
-                if let Err(err) = self.process_job(&job).await {
-                    error!(error = ?err, job_id = job.id, "git_rebuild_job_failed");
-                }
-                Ok(WorkerTick::Processed)
-            }
-            Ok(None) => Ok(WorkerTick::Idle),
-            Err(err) => Err(err.into()),
-        }
-    }
-
-    async fn process_job(&self, job: &GitRebuildJob) -> anyhow::Result<()> {
-        let permissions = self.permissions_for_job(job).await;
-        if policy::ensure_git_sync_allowed(&permissions).is_err() {
-            warn!(
-                workspace_id = %job.workspace_id,
-                "git_rebuild_missing_permission"
-            );
-            self.jobs.complete(job.id).await?;
-            return Ok(());
-        }
-        let status = self.workspace.status(job.workspace_id).await?;
-        if !status.repository_initialized {
-            self.jobs.complete(job.id).await?;
-            info!(
-                workspace_id = %job.workspace_id,
-                "git_rebuild_job_skipped_for_uninitialized_repo"
-            );
-            return Ok(());
-        }
-        let cfg = self.git_repo.load_user_git_cfg(job.workspace_id).await?;
-        let mut req = GitSyncRequestDto {
-            message: Some("Automated Git rebuild".to_string()),
-            force: Some(false),
-            full_scan: Some(true),
-            skip_push: Some(true),
-        };
-        let outcome = match self
-            .workspace
-            .sync(job.workspace_id, &req, cfg.as_ref())
-            .await
-        {
-            Ok(outcome) => outcome,
-            Err(err) => {
-                if !req.force.unwrap_or(false) && needs_force_retry(&err) {
-                    warn!(
-                        workspace_id = %job.workspace_id,
-                        "git_rebuild_retrying_with_force"
-                    );
-                    req.force = Some(true);
-                    match self
-                        .workspace
-                        .sync(job.workspace_id, &req, cfg.as_ref())
-                        .await
-                    {
-                        Ok(outcome) => outcome,
-                        Err(err) => return self.on_job_error(job, err.into()).await,
-                    }
-                } else {
-                    return self.on_job_error(job, err.into()).await;
-                }
-            }
-        };
-
-        self.jobs.complete(job.id).await?;
-        self.metrics.inc_git_rebuild_success();
-        info!(
-            workspace_id = %job.workspace_id,
-            files = outcome.files_changed,
-            "git_rebuild_job_completed"
-        );
-        if let Err(err) = self
-            .git_repo
-            .log_sync_operation(
-                job.workspace_id,
-                GitSyncOperation::Commit,
-                GitSyncStatus::Success,
-                Some(&outcome.message),
-                outcome.commit_hash.as_deref(),
-            )
-            .await
-        {
-            warn!(
-                error = ?err,
-                workspace_id = %job.workspace_id,
-                "git_rebuild_log_failed"
-            );
-        }
-        Ok(())
-    }
-
-    async fn on_job_error(&self, job: &GitRebuildJob, err: anyhow::Error) -> anyhow::Result<()> {
-        let msg = format!("{err:#}");
-        if job.attempts >= self.max_attempts {
-            self.jobs.complete(job.id).await?;
-            self.metrics.inc_git_rebuild_failure();
-            warn!(
-                error = ?err,
-                workspace_id = %job.workspace_id,
-                attempts = job.attempts,
-                "git_rebuild_job_gave_up"
-            );
-            if let Err(log_err) = self
-                .git_repo
-                .log_sync_operation(
-                    job.workspace_id,
-                    GitSyncOperation::Commit,
-                    GitSyncStatus::Error,
-                    Some(&msg),
-                    None,
-                )
-                .await
-            {
-                warn!(
-                    error = ?log_err,
-                    workspace_id = %job.workspace_id,
-                    "git_rebuild_log_failed"
-                );
-            }
-        } else {
-            self.jobs.fail(job.id, &msg).await?;
-            self.metrics.inc_git_rebuild_retry();
-            warn!(
-                error = ?err,
-                workspace_id = %job.workspace_id,
-                "git_rebuild_job_retrying"
-            );
-        }
-        Ok(())
-    }
-
-    async fn permissions_for_job(&self, job: &GitRebuildJob) -> PermissionSet {
-        let set = permission_set_from_snapshot(&job.permission_snapshot);
-        if !set.is_empty() {
-            return set;
-        }
-        if let Some(actor_id) = job.actor_id {
-            match self
-                .permission_resolver
-                .load_permission_set(job.workspace_id, actor_id)
-                .await
-            {
-                Ok(Some(resolved)) => {
-                    info!(
-                        workspace_id = %job.workspace_id,
-                        actor_id = %actor_id,
-                        "git_rebuild_permissions_rehydrated"
-                    );
-                    resolved
-                }
-                Ok(None) => {
-                    warn!(
-                        workspace_id = %job.workspace_id,
-                        actor_id = %actor_id,
-                        "git_rebuild_member_missing_for_permissions"
-                    );
-                    PermissionSet::from_slice(&[PERM_GIT_SYNC])
-                }
-                Err(err) => {
-                    warn!(
-                        error = ?err,
-                        workspace_id = %job.workspace_id,
-                        actor_id = %actor_id,
-                        "git_rebuild_permission_resolve_failed"
-                    );
-                    PermissionSet::from_slice(&[PERM_GIT_SYNC])
-                }
-            }
-        } else {
-            PermissionSet::from_slice(&[PERM_GIT_SYNC])
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use async_trait::async_trait;
-    use std::collections::VecDeque;
-    use std::sync::Mutex;
-
-    use crate::core::ports::errors::PortResult;
-    use crate::core::services::errors::ServiceError;
-
-    struct RecordingWorkspace {
-        outcomes: Mutex<Vec<GitSyncRequestDto>>,
-        failures: Mutex<VecDeque<anyhow::Error>>,
-    }
-
-    impl RecordingWorkspace {
-        fn new() -> Self {
-            Self {
-                outcomes: Mutex::new(Vec::new()),
-                failures: Mutex::new(VecDeque::new()),
-            }
-        }
-
-        fn fail_with(&self, err: anyhow::Error) {
-            self.failures.lock().unwrap().push_back(err);
-        }
-
-        fn requests(&self) -> Vec<GitSyncRequestDto> {
-            self.outcomes.lock().unwrap().clone()
-        }
-    }
-
-    #[async_trait]
-    impl GitWorkspacePort for RecordingWorkspace {
-        async fn ensure_repository(
-            &self,
-            _workspace_id: Uuid,
-            _default_branch: &str,
-        ) -> PortResult<()> {
-            unimplemented!()
-        }
-
-        async fn remove_repository(&self, _workspace_id: Uuid) -> PortResult<()> {
-            unimplemented!()
-        }
-
-        async fn status(
-            &self,
-            _workspace_id: Uuid,
-        ) -> PortResult<crate::git::dtos::GitWorkspaceStatus> {
-            Ok(crate::git::dtos::GitWorkspaceStatus {
-                repository_initialized: true,
-                current_branch: Some("main".into()),
-                uncommitted_changes: 0,
-                untracked_files: 0,
-            })
-        }
-
-        async fn list_changes(
-            &self,
-            _workspace_id: Uuid,
-        ) -> PortResult<Vec<crate::git::dtos::GitChangeItem>> {
-            unimplemented!()
-        }
-
-        async fn working_diff(
-            &self,
-            _workspace_id: Uuid,
-        ) -> PortResult<Vec<crate::core::dtos::TextDiffResult>> {
-            unimplemented!()
-        }
-
-        async fn commit_diff(
-            &self,
-            _workspace_id: Uuid,
-            _from: &str,
-            _to: &str,
-        ) -> PortResult<Vec<crate::core::dtos::TextDiffResult>> {
-            unimplemented!()
-        }
-
-        async fn history(
-            &self,
-            _workspace_id: Uuid,
-        ) -> PortResult<Vec<crate::git::dtos::GitCommitInfo>> {
-            unimplemented!()
-        }
-
-        async fn sync(
-            &self,
-            _workspace_id: Uuid,
-            req: &GitSyncRequestDto,
-            _cfg: Option<&crate::git::ports::git_repository::UserGitCfg>,
-        ) -> PortResult<crate::git::dtos::GitSyncOutcome> {
-            self.outcomes.lock().unwrap().push(req.clone());
-            if let Some(err) = self.failures.lock().unwrap().pop_front() {
-                Err(err.into())
-            } else {
-                Ok(crate::git::dtos::GitSyncOutcome {
-                    files_changed: 1,
-                    commit_hash: Some("abc123".into()),
-                    pushed: false,
-                    message: "ok".into(),
-                })
-            }
-        }
-
-        async fn import_repository(
-            &self,
-            _workspace_id: Uuid,
-            _actor_id: Uuid,
-            _cfg: &crate::git::ports::git_repository::UserGitCfg,
-        ) -> PortResult<crate::git::dtos::GitImportOutcome> {
-            Ok(crate::git::dtos::GitImportOutcome {
-                files_changed: 0,
-                commit_hash: None,
-                docs_created: 0,
-                attachments_created: 0,
-                message: "not implemented".to_string(),
-            })
-        }
-
-        async fn pull(
-            &self,
-            _workspace_id: Uuid,
-            _actor_id: Uuid,
-            _req: &crate::git::dtos::GitPullRequestDto,
-            _cfg: &crate::git::ports::git_repository::UserGitCfg,
-        ) -> PortResult<crate::git::dtos::GitPullResultDto> {
-            Ok(crate::git::dtos::GitPullResultDto {
-                success: true,
-                message: "ok".to_string(),
-                files_changed: 0,
-                commit_hash: None,
-                conflicts: None,
-                base_commit: None,
-                remote_commit: None,
-            })
-        }
-
-        async fn check_remote(
-            &self,
-            _workspace_id: Uuid,
-            _cfg: &crate::git::ports::git_repository::UserGitCfg,
-        ) -> PortResult<crate::git::dtos::GitRemoteCheckDto> {
-            Ok(crate::git::dtos::GitRemoteCheckDto {
-                ok: true,
-                message: "ok".into(),
-                reason: None,
-            })
-        }
-
-        async fn head_commit(&self, _workspace_id: Uuid) -> PortResult<Option<Vec<u8>>> {
-            Ok(None)
-        }
-
-        async fn remote_head(
-            &self,
-            _workspace_id: Uuid,
-            _cfg: &crate::git::ports::git_repository::UserGitCfg,
-        ) -> PortResult<Option<Vec<u8>>> {
-            Ok(None)
-        }
-
-        async fn has_pending_changes(&self, _workspace_id: Uuid) -> PortResult<bool> {
-            Ok(false)
-        }
-
-        async fn drift_since_commit(
-            &self,
-            _workspace_id: Uuid,
-            _base_commit: &[u8],
-        ) -> PortResult<bool> {
-            Ok(false)
-        }
-    }
-
-    struct RecordingJobQueue {
-        complete: Mutex<Vec<i64>>,
-        failed: Mutex<Vec<i64>>,
-    }
-
-    impl RecordingJobQueue {
-        fn new() -> Self {
-            Self {
-                complete: Mutex::new(Vec::new()),
-                failed: Mutex::new(Vec::new()),
-            }
-        }
-    }
-
-    #[async_trait]
-    impl GitRebuildJobQueue for RecordingJobQueue {
-        async fn enqueue(
-            &self,
-            _workspace_id: Uuid,
-            _actor_id: Option<Uuid>,
-            _permission_snapshot: &[String],
-        ) -> PortResult<()> {
-            Ok(())
-        }
-
-        async fn fetch_next(&self, _lock_timeout_secs: i64) -> PortResult<Option<GitRebuildJob>> {
-            Ok(None)
-        }
-
-        async fn complete(&self, job_id: i64) -> PortResult<()> {
-            self.complete.lock().unwrap().push(job_id);
-            Ok(())
-        }
-
-        async fn fail(&self, job_id: i64, _error: &str) -> PortResult<()> {
-            self.failed.lock().unwrap().push(job_id);
-            Ok(())
-        }
-    }
-
-    struct RecordingGitRepo {
-        last_status: Mutex<Option<String>>,
-    }
-
-    impl RecordingGitRepo {
-        fn new() -> Self {
-            Self {
-                last_status: Mutex::new(None),
-            }
-        }
-    }
-
-    #[async_trait]
-    impl GitRepository for RecordingGitRepo {
-        async fn get_config(
-            &self,
-            _user_id: Uuid,
-        ) -> PortResult> {
-            unimplemented!()
-        }
-
-        async fn upsert_config(
-            &self,
-            _user_id: Uuid,
-            _repository_url: &str,
-            _branch_name: Option<&str>,
-            _auth_type: domain::git::auth::GitAuthType,
-            _auth_data: &serde_json::Value,
-            _auto_sync: Option<bool>,
-        ) -> PortResult {
-            unimplemented!()
-        }
-
-        async fn delete_config(&self, _user_id: Uuid) -> PortResult {
-            unimplemented!()
-        }
-
-        async fn load_user_git_cfg(
-            &self,
-            _user_id: Uuid,
-        ) -> PortResult<Option<crate::git::ports::git_repository::UserGitCfg>> {
-            Ok(None)
-        }
-
-        async fn get_last_sync_log(
-            &self,
-            _user_id: Uuid,
-        ) -> PortResult> {
-            Ok(None)
-        }
-
-        async fn log_sync_operation(
-            &self,
-            _workspace_id: Uuid,
-            _operation: domain::git::sync_log::GitSyncOperation,
-            status: domain::git::sync_log::GitSyncStatus,
-            _message: Option<&str>,
-            _commit_hash: Option<&str>,
-        ) -> PortResult<()> {
-            *self.last_status.lock().unwrap() = Some(status.as_str().to_string());
-            Ok(())
-        }
-
-        async fn delete_sync_logs(&self, _workspace_id: Uuid) -> PortResult<()> {
-            Ok(())
-        }
-
-        async fn delete_repository_state(&self, _workspace_id: Uuid) -> PortResult<()> {
-            Ok(())
-        }
-
-        async fn list_auto_sync_workspaces(&self) -> PortResult<Vec<Uuid>> {
-            Ok(Vec::new())
-        }
-    }
-
-    struct AllowAllPermissions;
-
-    #[async_trait]
-    impl WorkspacePermissionResolver for AllowAllPermissions {
-        async fn load_permission_set(
-            &self,
-            _workspace_id: Uuid,
-            _user_id: Uuid,
-        ) -> Result<Option<PermissionSet>, ServiceError> {
-            Ok(Some(PermissionSet::all()))
-        }
-    }
-
-    #[tokio::test]
-    async fn successful_job_updates_metrics() {
-        let queue = Arc::new(RecordingJobQueue::new());
-        let workspace = Arc::new(RecordingWorkspace::new());
-        let git_repo = Arc::new(RecordingGitRepo::new());
-        let metrics = Arc::new(MetricsRegistry::default());
-        let svc = GitRebuildService::new(
-            queue.clone(),
-            workspace,
-            git_repo,
-            metrics.clone(),
-            Arc::new(AllowAllPermissions),
-        );
-        let job = GitRebuildJob {
-            id: 1,
-            workspace_id: Uuid::new_v4(),
-            actor_id: None,
-            attempts: 1,
-            permission_snapshot: vec!["git:sync".into()],
-        };
-        svc.process_job(&job).await.unwrap();
-        assert_eq!(queue.complete.lock().unwrap().as_slice(), &[1]);
-        assert_eq!(metrics.snapshot().git_rebuild_success, 1);
-    }
-
-    #[tokio::test]
-    async fn failing_job_retries_and_counts_metrics() {
-        let queue = Arc::new(RecordingJobQueue::new());
-        let workspace = Arc::new(RecordingWorkspace::new());
-        workspace.fail_with(anyhow::anyhow!("broken"));
-        let git_repo = Arc::new(RecordingGitRepo::new());
-        let metrics = Arc::new(MetricsRegistry::default());
-        let svc = GitRebuildService::new(
-            queue.clone(),
-            workspace,
-            git_repo,
-            metrics.clone(),
-            Arc::new(AllowAllPermissions),
-        );
-        let job = GitRebuildJob {
-            id: 2,
-            workspace_id: Uuid::new_v4(),
-            actor_id: None,
-            attempts: 0,
-            permission_snapshot: vec!["git:sync".into()],
-        };
-        svc.process_job(&job).await.unwrap();
-        assert_eq!(queue.failed.lock().unwrap().as_slice(), &[2]);
-        assert_eq!(metrics.snapshot().git_rebuild_retry, 1);
-    }
-
-    #[tokio::test]
-    async fn forced_retry_failure_routes_through_error_handler() {
-        let queue = Arc::new(RecordingJobQueue::new());
-        let workspace = Arc::new(RecordingWorkspace::new());
-        workspace.fail_with(anyhow::anyhow!("non-fast-forward push rejected"));
-        workspace.fail_with(anyhow::anyhow!("still broken"));
-        let git_repo = Arc::new(RecordingGitRepo::new());
-        let metrics = Arc::new(MetricsRegistry::default());
-        let svc = GitRebuildService::new(
-            queue.clone(),
-            workspace.clone(),
-            git_repo,
-            metrics.clone(),
-            Arc::new(AllowAllPermissions),
-        );
-        let job = GitRebuildJob {
-            id: 3,
-            workspace_id: Uuid::new_v4(),
-            actor_id: None,
-            attempts: 0,
-            permission_snapshot: vec!["git:sync".into()],
-        };
-        svc.process_job(&job).await.unwrap();
-        assert_eq!(queue.failed.lock().unwrap().as_slice(), &[3]);
-        assert_eq!(metrics.snapshot().git_rebuild_retry, 1);
-        let requests = workspace.requests();
-        assert_eq!(requests.len(), 2);
-        assert_eq!(requests[0].force, Some(false));
-        assert_eq!(requests[1].force, Some(true));
-    }
-}
diff --git a/api/crates/application/src/git/services/rebuild_scheduler.rs b/api/crates/application/src/git/services/rebuild_scheduler.rs
deleted file mode 100644
index aa4a2819..00000000
--- a/api/crates/application/src/git/services/rebuild_scheduler.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-use std::sync::Arc;
-
-use tracing::{debug, error, info};
-use uuid::Uuid;
-
-use crate::git::ports::git_rebuild_job_queue::GitRebuildJobQueue;
-use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use domain::access::permissions::{
-    PERM_GIT_CONFIGURE, PERM_GIT_INIT, PERM_GIT_SYNC, PermissionSet,
-};
-
-const GIT_BACKGROUND_PERMISSIONS: &[&str] = &[PERM_GIT_SYNC, PERM_GIT_CONFIGURE, PERM_GIT_INIT];
-
-pub struct GitRebuildScheduler {
-    jobs: Arc<dyn GitRebuildJobQueue>,
-    git_repo: Arc<dyn GitRepository>,
-    workspace: Arc<dyn GitWorkspacePort>,
-}
-
-impl GitRebuildScheduler {
-    pub fn new(
-        jobs: Arc<dyn GitRebuildJobQueue>,
-        git_repo: Arc<dyn GitRepository>,
-        workspace: Arc<dyn GitWorkspacePort>,
-    ) -> Self {
-        Self {
-            jobs,
-            git_repo,
-            workspace,
-        }
-    }
-
-    pub async fn tick(&self) {
-        match self.git_repo.list_auto_sync_workspaces().await {
-            Ok(ids) => {
-                for workspace_id in ids {
-                    if let Err(err) = self.enqueue_job_if_ready(workspace_id).await {
-                        error!(
-                            error = ?err,
-                            workspace_id = %workspace_id,
-                            "git_rebuild_enqueue_failed"
-                        );
-                    }
-                }
-            }
-            Err(err) => error!(error = ?err, "git_rebuild_scheduler_workspace_list_failed"),
-        }
-    }
-
-    async fn enqueue_job_if_ready(&self, workspace_id: Uuid) -> anyhow::Result<()> {
-        let status = self.workspace.status(workspace_id).await?;
-        if !status.repository_initialized {
-            debug!(workspace_id = %workspace_id, "git_rebuild_skip_uninitialized");
-            return Ok(());
-        }
-        self.enqueue_job(workspace_id).await
-    }
-
-    async fn enqueue_job(&self, workspace_id: Uuid) -> anyhow::Result<()> {
-        let permissions = PermissionSet::from_slice(GIT_BACKGROUND_PERMISSIONS).to_vec();
-        self.jobs.enqueue(workspace_id, None, &permissions).await?;
-        info!(workspace_id = %workspace_id, "git_rebuild_job_enqueued");
-        Ok(())
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/get_changes.rs b/api/crates/application/src/git/use_cases/get_changes.rs
deleted file mode 100644
index a0ac67e2..00000000
--- a/api/crates/application/src/git/use_cases/get_changes.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-use crate::git::dtos::GitChangeItem;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use uuid::Uuid;
-
-pub struct GetChanges<'a, W: GitWorkspacePort + ?Sized> {
-    pub workspace: &'a W,
-}
-
-impl<'a, W: GitWorkspacePort + ?Sized> GetChanges<'a, W> {
-    pub async fn execute(&self, workspace_id: Uuid) -> anyhow::Result<Vec<GitChangeItem>> {
-        self.workspace
-            .list_changes(workspace_id)
-            .await
-            .map_err(Into::into)
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/get_commit_diff.rs b/api/crates/application/src/git/use_cases/get_commit_diff.rs
deleted file mode 100644
index 0803e04a..00000000
--- a/api/crates/application/src/git/use_cases/get_commit_diff.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use crate::core::dtos::TextDiffResult;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use uuid::Uuid;
-
-pub struct GetCommitDiff<'a, W: GitWorkspacePort + ?Sized> {
-    pub workspace: &'a W,
-}
-
-impl<'a, W: GitWorkspacePort + ?Sized> GetCommitDiff<'a, W> {
-    pub async fn execute(
-        &self,
-        workspace_id: Uuid,
-        from: String,
-        to: String,
-    ) -> anyhow::Result<Vec<TextDiffResult>> {
-        self.workspace
-            .commit_diff(workspace_id, &from, &to)
-            .await
-            .map_err(Into::into)
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/get_config.rs b/api/crates/application/src/git/use_cases/get_config.rs
index aa30dc26..32d23804 100644
--- a/api/crates/application/src/git/use_cases/get_config.rs
+++ b/api/crates/application/src/git/use_cases/get_config.rs
@@ -12,14 +12,26 @@ impl<'a, R: GitRepository + ?Sized> GetGitConfig<'a, R> {
             .repo
             .get_config(workspace_id)
             .await?
-            .map(|record| GitConfigDto {
-                id: record.id,
-                repository_url: record.repository_url,
-                branch_name: record.branch_name,
-                auth_type: record.auth_type.as_str().to_string(),
-                auto_sync: record.auto_sync,
-                created_at: record.created_at,
-                updated_at: record.updated_at,
+            .map(|record| {
+                // Return encrypted_auth_data only for E2EE (when e2ee flag is present)
+                let encrypted_auth_data = record.auth_data.as_ref().and_then(|data| {
+                    if data.get("e2ee").and_then(|v| v.as_bool()).unwrap_or(false) {
+                        Some(data.clone())
+                    } else {
+                        None
+                    }
+                });
+
+                GitConfigDto {
+                    id: record.id,
+                    repository_url: record.repository_url,
+                    branch_name: record.branch_name,
+                    auth_type: record.auth_type.as_str().to_string(),
+                    auto_sync: record.auto_sync,
+                    created_at: record.created_at,
+                    updated_at: record.updated_at,
+                    encrypted_auth_data,
+                }
             }))
     }
 }
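
The `e2ee` marker this change keys on is just a boolean field inside the stored `auth_data` JSON: only payloads explicitly flagged as end-to-end encrypted are ever echoed back to the client. A minimal standalone sketch of the filter (the payload field names besides `e2ee` are illustrative, not from this diff):

```rust
use serde_json::{json, Value};

// Return the stored auth_data only when it is flagged as E2EE;
// plaintext credentials are never sent back to the client.
fn encrypted_auth_data(auth_data: Option<&Value>) -> Option<Value> {
    auth_data.and_then(|data| {
        if data.get("e2ee").and_then(|v| v.as_bool()).unwrap_or(false) {
            Some(data.clone())
        } else {
            None
        }
    })
}

fn main() {
    // Hypothetical payloads: an E2EE ciphertext blob vs. a legacy plaintext token.
    let e2ee = json!({ "e2ee": true, "ciphertext": "…", "nonce": "…" });
    let plain = json!({ "token": "ghp_secret" });
    assert!(encrypted_auth_data(Some(&e2ee)).is_some());
    assert!(encrypted_auth_data(Some(&plain)).is_none());
    assert!(encrypted_auth_data(None).is_none());
}
```
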
diff --git a/api/crates/application/src/git/use_cases/get_history.rs b/api/crates/application/src/git/use_cases/get_history.rs
deleted file mode 100644
index 7c5c11ee..00000000
--- a/api/crates/application/src/git/use_cases/get_history.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-use crate::git::dtos::GitCommitInfo;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use uuid::Uuid;
-
-pub struct GetHistory<'a, W: GitWorkspacePort + ?Sized> {
-    pub workspace: &'a W,
-}
-
-impl<'a, W: GitWorkspacePort + ?Sized> GetHistory<'a, W> {
-    pub async fn execute(&self, workspace_id: Uuid) -> anyhow::Result<Vec<GitCommitInfo>> {
-        self.workspace
-            .history(workspace_id)
-            .await
-            .map_err(Into::into)
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/get_status.rs b/api/crates/application/src/git/use_cases/get_status.rs
deleted file mode 100644
index 03f295bb..00000000
--- a/api/crates/application/src/git/use_cases/get_status.rs
+++ /dev/null
@@ -1,57 +0,0 @@
-use crate::git::dtos::{GitStatusDto, GitWorkspaceStatus};
-use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use uuid::Uuid;
-
-pub struct GetGitStatus<'a, R, W>
-where
-    R: GitRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub repo: &'a R,
-    pub workspace: &'a W,
-}
-
-impl<'a, R, W> GetGitStatus<'a, R, W>
-where
-    R: GitRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(&self, workspace_id: Uuid) -> anyhow::Result<GitStatusDto> {
-        let cfg_row = self.repo.get_config(workspace_id).await?;
-        let (repository_url, auto_sync) = cfg_row
-            .map(|cfg| (cfg.repository_url, cfg.auto_sync))
-            .unwrap_or((String::new(), false));
-
-        let GitWorkspaceStatus {
-            repository_initialized,
-            current_branch,
-            uncommitted_changes,
-            untracked_files,
-        } = self.workspace.status(workspace_id).await?;
-
-        let last = self.repo.get_last_sync_log(workspace_id).await?;
-        let (last_sync, last_sync_status, last_sync_message, last_sync_commit_hash) = match last {
-            Some(log) => (
-                log.created_at,
-                log.status.map(|s| s.as_str().to_string()),
-                log.message,
-                log.commit_hash,
-            ),
-            None => (None, None, None, None),
-        };
-
-        Ok(GitStatusDto {
-            repository_initialized,
-            has_remote: !repository_url.is_empty(),
-            current_branch,
-            uncommitted_changes,
-            untracked_files,
-            last_sync,
-            last_sync_status,
-            last_sync_message,
-            last_sync_commit_hash,
-            sync_enabled: auto_sync,
-        })
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/get_working_diff.rs b/api/crates/application/src/git/use_cases/get_working_diff.rs
deleted file mode 100644
index 200a0e37..00000000
--- a/api/crates/application/src/git/use_cases/get_working_diff.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-use crate::core::dtos::TextDiffResult;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use uuid::Uuid;
-
-pub struct GetWorkingDiff<'a, W: GitWorkspacePort + ?Sized> {
-    pub workspace: &'a W,
-}
-
-impl<'a, W: GitWorkspacePort + ?Sized> GetWorkingDiff<'a, W> {
-    pub async fn execute(&self, workspace_id: Uuid) -> anyhow::Result<Vec<TextDiffResult>> {
-        self.workspace
-            .working_diff(workspace_id)
-            .await
-            .map_err(Into::into)
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/gitignore_patterns.rs b/api/crates/application/src/git/use_cases/gitignore_patterns.rs
deleted file mode 100644
index 213c6490..00000000
--- a/api/crates/application/src/git/use_cases/gitignore_patterns.rs
+++ /dev/null
@@ -1,86 +0,0 @@
-use crate::core::ports::storage::storage_port::StorageResolverPort;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use crate::git::ports::gitignore_port::GitignorePort;
-
-pub struct GetGitignorePatterns<'a, G, S>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-{
-    pub storage: &'a S,
-    pub gitignore: &'a G,
-}
-
-impl<'a, G, S> GetGitignorePatterns<'a, G, S>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-{
-    pub async fn execute(&self, workspace_id: uuid::Uuid) -> anyhow::Result<Vec<String>> {
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let patterns = self.gitignore.read_gitignore_patterns(&dir).await?;
-        Ok(patterns)
-    }
-}
-
-pub struct AddGitignorePatterns<'a, G, S, W>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub storage: &'a S,
-    pub gitignore: &'a G,
-    pub workspace: &'a W,
-}
-
-impl<'a, G, S, W> AddGitignorePatterns<'a, G, S, W>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(
-        &self,
-        workspace_id: uuid::Uuid,
-        patterns: Vec<String>,
-    ) -> anyhow::Result<usize> {
-        self.workspace
-            .ensure_repository(workspace_id, "main")
-            .await?;
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let _ = self.gitignore.ensure_gitignore(&dir).await?;
-        let added = self
-            .gitignore
-            .upsert_gitignore_patterns(&dir, &patterns)
-            .await?;
-        Ok(added)
-    }
-}
-
-pub struct CheckPathIgnored<'a, G: GitignorePort + ?Sized, S: StorageResolverPort + ?Sized> {
-    pub gitignore: &'a G,
-    pub storage: &'a S,
-}
-
-impl<'a, G: GitignorePort + ?Sized, S: StorageResolverPort + ?Sized> CheckPathIgnored<'a, G, S> {
-    pub async fn execute(&self, workspace_id: uuid::Uuid, rel_path: &str) -> anyhow::Result<bool> {
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let patterns = self.gitignore.read_gitignore_patterns(&dir).await?;
-        let p = rel_path.trim_start_matches('/');
-        let mut is_ignored = false;
-        for pat in &patterns {
-            if pat.ends_with('/') {
-                let prefix = pat.trim_end_matches('/');
-                if p.starts_with(prefix) {
-                    is_ignored = true;
-                    break;
-                }
-            } else if pat == p {
-                is_ignored = true;
-                break;
-            }
-        }
-        Ok(is_ignored)
-    }
-}
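
`CheckPathIgnored` supports exactly two pattern shapes: a trailing-slash pattern matches any path under that prefix, and anything else must match the path exactly. A small sketch of the same matching rule with example inputs (the paths are hypothetical):

```rust
// Restatement of the deleted matcher's two cases.
fn matches(pattern: &str, path: &str) -> bool {
    let p = path.trim_start_matches('/');
    if pattern.ends_with('/') {
        // Plain prefix check, so "notes/" also matches "notes2/…".
        p.starts_with(pattern.trim_end_matches('/'))
    } else {
        pattern == p
    }
}

fn main() {
    assert!(matches("notes/", "notes/todo.md"));      // directory pattern
    assert!(matches("secret.md", "/secret.md"));      // exact file match
    assert!(!matches("secret.md", "docs/secret.md")); // no glob semantics
}
```
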
diff --git a/api/crates/application/src/git/use_cases/helpers.rs b/api/crates/application/src/git/use_cases/helpers.rs
deleted file mode 100644
index f8e01993..00000000
--- a/api/crates/application/src/git/use_cases/helpers.rs
+++ /dev/null
@@ -1,87 +0,0 @@
-use crate::core::ports::storage::storage_port::StorageResolverPort;
-use crate::documents::ports::document_repository::DocumentRepository;
-use crate::documents::ports::files::files_repository::FilesRepository;
-use anyhow::Error;
-use domain::documents::doc_type::DocumentType;
-use uuid::Uuid;
-
-fn strip_user_prefix(owner_id: Uuid, rel_from_uploads: &str) -> String {
-    let pfx = format!("{}/", owner_id);
-    if let Some(stripped) = rel_from_uploads.strip_prefix(&pfx) {
-        stripped.to_string()
-    } else {
-        rel_from_uploads.to_string()
-    }
-}
-
-/// Compute .gitignore patterns for a document or folder.
-/// - For document: returns the markdown file path and attachment file paths
-///   relative to the user's repository root.
-/// - For folder: returns a single directory pattern with trailing '/'
-pub async fn compute_doc_patterns_with<
-    D: DocumentRepository + ?Sized,
-    F: FilesRepository + ?Sized,
-    S: StorageResolverPort + ?Sized,
->(
-    docs: &D,
-    files: &F,
-    storage: &S,
-    node_id: Uuid,
-    owner_id: Uuid,
-) -> anyhow::Result<Vec<String>> {
-    // Fetch document meta for owner
-    let meta = docs
-        .get_meta_for_owner(node_id, owner_id)
-        .await?
-        .ok_or_else(|| anyhow::anyhow!("Document not found"))?;
-    let dtype = meta.doc_type;
-
-    // Folder: ignore the entire directory under the repo root
-    if dtype == DocumentType::Folder {
-        let dir_full = storage.build_doc_dir(node_id).await?; // .../uploads/<doc_id>/
-        let rel_from_uploads = storage.relative_from_uploads(&dir_full);
-        let repo_rel = strip_user_prefix(owner_id, &rel_from_uploads);
-        let mut pat = repo_rel;
-        if !pat.ends_with('/') {
-            pat.push('/');
-        }
-        return Ok(vec![pat]);
-    }
-
-    // Document: file path + attachment files
-    let mut patterns: Vec<String> = Vec::new();
-
-    // 1) Markdown file path
-    let file_rel_from_uploads: String = if let Some(p) = meta.path {
-        p
-    } else {
-        let full = storage.build_doc_file_path(node_id).await?;
-        storage.relative_from_uploads(&full)
-    };
-    let file_repo_rel = strip_user_prefix(owner_id, &file_rel_from_uploads);
-    patterns.push(file_repo_rel);
-
-    // 2) Attachment paths (exact files for the document)
-    let file_paths = files.list_storage_paths_for_document(node_id).await?;
-    for storage_path in file_paths {
-        let full = storage.absolute_from_relative(&storage_path);
-        let rel_from_uploads = storage.relative_from_uploads(&full);
-        let repo_rel = strip_user_prefix(owner_id, &rel_from_uploads);
-        patterns.push(repo_rel);
-    }
-
-    // Dedup to keep .gitignore tidy
-    patterns.sort();
-    patterns.dedup();
-    Ok(patterns)
-}
-
-pub fn needs_force_retry(err: &Error) -> bool {
-    let msg = err.to_string().to_lowercase();
-    msg.contains("remote repository state diverged")
        || msg.contains("repository latest commit mismatch")
        || msg.contains("remote repository already contains commit")
        || msg.contains("non-fast-forward")
        || msg.contains("non fast forward")
        || (msg.contains("push") && msg.contains("rejected"))
-}
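
`needs_force_retry` is a pure predicate over the error text, which made the force-retry behaviour in `GitRebuildService::process_job` easy to reason about: only divergence-style push failures earn a second attempt with `force: Some(true)`. A standalone restatement over `&str`, with a few sample strings from the matched classes (the samples are illustrative, not messages from the codebase):

```rust
// Same substring classification as the deleted helper, inlined over &str.
fn needs_force_retry(msg: &str) -> bool {
    let msg = msg.to_lowercase();
    msg.contains("remote repository state diverged")
        || msg.contains("repository latest commit mismatch")
        || msg.contains("remote repository already contains commit")
        || msg.contains("non-fast-forward")
        || msg.contains("non fast forward")
        || (msg.contains("push") && msg.contains("rejected"))
}

fn main() {
    assert!(needs_force_retry("non-fast-forward"));
    assert!(needs_force_retry("push to origin was rejected"));
    assert!(!needs_force_retry("connection timed out")); // not retried with force
}
```
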
diff --git a/api/crates/application/src/git/use_cases/ignore_document.rs b/api/crates/application/src/git/use_cases/ignore_document.rs
deleted file mode 100644
index b006f085..00000000
--- a/api/crates/application/src/git/use_cases/ignore_document.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use uuid::Uuid;
-
-use crate::core::ports::storage::storage_port::StorageResolverPort;
-use crate::documents::ports::document_repository::DocumentRepository;
-use crate::documents::ports::files::files_repository::FilesRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use crate::git::ports::gitignore_port::GitignorePort;
-use crate::git::use_cases::helpers::compute_doc_patterns_with;
-
-pub struct IgnoreDocument<'a, G, S, F, D, W>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    F: FilesRepository + ?Sized,
-    D: DocumentRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub storage: &'a S,
-    pub files: &'a F,
-    pub docs: &'a D,
-    pub gitignore: &'a G,
-    pub workspace: &'a W,
-}
-
-pub struct IgnoreResult {
-    pub added: usize,
-    pub patterns: Vec<String>,
-}
-
-impl<'a, G, S, F, D, W> IgnoreDocument<'a, G, S, F, D, W>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    F: FilesRepository + ?Sized,
-    D: DocumentRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(&self, workspace_id: Uuid, doc_id: Uuid) -> anyhow::Result<IgnoreResult> {
-        self.workspace
-            .ensure_repository(workspace_id, "main")
-            .await?;
-        let patterns =
-            compute_doc_patterns_with(self.docs, self.files, self.storage, doc_id, workspace_id)
-                .await?;
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let _ = self.gitignore.ensure_gitignore(&dir).await?;
-        let added = self
-            .gitignore
-            .upsert_gitignore_patterns(&dir, &patterns)
-            .await?;
-        Ok(IgnoreResult { added, patterns })
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/ignore_folder.rs b/api/crates/application/src/git/use_cases/ignore_folder.rs
deleted file mode 100644
index 40151927..00000000
--- a/api/crates/application/src/git/use_cases/ignore_folder.rs
+++ /dev/null
@@ -1,57 +0,0 @@
-use uuid::Uuid;
-
-use crate::core::ports::storage::storage_port::StorageResolverPort;
-use crate::documents::ports::document_repository::DocumentRepository;
-use crate::documents::ports::files::files_repository::FilesRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use crate::git::ports::gitignore_port::GitignorePort;
-use crate::git::use_cases::helpers::compute_doc_patterns_with;
-
-pub struct IgnoreFolder<'a, G, S, F, D, W>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    F: FilesRepository + ?Sized,
-    D: DocumentRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub storage: &'a S,
-    pub files: &'a F,
-    pub docs: &'a D,
-    pub gitignore: &'a G,
-    pub workspace: &'a W,
-}
-
-pub struct IgnoreResult {
-    pub added: usize,
-    pub patterns: Vec<String>,
-}
-
-impl<'a, G, S, F, D, W> IgnoreFolder<'a, G, S, F, D, W>
-where
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    F: FilesRepository + ?Sized,
-    D: DocumentRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(
-        &self,
-        workspace_id: Uuid,
-        folder_id: Uuid,
-    ) -> anyhow::Result<IgnoreResult> {
-        self.workspace
-            .ensure_repository(workspace_id, "main")
-            .await?;
-        let patterns =
-            compute_doc_patterns_with(self.docs, self.files, self.storage, folder_id, workspace_id)
-                .await?;
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let _ = self.gitignore.ensure_gitignore(&dir).await?;
-        let added = self
-            .gitignore
-            .upsert_gitignore_patterns(&dir, &patterns)
-            .await?;
-        Ok(IgnoreResult { added, patterns })
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/init_repo.rs b/api/crates/application/src/git/use_cases/init_repo.rs
deleted file mode 100644
index 991a11ac..00000000
--- a/api/crates/application/src/git/use_cases/init_repo.rs
+++ /dev/null
@@ -1,56 +0,0 @@
-use crate::core::ports::storage::storage_port::StorageResolverPort;
-use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use crate::git::ports::gitignore_port::GitignorePort;
-use uuid::Uuid;
-
-pub struct InitRepo<'a, R, G, S, W>
-where
-    R: GitRepository + ?Sized,
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub repo: &'a R,
-    pub storage: &'a S,
-    pub gitignore: &'a G,
-    pub workspace: &'a W,
-}
-
-impl<'a, R, G, S, W> InitRepo<'a, R, G, S, W>
-where
-    R: GitRepository + ?Sized,
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(&self, workspace_id: Uuid) -> anyhow::Result<()> {
-        let default_branch = self
-            .repo
-            .get_config(workspace_id)
-            .await?
-            .map(|row| row.branch_name)
-            .unwrap_or_else(|| "main".to_string());
-
-        self.workspace
-            .ensure_repository(workspace_id, &default_branch)
-            .await?;
-
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let _ = self.gitignore.ensure_gitignore(&dir).await?;
-        Ok(())
-    }
-}
-
-pub struct DeinitRepo<'a, W: GitWorkspacePort + ?Sized> {
-    pub workspace: &'a W,
-}
-
-impl<'a, W: GitWorkspacePort + ?Sized> DeinitRepo<'a, W> {
-    pub async fn execute(&self, workspace_id: Uuid) -> anyhow::Result<()> {
-        self.workspace
-            .remove_repository(workspace_id)
-            .await
-            .map_err(Into::into)
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/mod.rs b/api/crates/application/src/git/use_cases/mod.rs
index 01832bc2..784a7246 100644
--- a/api/crates/application/src/git/use_cases/mod.rs
+++ b/api/crates/application/src/git/use_cases/mod.rs
@@ -1,15 +1,3 @@
 pub mod delete_config;
-pub mod get_changes;
-pub mod get_commit_diff;
 pub mod get_config;
-pub mod get_history;
-pub mod get_status;
-pub mod get_working_diff;
-pub mod gitignore_patterns;
-pub mod helpers;
-pub mod ignore_document;
-pub mod ignore_folder;
-pub mod init_repo;
-pub mod pull;
-pub mod sync_now;
 pub mod upsert_config;
diff --git a/api/crates/application/src/git/use_cases/pull.rs b/api/crates/application/src/git/use_cases/pull.rs
deleted file mode 100644
index 8756e07f..00000000
--- a/api/crates/application/src/git/use_cases/pull.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-use anyhow::anyhow;
-use uuid::Uuid;
-
-use crate::git::dtos::{GitPullRequestDto, GitPullResultDto};
-use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-
-pub struct PullRepository<'a, R, W>
-where
-    R: GitRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub workspace: &'a W,
-    pub repo: &'a R,
-}
-
-impl<'a, R, W> PullRepository<'a, R, W>
-where
-    R: GitRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        req: GitPullRequestDto,
-    ) -> anyhow::Result<GitPullResultDto> {
-        let cfg = self.repo.load_user_git_cfg(workspace_id).await?;
-        let cfg = cfg.ok_or_else(|| anyhow!("git_not_configured"))?;
-        self.workspace
-            .pull(workspace_id, actor_id, &req, &cfg)
-            .await
-            .map_err(Into::into)
-    }
-}
diff --git a/api/crates/application/src/git/use_cases/sync_now.rs b/api/crates/application/src/git/use_cases/sync_now.rs
deleted file mode 100644
index 0abe2c67..00000000
--- a/api/crates/application/src/git/use_cases/sync_now.rs
+++ /dev/null
@@ -1,89 +0,0 @@
-use uuid::Uuid;
-
-use crate::git::dtos::{GitSyncRequestDto, GitSyncResponseDto};
-use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use domain::git::sync_log::{GitSyncOperation, GitSyncStatus};
-
-pub struct SyncNow<'a, R, W>
-where
-    R: GitRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub workspace: &'a W,
-    pub repo: &'a R,
-}
-
-impl<'a, R, W> SyncNow<'a, R, W>
-where
-    R: GitRepository + ?Sized,
-    W: GitWorkspacePort + ?Sized,
-{
-    pub async fn execute(
-        &self,
-        workspace_id: Uuid,
-        req: GitSyncRequestDto,
-    ) -> anyhow::Result<GitSyncResponseDto> {
-        let cfg = self.repo.load_user_git_cfg(workspace_id).await?;
-        let attempt_req = req.clone();
-        let outcome = self
-            .workspace
-            .sync(workspace_id, &attempt_req, cfg.as_ref())
-            .await?;
-
-        if let Some(cfg) = cfg.as_ref()
-            && !cfg.repository_url.is_empty()
-        {
-            if attempt_req.skip_push.unwrap_or(false) {
-                let _ = self
-                    .repo
-                    .log_sync_operation(
-                        workspace_id,
-                        GitSyncOperation::Commit,
-                        GitSyncStatus::Success,
-                        Some(&outcome.message),
-                        outcome.commit_hash.as_deref(),
-                    )
-                    .await;
-            } else {
-                // Treat "nothing to commit" as success even if no push occurred.
-                let status = if outcome.files_changed == 0 || outcome.pushed {
-                    GitSyncStatus::Success
-                } else {
-                    GitSyncStatus::Error
-                };
-                let _ = self
-                    .repo
-                    .log_sync_operation(
-                        workspace_id,
-                        GitSyncOperation::Push,
-                        status,
-                        Some(&outcome.message),
-                        outcome.commit_hash.as_deref(),
-                    )
-                    .await;
-            }
-        }
-
-        let has_remote = cfg
-            .as_ref()
-            .map(|c| !c.repository_url.is_empty())
-            .unwrap_or(false);
-        // Success rule:
-        // - If a remote is configured: success when push succeeded or there were no changes.
-        // - If no remote: success when commit was created or there were no changes.
-        let skip_push = attempt_req.skip_push.unwrap_or(false);
-        let success = if has_remote && !skip_push {
-            outcome.files_changed == 0 || outcome.pushed
-        } else {
-            outcome.files_changed == 0 || outcome.commit_hash.is_some()
-        };
-
-        Ok(GitSyncResponseDto {
-            success,
-            message: outcome.message,
-            commit_hash: outcome.commit_hash,
-            files_changed: outcome.files_changed,
-        })
-    }
-}
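
The success rule spelled out in the comments above reduces to a small truth table: "nothing to do" always counts as success, and otherwise the step that mattered (the push when a remote is configured and pushing is enabled, the commit otherwise) must have happened. A standalone restatement (field types are assumptions; `committed` stands in for `commit_hash.is_some()`):

```rust
// Sketch of the deleted SyncNow success rule.
fn sync_succeeded(
    has_remote: bool,
    skip_push: bool,
    files_changed: usize,
    pushed: bool,
    committed: bool,
) -> bool {
    if has_remote && !skip_push {
        files_changed == 0 || pushed
    } else {
        files_changed == 0 || committed
    }
}

fn main() {
    assert!(sync_succeeded(true, false, 0, false, false)); // nothing to commit
    assert!(sync_succeeded(true, false, 3, true, true));   // committed and pushed
    assert!(!sync_succeeded(true, false, 3, false, true)); // committed, push failed
    assert!(sync_succeeded(false, false, 3, false, true)); // local-only commit
}
```
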
diff --git a/api/crates/application/src/git/use_cases/upsert_config.rs b/api/crates/application/src/git/use_cases/upsert_config.rs
index e82d1cdb..5c642e10 100644
--- a/api/crates/application/src/git/use_cases/upsert_config.rs
+++ b/api/crates/application/src/git/use_cases/upsert_config.rs
@@ -1,30 +1,18 @@
-use crate::core::ports::storage::storage_port::StorageResolverPort;
 use crate::git::dtos::{GitConfigDto, UpsertGitConfigInput};
 use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
-use crate::git::ports::gitignore_port::GitignorePort;
 use domain::git::auth::GitAuthType;
 use uuid::Uuid;
 
-pub struct UpsertGitConfig<'a, R, G, S, W>
+pub struct UpsertGitConfig<'a, R>
 where
     R: GitRepository + ?Sized,
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    W: GitWorkspacePort + ?Sized,
 {
     pub repo: &'a R,
-    pub storage: &'a S,
-    pub gitignore: &'a G,
-    pub workspace: &'a W,
 }
 
-impl<'a, R, G, S, W> UpsertGitConfig<'a, R, G, S, W>
+impl<'a, R> UpsertGitConfig<'a, R>
 where
     R: GitRepository + ?Sized,
-    G: GitignorePort + ?Sized,
-    S: StorageResolverPort + ?Sized,
-    W: GitWorkspacePort + ?Sized,
 {
     pub async fn execute(
         &self,
@@ -36,6 +24,7 @@ where
         if !auth_type.validate_repository_url(&req.repository_url) {
             anyhow::bail!("bad_request");
         }
+
         let record = self
             .repo
             .upsert_config(
@@ -47,11 +36,19 @@ where
                 req.auto_sync,
             )
             .await?;
-        self.workspace
-            .ensure_repository(workspace_id, &record.branch_name)
-            .await?;
-        let dir = self.storage.user_repo_dir(workspace_id);
-        let _ = self.gitignore.ensure_gitignore(&dir).await?;
+
+        // Return encrypted_auth_data only for E2EE (when e2ee flag is present)
+        let encrypted_auth_data = if req
+            .auth_data
+            .get("e2ee")
+            .and_then(|v| v.as_bool())
+            .unwrap_or(false)
+        {
+            Some(req.auth_data.clone())
+        } else {
+            None
+        };
+
         Ok(GitConfigDto {
             id: record.id,
             repository_url: record.repository_url,
@@ -60,6 +57,7 @@ where
             auto_sync: record.auto_sync,
             created_at: record.created_at,
             updated_at: record.updated_at,
+            encrypted_auth_data,
         })
     }
 }
diff --git a/api/crates/application/src/identity/dtos/keys.rs b/api/crates/application/src/identity/dtos/keys.rs
new file mode 100644
index 00000000..cc5c7b53
--- /dev/null
+++ b/api/crates/application/src/identity/dtos/keys.rs
@@ -0,0 +1,32 @@
+use uuid::Uuid;
+
+use domain::identity::keys::{KdfParams, KdfType, KeyType};
+
+#[derive(Debug, Clone)]
+pub struct UserPublicKeyDto {
+    pub user_id: Uuid,
+    pub public_key: Vec<u8>,
+    pub key_type: KeyType,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[derive(Debug, Clone)]
+pub struct UserEncryptedMasterKeyDto {
+    pub user_id: Uuid,
+    pub encrypted_key: Vec<u8>,
+    pub salt: Vec<u8>,
+    pub kdf_type: KdfType,
+    pub kdf_params: KdfParams,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[derive(Debug, Clone)]
+pub struct UserEncryptedPrivateKeyDto {
+    pub user_id: Uuid,
+    pub encrypted_private_key: Vec<u8>,
+    pub nonce: Vec<u8>,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
diff --git a/api/crates/application/src/identity/dtos/mod.rs b/api/crates/application/src/identity/dtos/mod.rs
index 80430ce7..3f3954a9 100644
--- a/api/crates/application/src/identity/dtos/mod.rs
+++ b/api/crates/application/src/identity/dtos/mod.rs
@@ -1,7 +1,9 @@
 mod api_tokens;
 mod auth;
+mod keys;
 mod user_shortcuts;
 
 pub use api_tokens::*;
 pub use auth::*;
+pub use keys::*;
 pub use user_shortcuts::*;
diff --git a/api/crates/application/src/identity/ports/migration_repository.rs b/api/crates/application/src/identity/ports/migration_repository.rs
new file mode 100644
index 00000000..87bc6afd
--- /dev/null
+++ b/api/crates/application/src/identity/ports/migration_repository.rs
@@ -0,0 +1,59 @@
+//! Migration repository port for E2EE migration processing.
+//!
+//! This repository provides read-only access to existing plaintext data
+//! during the E2EE migration process. Write operations are handled through
+//! the transactional interface in `migration_tx_runner`.
+
+use async_trait::async_trait;
+use chrono::{DateTime, Utc};
+use uuid::Uuid;
+
+use crate::core::ports::errors::PortResult;
+
+/// Document information for migration.
+#[derive(Debug, Clone)]
+pub struct MigrationDocument {
+    pub id: Uuid,
+    pub workspace_id: Uuid,
+    pub title: String,
+    pub created_at: DateTime<Utc>,
+}
+
+/// File information for migration.
+#[derive(Debug, Clone)]
+pub struct MigrationFile {
+    pub id: Uuid,
+    pub document_id: Uuid,
+    pub workspace_id: Uuid,
+    pub filename: String,
+    pub content_type: Option<String>,
+    pub storage_path: String,
+}
+
+/// Latest snapshot information.
+#[derive(Debug, Clone)]
+pub struct MigrationSnapshot {
+    pub document_id: Uuid,
+    pub version: i64,
+    pub data: Vec<u8>,
+    pub seq_at_snapshot: Option<i64>,
+}
+
+/// Repository trait for E2EE migration read operations.
+///
+/// This trait provides read-only access to plaintext data that needs
+/// to be encrypted during migration. Write operations are performed
+/// through `MigrationRepositoryTx` within a transaction.
+#[async_trait] +pub trait MigrationRepository: Send + Sync { + /// List all documents owned by or accessible to a user. + /// + /// Returns documents from all workspaces where the user is a member, + /// filtered to only include documents that haven't been encrypted yet. + async fn list_user_documents(&self, user_id: Uuid) -> PortResult>; + + /// List all files associated with a user's documents. + /// + /// Returns files that haven't been encrypted yet. + async fn list_user_files(&self, user_id: Uuid) -> PortResult>; +} diff --git a/api/crates/application/src/identity/ports/migration_tx_runner.rs b/api/crates/application/src/identity/ports/migration_tx_runner.rs new file mode 100644 index 00000000..61f62a41 --- /dev/null +++ b/api/crates/application/src/identity/ports/migration_tx_runner.rs @@ -0,0 +1,183 @@ +//! Transaction runner for E2EE migration operations. +//! +//! This module provides transactional support for the migration process, +//! ensuring that all database operations are atomic. + +use std::any::Any; +use std::future::Future; +use std::pin::Pin; + +use anyhow::anyhow; +use async_trait::async_trait; +use uuid::Uuid; + +use crate::core::ports::errors::PortResult; + +use super::migration_repository::MigrationSnapshot; + +// ============================================================================ +// Type aliases for boxed futures +// ============================================================================ + +pub type BoxFuture<'a, T> = Pin + Send + 'a>>; +pub type BoxedTxResult = Box; +pub type MigrationTxFuture<'tx> = BoxFuture<'tx, anyhow::Result>; +pub type MigrationTxFn = + Box FnOnce(&'tx mut dyn MigrationTx) -> MigrationTxFuture<'tx> + Send>; + +// ============================================================================ +// Transaction context trait +// ============================================================================ + +/// Transaction context for migration operations. +/// +/// Provides access to all repositories needed during migration, +/// all operating within the same database transaction. +pub trait MigrationTx: Send { + /// Access to migration-specific repository operations. + fn migration(&mut self) -> &mut dyn MigrationRepositoryTx; + + /// Access to document keys repository. + fn document_keys(&mut self) -> &mut dyn DocumentKeysRepositoryTx; + + /// Access to workspace keys repository. + fn workspace_keys(&mut self) -> &mut dyn WorkspaceKeysRepositoryTx; + + /// Access to user keys repository. + fn user_keys(&mut self) -> &mut dyn UserKeysRepositoryTx; +} + +// ============================================================================ +// Transactional repository traits +// ============================================================================ + +/// Migration repository operations that run within a transaction. +#[async_trait] +pub trait MigrationRepositoryTx: Send { + /// Update a document with encrypted title. + async fn update_encrypted_title( + &mut self, + document_id: Uuid, + encrypted_title: &[u8], + nonce: &[u8], + ) -> PortResult<()>; + + /// Create or update an encrypted snapshot for a document. + async fn upsert_encrypted_snapshot( + &mut self, + document_id: Uuid, + encrypted_snapshot: &[u8], + nonce: &[u8], + seq_at_snapshot: i64, + ) -> PortResult<()>; + + /// Delete all plaintext updates for a document. + async fn clear_plaintext_updates(&mut self, document_id: Uuid) -> PortResult; + + /// Update a file's metadata with encrypted values. 
+ async fn update_encrypted_file_metadata( + &mut self, + file_id: Uuid, + encrypted_metadata: &[u8], + nonce: &[u8], + encrypted_hash: &str, + ) -> PortResult<()>; + + /// Clear plaintext title from a document after encryption. + async fn clear_plaintext_title(&mut self, document_id: Uuid) -> PortResult<()>; + + /// Clear plaintext metadata from a file after encryption. + async fn clear_plaintext_file_metadata(&mut self, file_id: Uuid) -> PortResult<()>; + + /// Get the latest Yjs snapshot for a document. + async fn get_document_snapshot( + &mut self, + document_id: Uuid, + ) -> PortResult>; + + /// Get the maximum sequence number for a document's updates. + async fn get_document_max_seq(&mut self, document_id: Uuid) -> PortResult>; +} + +/// Document keys repository operations that run within a transaction. +#[async_trait] +pub trait DocumentKeysRepositoryTx: Send { + /// Store or update an encrypted DEK for a document. + async fn upsert_encrypted_dek( + &mut self, + document_id: Uuid, + encrypted_dek: &[u8], + nonce: &[u8], + key_version: i32, + ) -> PortResult<()>; +} + +/// Workspace keys repository operations that run within a transaction. +#[async_trait] +pub trait WorkspaceKeysRepositoryTx: Send { + /// Store or update an encrypted KEK for a workspace member. + async fn upsert_encrypted_kek( + &mut self, + workspace_id: Uuid, + user_id: Uuid, + encrypted_kek: &[u8], + key_version: i32, + ) -> PortResult<()>; +} + +/// User keys repository operations that run within a transaction. +#[async_trait] +pub trait UserKeysRepositoryTx: Send { + /// Mark encryption setup as completed for a user. + async fn mark_encryption_setup_completed(&mut self, user_id: Uuid) -> PortResult<()>; + + /// Check if encryption setup is completed for a user. + async fn is_encryption_setup_completed(&mut self, user_id: Uuid) -> PortResult; +} + +// ============================================================================ +// Transaction runner trait +// ============================================================================ + +/// Runner for executing migration operations within a transaction. +#[async_trait] +pub trait MigrationTxRunner: Send + Sync { + /// Execute a function within a database transaction. + /// + /// The transaction is committed if the function returns Ok, + /// and rolled back if it returns Err. + async fn run_boxed(&self, f: MigrationTxFn) -> anyhow::Result; +} + +// ============================================================================ +// Helper function +// ============================================================================ + +/// Execute a migration operation within a transaction. +/// +/// This is a type-safe wrapper around `MigrationTxRunner::run_boxed`. 
+pub async fn run_migration_tx(runner: &dyn MigrationTxRunner, f: F) -> anyhow::Result +where + T: Send + 'static, + F: for<'tx> FnOnce(&'tx mut dyn MigrationTx) -> BoxFuture<'tx, anyhow::Result> + + Send + + 'static, +{ + let mut f = Some(f); + let result = runner + .run_boxed(Box::new(move |tx| { + let f = f + .take() + .expect("MigrationTx closure must be called exactly once"); + Box::pin(async move { + let out = f(tx).await?; + Ok(Box::new(out) as BoxedTxResult) + }) + })) + .await?; + + result + .downcast::() + .map(|v| *v) + .map_err(|_| anyhow!("migration tx runner output type mismatch")) +} diff --git a/api/crates/application/src/identity/ports/mod.rs b/api/crates/application/src/identity/ports/mod.rs index 4f5afe38..6b994f8b 100644 --- a/api/crates/application/src/identity/ports/mod.rs +++ b/api/crates/application/src/identity/ports/mod.rs @@ -1,6 +1,9 @@ pub mod api_token_repository; pub mod jwt_codec; +pub mod migration_repository; +pub mod migration_tx_runner; pub mod secret_hasher; +pub mod user_keys_repository; pub mod user_repository; pub mod user_session_repository; pub mod user_shortcuts; diff --git a/api/crates/application/src/identity/ports/user_keys_repository.rs b/api/crates/application/src/identity/ports/user_keys_repository.rs new file mode 100644 index 00000000..1c73f341 --- /dev/null +++ b/api/crates/application/src/identity/ports/user_keys_repository.rs @@ -0,0 +1,76 @@ +use async_trait::async_trait; +use uuid::Uuid; + +use crate::core::ports::errors::PortResult; +use domain::identity::keys::{KdfParams, KdfType, KeyType}; + +#[derive(Debug, Clone)] +pub struct UserPublicKeyRow { + pub user_id: Uuid, + pub public_key: Vec, + pub key_type: KeyType, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +#[derive(Debug, Clone)] +pub struct UserEncryptedMasterKeyRow { + pub user_id: Uuid, + pub encrypted_key: Vec, + pub salt: Vec, + pub kdf_type: KdfType, + pub kdf_params: KdfParams, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +#[derive(Debug, Clone)] +pub struct UserEncryptedPrivateKeyRow { + pub user_id: Uuid, + pub encrypted_private_key: Vec, + pub nonce: Vec, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} + +#[async_trait] +pub trait UserKeysRepository: Send + Sync { + // Public keys + async fn get_public_key(&self, user_id: Uuid) -> PortResult>; + async fn upsert_public_key( + &self, + user_id: Uuid, + public_key: &[u8], + key_type: KeyType, + ) -> PortResult; + + // Encrypted master keys (for recovery) + async fn get_encrypted_master_key( + &self, + user_id: Uuid, + ) -> PortResult>; + async fn upsert_encrypted_master_key( + &self, + user_id: Uuid, + encrypted_key: &[u8], + salt: &[u8], + kdf_type: KdfType, + kdf_params: &KdfParams, + ) -> PortResult; + + // Encrypted private keys + async fn get_encrypted_private_key( + &self, + user_id: Uuid, + ) -> PortResult>; + async fn upsert_encrypted_private_key( + &self, + user_id: Uuid, + encrypted_private_key: &[u8], + nonce: &[u8], + ) -> PortResult; + + // Encryption setup status + async fn mark_encryption_setup_completed(&self, user_id: Uuid) -> PortResult<()>; + async fn is_encryption_setup_completed(&self, user_id: Uuid) -> PortResult; +} diff --git a/api/crates/application/src/identity/services/auth/account.rs b/api/crates/application/src/identity/services/auth/account.rs index 86ffca93..70bc817f 100644 --- a/api/crates/application/src/identity/services/auth/account.rs +++ 
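For orientation, a minimal call-site sketch for `run_migration_tx`; the wrapping function and its name are hypothetical, while the closure body mirrors the `needs_migration` implementation shown later in this patch:

// Hypothetical call site: run one transactional read and get a typed result back.
// `runner` can be any MigrationTxRunner, e.g. the sqlx implementation wired in bootstrap.
async fn setup_pending(runner: &dyn MigrationTxRunner, user_id: uuid::Uuid) -> anyhow::Result<bool> {
    run_migration_tx(runner, move |tx| {
        Box::pin(async move {
            // Any mix of tx.migration(), tx.document_keys(), tx.workspace_keys() and
            // tx.user_keys() may be used here; returning Ok commits, Err rolls back.
            let completed = tx.user_keys().is_encryption_setup_completed(user_id).await?;
            Ok(!completed)
        })
    })
    .await
}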
diff --git a/api/crates/application/src/identity/ports/mod.rs b/api/crates/application/src/identity/ports/mod.rs
index 4f5afe38..6b994f8b 100644
--- a/api/crates/application/src/identity/ports/mod.rs
+++ b/api/crates/application/src/identity/ports/mod.rs
@@ -1,6 +1,9 @@
 pub mod api_token_repository;
 pub mod jwt_codec;
+pub mod migration_repository;
+pub mod migration_tx_runner;
 pub mod secret_hasher;
+pub mod user_keys_repository;
 pub mod user_repository;
 pub mod user_session_repository;
 pub mod user_shortcuts;
diff --git a/api/crates/application/src/identity/ports/user_keys_repository.rs b/api/crates/application/src/identity/ports/user_keys_repository.rs
new file mode 100644
index 00000000..1c73f341
--- /dev/null
+++ b/api/crates/application/src/identity/ports/user_keys_repository.rs
@@ -0,0 +1,76 @@
+use async_trait::async_trait;
+use uuid::Uuid;
+
+use crate::core::ports::errors::PortResult;
+use domain::identity::keys::{KdfParams, KdfType, KeyType};
+
+#[derive(Debug, Clone)]
+pub struct UserPublicKeyRow {
+    pub user_id: Uuid,
+    pub public_key: Vec<u8>,
+    pub key_type: KeyType,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[derive(Debug, Clone)]
+pub struct UserEncryptedMasterKeyRow {
+    pub user_id: Uuid,
+    pub encrypted_key: Vec<u8>,
+    pub salt: Vec<u8>,
+    pub kdf_type: KdfType,
+    pub kdf_params: KdfParams,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[derive(Debug, Clone)]
+pub struct UserEncryptedPrivateKeyRow {
+    pub user_id: Uuid,
+    pub encrypted_private_key: Vec<u8>,
+    pub nonce: Vec<u8>,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[async_trait]
+pub trait UserKeysRepository: Send + Sync {
+    // Public keys
+    async fn get_public_key(&self, user_id: Uuid) -> PortResult<Option<UserPublicKeyRow>>;
+    async fn upsert_public_key(
+        &self,
+        user_id: Uuid,
+        public_key: &[u8],
+        key_type: KeyType,
+    ) -> PortResult<UserPublicKeyRow>;
+
+    // Encrypted master keys (for recovery)
+    async fn get_encrypted_master_key(
+        &self,
+        user_id: Uuid,
+    ) -> PortResult<Option<UserEncryptedMasterKeyRow>>;
+    async fn upsert_encrypted_master_key(
+        &self,
+        user_id: Uuid,
+        encrypted_key: &[u8],
+        salt: &[u8],
+        kdf_type: KdfType,
+        kdf_params: &KdfParams,
+    ) -> PortResult<UserEncryptedMasterKeyRow>;
+
+    // Encrypted private keys
+    async fn get_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+    ) -> PortResult<Option<UserEncryptedPrivateKeyRow>>;
+    async fn upsert_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+        encrypted_private_key: &[u8],
+        nonce: &[u8],
+    ) -> PortResult<UserEncryptedPrivateKeyRow>;
+
+    // Encryption setup status
+    async fn mark_encryption_setup_completed(&self, user_id: Uuid) -> PortResult<()>;
+    async fn is_encryption_setup_completed(&self, user_id: Uuid) -> PortResult<bool>;
+}
diff --git a/api/crates/application/src/identity/services/auth/account.rs b/api/crates/application/src/identity/services/auth/account.rs
index 86ffca93..70bc817f 100644
--- a/api/crates/application/src/identity/services/auth/account.rs
+++ b/api/crates/application/src/identity/services/auth/account.rs
@@ -7,7 +7,6 @@ use crate::core::services::errors::ServiceError;
 use crate::documents::ports::document_repository::DocumentRepository;
 use crate::documents::ports::files::files_repository::FilesRepository;
 use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
 use crate::identity::dtos::UserDto;
 use crate::identity::ports::secret_hasher::SecretHasher;
 use crate::identity::ports::user_repository::UserRepository;
@@ -31,7 +30,6 @@ pub struct AccountService {
     plugin_repo: Arc<dyn PluginRepository>,
     plugin_assets: Arc<dyn PluginAssetStore>,
     git_repo: Arc<dyn GitRepository>,
-    git_workspace: Arc<dyn GitWorkspacePort>,
     storage_jobs: Arc<dyn StorageProjectionQueue>,
     workspace_service: Arc<dyn WorkspaceServiceFacade>,
 }
@@ -95,7 +93,6 @@ impl AccountService {
         plugin_repo: Arc<dyn PluginRepository>,
         plugin_assets: Arc<dyn PluginAssetStore>,
         git_repo: Arc<dyn GitRepository>,
-        git_workspace: Arc<dyn GitWorkspacePort>,
         storage_jobs: Arc<dyn StorageProjectionQueue>,
         workspace_service: Arc<dyn WorkspaceServiceFacade>,
     ) -> Self {
@@ -108,7 +105,6 @@ impl AccountService {
             plugin_repo,
             plugin_assets,
             git_repo,
-            git_workspace,
             storage_jobs,
             workspace_service,
         }
     }
@@ -207,7 +203,6 @@ impl AccountService {
             plugin_repo: self.plugin_repo.as_ref(),
             plugin_assets: self.plugin_assets.clone(),
             git_repo: self.git_repo.as_ref(),
-            git_workspace: self.git_workspace.as_ref(),
            storage_jobs: self.storage_jobs.as_ref(),
        };
        uc.execute(user_id).await.map_err(ServiceError::from)
diff --git a/api/crates/application/src/identity/services/migration/mod.rs b/api/crates/application/src/identity/services/migration/mod.rs
new file mode 100644
index 00000000..2878f558
--- /dev/null
+++ b/api/crates/application/src/identity/services/migration/mod.rs
@@ -0,0 +1,344 @@
+//! E2EE migration service.
+//!
+//! This service handles the server-side encryption of existing plaintext data
+//! during the E2EE migration process.
+
+pub mod types;
+
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use serde_json::json;
+use tracing::{info, warn};
+use uuid::Uuid;
+
+use crate::core::services::errors::ServiceError;
+use crate::identity::ports::migration_repository::MigrationRepository;
+use crate::identity::ports::migration_tx_runner::{run_migration_tx, MigrationTx, MigrationTxRunner};
+
+pub use types::*;
+
+/// Encryption function signature.
+/// Takes (key, plaintext) and returns (ciphertext, nonce).
+pub type EncryptFn = fn(&[u8], &[u8]) -> Result<(Vec<u8>, [u8; 24]), anyhow::Error>;
+
+/// Migration service for E2EE.
+pub struct MigrationService {
+    migration_repo: Arc<dyn MigrationRepository>,
+    tx_runner: Arc<dyn MigrationTxRunner>,
+    encrypt_fn: EncryptFn,
+}
+
+/// Service facade trait for E2EE migration.
+#[async_trait]
+pub trait MigrationServiceFacade: Send + Sync {
+    /// Execute the full migration for a user.
+    ///
+    /// This encrypts all of the user's documents and files using the provided keys.
+    /// The operation is atomic - either all data is encrypted or none is.
+    async fn migrate_user_data(
+        &self,
+        user_id: Uuid,
+        request: MigrationRequest,
+    ) -> Result<MigrationResult, ServiceError>;
+
+    /// Check if migration is needed for a user.
+    async fn needs_migration(&self, user_id: Uuid) -> Result<bool, ServiceError>;
+}
+
+impl MigrationService {
+    pub fn new(
+        migration_repo: Arc<dyn MigrationRepository>,
+        tx_runner: Arc<dyn MigrationTxRunner>,
+    ) -> Self {
+        Self {
+            migration_repo,
+            tx_runner,
+            encrypt_fn: default_encrypt_fn,
+        }
+    }
+
+    /// Create with a custom encryption function (for testing).
+    #[cfg(test)]
+    pub fn with_encrypt_fn(mut self, encrypt_fn: EncryptFn) -> Self {
+        self.encrypt_fn = encrypt_fn;
+        self
+    }
+}
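Because `encrypt_fn` is a plain fn pointer, tests can swap in a deterministic stand-in via `with_encrypt_fn`. A sketch under that assumption; `fake_encrypt` and `service_for_tests` are illustrative names, not code from this patch:

// NOT encryption: echoes the plaintext with a fixed nonce so test assertions are stable.
#[cfg(test)]
fn fake_encrypt(_key: &[u8], plaintext: &[u8]) -> Result<(Vec<u8>, [u8; 24]), anyhow::Error> {
    Ok((plaintext.to_vec(), [0u8; 24]))
}

#[cfg(test)]
fn service_for_tests(
    migration_repo: std::sync::Arc<dyn MigrationRepository>,
    tx_runner: std::sync::Arc<dyn MigrationTxRunner>,
) -> MigrationService {
    MigrationService::new(migration_repo, tx_runner).with_encrypt_fn(fake_encrypt)
}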
+#[async_trait]
+impl MigrationServiceFacade for MigrationService {
+    async fn migrate_user_data(
+        &self,
+        user_id: Uuid,
+        request: MigrationRequest,
+    ) -> Result<MigrationResult, ServiceError> {
+        // Get all documents and files for the user (outside transaction for read-only ops)
+        let documents = self
+            .migration_repo
+            .list_user_documents(user_id)
+            .await
+            .map_err(ServiceError::from)?;
+
+        let files = self
+            .migration_repo
+            .list_user_files(user_id)
+            .await
+            .map_err(ServiceError::from)?;
+
+        let encrypt_fn = self.encrypt_fn;
+
+        // Execute all write operations within a transaction
+        let result = run_migration_tx(self.tx_runner.as_ref(), move |tx| {
+            Box::pin(async move {
+                migrate_user_data_in_tx(tx, user_id, request, documents, files, encrypt_fn).await
+            })
+        })
+        .await
+        .map_err(|e| ServiceError::Unexpected(e))?;
+
+        Ok(result)
+    }
+
+    async fn needs_migration(&self, user_id: Uuid) -> Result<bool, ServiceError> {
+        // Check using the transaction runner (reads from users table)
+        let result = run_migration_tx(self.tx_runner.as_ref(), move |tx| {
+            Box::pin(async move {
+                let completed = tx.user_keys().is_encryption_setup_completed(user_id).await?;
+                Ok(!completed)
+            })
+        })
+        .await
+        .map_err(|e| ServiceError::Unexpected(e))?;
+
+        Ok(result)
+    }
+}
+
+/// Execute migration within a transaction.
+async fn migrate_user_data_in_tx(
+    tx: &mut dyn MigrationTx,
+    user_id: Uuid,
+    request: MigrationRequest,
+    documents: Vec<MigrationDocument>,
+    files: Vec<MigrationFile>,
+    encrypt_fn: EncryptFn,
+) -> anyhow::Result<MigrationResult> {
+    // Check if already migrated
+    let already_completed = tx.user_keys().is_encryption_setup_completed(user_id).await?;
+
+    if already_completed {
+        info!(user_id = %user_id, "User already completed E2EE migration");
+        return Ok(MigrationResult {
+            documents_encrypted: 0,
+            files_encrypted: 0,
+            updates_cleared: 0,
+            status: MigrationStatus::AlreadyCompleted,
+        });
+    }
+
+    info!(user_id = %user_id, "Starting E2EE migration");
+
+    let mut documents_encrypted = 0;
+    let mut files_encrypted = 0;
+    let mut updates_cleared: u64 = 0;
+
+    // Encrypt each document
+    for doc in &documents {
+        let dek = request.document_deks.get(&doc.id).ok_or_else(|| {
+            warn!(document_id = %doc.id, "Missing DEK for document");
+            anyhow::anyhow!("missing DEK for document")
+        })?;
+
+        // Validate DEK length
+        if dek.len() != 32 {
+            anyhow::bail!("invalid DEK length");
+        }
+
+        // Encrypt title
+        let (encrypted_title, title_nonce) = encrypt_title(encrypt_fn, dek, &doc.title)?;
+
+        tx.migration()
+            .update_encrypted_title(doc.id, &encrypted_title, &title_nonce)
+            .await?;
+
+        // Get and encrypt snapshot if exists
+        if let Some(snapshot) = tx.migration().get_document_snapshot(doc.id).await? {
+            let (encrypted_snapshot, snapshot_nonce) =
+                encrypt_snapshot(encrypt_fn, dek, &snapshot.data)?;
+
+            // Get current max seq for seq_at_snapshot
+            let max_seq = tx
+                .migration()
+                .get_document_max_seq(doc.id)
+                .await?
+                .unwrap_or(0);
+
+            tx.migration()
+                .upsert_encrypted_snapshot(doc.id, &encrypted_snapshot, &snapshot_nonce, max_seq)
+                .await?;
+
+            // Clear plaintext updates
+            let cleared = tx.migration().clear_plaintext_updates(doc.id).await?;
+            updates_cleared += cleared;
+        }
+
+        // Clear plaintext title
+        tx.migration().clear_plaintext_title(doc.id).await?;
+
+        // Store encrypted DEK
+        if let Some(encrypted_dek) = request.encrypted_document_deks.get(&doc.id) {
+            tx.document_keys()
+                .upsert_encrypted_dek(
+                    doc.id,
+                    &encrypted_dek.encrypted_dek,
+                    &encrypted_dek.nonce,
+                    1, // Initial key version
+                )
+                .await?;
+        }
+
+        documents_encrypted += 1;
+    }
+
+    // Encrypt each file's metadata
+    for file in &files {
+        let dek = request.document_deks.get(&file.document_id).ok_or_else(|| {
+            warn!(file_id = %file.id, document_id = %file.document_id, "Missing DEK for file's document");
+            anyhow::anyhow!("missing DEK for file's document")
+        })?;
+
+        let (encrypted_metadata, metadata_nonce, encrypted_hash) =
+            encrypt_file_metadata(encrypt_fn, dek, &file.filename, file.content_type.as_deref())?;
+
+        tx.migration()
+            .update_encrypted_file_metadata(file.id, &encrypted_metadata, &metadata_nonce, &encrypted_hash)
+            .await?;
+
+        // Clear plaintext metadata
+        tx.migration().clear_plaintext_file_metadata(file.id).await?;
+
+        files_encrypted += 1;
+    }
+
+    // Store workspace KEKs for all members
+    for (workspace_id, member_keks) in &request.encrypted_workspace_keks {
+        for member_kek in member_keks {
+            tx.workspace_keys()
+                .upsert_encrypted_kek(
+                    *workspace_id,
+                    member_kek.user_id,
+                    &member_kek.encrypted_kek,
+                    1, // Initial key version
+                )
+                .await?;
+        }
+    }
+
+    // Mark migration as completed
+    tx.user_keys().mark_encryption_setup_completed(user_id).await?;
+
+    info!(
+        user_id = %user_id,
+        documents_encrypted,
+        files_encrypted,
+        updates_cleared,
+        "E2EE migration completed"
+    );
+
+    Ok(MigrationResult {
+        documents_encrypted,
+        files_encrypted,
+        updates_cleared,
+        status: MigrationStatus::Completed,
+    })
+}
+
+/// Encrypt a document's title.
+fn encrypt_title(
+    encrypt_fn: EncryptFn,
+    dek: &[u8],
+    title: &str,
+) -> anyhow::Result<(Vec<u8>, Vec<u8>)> {
+    let (ciphertext, nonce) = encrypt_fn(dek, title.as_bytes())?;
+    Ok((ciphertext, nonce.to_vec()))
+}
+
+/// Encrypt a document's Yjs snapshot.
+fn encrypt_snapshot(
+    encrypt_fn: EncryptFn,
+    dek: &[u8],
+    snapshot: &[u8],
+) -> anyhow::Result<(Vec<u8>, Vec<u8>)> {
+    let (ciphertext, nonce) = encrypt_fn(dek, snapshot)?;
+    Ok((ciphertext, nonce.to_vec()))
+}
+
+/// Encrypt file metadata (filename, content_type).
+fn encrypt_file_metadata(
+    encrypt_fn: EncryptFn,
+    dek: &[u8],
+    filename: &str,
+    content_type: Option<&str>,
+) -> anyhow::Result<(Vec<u8>, Vec<u8>, String)> {
+    let metadata = json!({
+        "filename": filename,
+        "content_type": content_type
+    });
+    let metadata_bytes = serde_json::to_vec(&metadata)?;
+
+    let (ciphertext, nonce) = encrypt_fn(dek, &metadata_bytes)?;
+
+    // Create encrypted hash (hash of the encrypted metadata)
+    let encrypted_hash = format!("enc:{}", hex::encode(&ciphertext[..16.min(ciphertext.len())]));
+
+    Ok((ciphertext, nonce.to_vec(), encrypted_hash))
+}
+
+/// Default encryption function using XChaCha20-Poly1305.
+fn default_encrypt_fn(key: &[u8], plaintext: &[u8]) -> Result<(Vec<u8>, [u8; 24]), anyhow::Error> {
+    use chacha20poly1305::{
+        aead::{Aead, KeyInit},
+        XChaCha20Poly1305, XNonce,
+    };
+    use rand::RngCore;
+
+    if key.len() != 32 {
+        anyhow::bail!("Invalid key length: expected 32, got {}", key.len());
+    }
+
+    let cipher = XChaCha20Poly1305::new_from_slice(key)
+        .map_err(|e| anyhow::anyhow!("Invalid key: {}", e))?;
+
+    let mut nonce_bytes = [0u8; 24];
+    rand::thread_rng().fill_bytes(&mut nonce_bytes);
+    let nonce = XNonce::from_slice(&nonce_bytes);
+
+    let ciphertext = cipher
+        .encrypt(nonce, plaintext)
+        .map_err(|e| anyhow::anyhow!("Encryption failed: {}", e))?;
+
+    Ok((ciphertext, nonce_bytes))
+}
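For symmetry, the matching decrypt direction of the same XChaCha20-Poly1305 scheme; in this design decryption normally happens client-side, so this helper is an illustrative sketch and not part of the patch:

// Inverse of default_encrypt_fn: same key, the 24-byte nonce produced at encryption time.
fn decrypt_sketch(key: &[u8], nonce_bytes: &[u8; 24], ciphertext: &[u8]) -> anyhow::Result<Vec<u8>> {
    use chacha20poly1305::{
        aead::{Aead, KeyInit},
        XChaCha20Poly1305, XNonce,
    };

    let cipher = XChaCha20Poly1305::new_from_slice(key)
        .map_err(|e| anyhow::anyhow!("Invalid key: {}", e))?;
    let nonce = XNonce::from_slice(nonce_bytes);
    // The Poly1305 tag is verified during decryption, so an error here means
    // "wrong key or tampered data", not a recoverable condition.
    cipher
        .decrypt(nonce, ciphertext)
        .map_err(|e| anyhow::anyhow!("Decryption failed: {}", e))
}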
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_migration_progress_percent() {
+        let progress = MigrationProgress {
+            total_documents: 10,
+            processed_documents: 5,
+            total_files: 10,
+            processed_files: 5,
+        };
+        assert!((progress.percent() - 50.0).abs() < 0.01);
+    }
+
+    #[test]
+    fn test_migration_progress_empty() {
+        let progress = MigrationProgress::default();
+        assert!((progress.percent() - 100.0).abs() < 0.01);
+    }
+}
diff --git a/api/crates/application/src/identity/services/migration/types.rs b/api/crates/application/src/identity/services/migration/types.rs
new file mode 100644
index 00000000..ea214ddd
--- /dev/null
+++ b/api/crates/application/src/identity/services/migration/types.rs
@@ -0,0 +1,89 @@
+//! Types for E2EE migration service.
+
+use std::collections::HashMap;
+use uuid::Uuid;
+
+/// Request to migrate user data to E2EE.
+///
+/// Contains all the keys needed to encrypt the user's existing data.
+#[derive(Debug, Clone)]
+pub struct MigrationRequest {
+    /// Workspace KEKs (Key Encryption Keys).
+    /// Maps workspace_id -> raw KEK (32 bytes).
+    pub workspace_keks: HashMap<Uuid, Vec<u8>>,
+
+    /// Document DEKs (Data Encryption Keys).
+    /// Maps document_id -> raw DEK (32 bytes).
+    pub document_deks: HashMap<Uuid, Vec<u8>>,
+
+    /// Encrypted workspace KEKs to store for each member.
+    /// Maps workspace_id -> Vec<(user_id, encrypted_kek, nonce)>.
+    pub encrypted_workspace_keks: HashMap<Uuid, Vec<MemberEncryptedKek>>,
+
+    /// Encrypted DEKs to store for each document.
+    /// Maps document_id -> (encrypted_dek, nonce).
+    pub encrypted_document_deks: HashMap<Uuid, EncryptedDek>,
+}
+
+/// Encrypted KEK for a workspace member.
+///
+/// The KEK is encrypted with the member's ECDH public key.
+/// The encryption format (including nonce) is handled by the client.
+#[derive(Debug, Clone)]
+pub struct MemberEncryptedKek {
+    pub user_id: Uuid,
+    /// KEK encrypted with user's ECDH public key.
+    pub encrypted_kek: Vec<u8>,
+}
+
+/// Encrypted DEK for a document.
+#[derive(Debug, Clone)]
+pub struct EncryptedDek {
+    pub encrypted_dek: Vec<u8>,
+    pub nonce: Vec<u8>,
+}
+
+/// Result of the migration process.
+#[derive(Debug, Clone)]
+pub struct MigrationResult {
+    /// Number of documents encrypted.
+    pub documents_encrypted: usize,
+
+    /// Number of files with encrypted metadata.
+    pub files_encrypted: usize,
+
+    /// Total number of Yjs updates cleared.
+    pub updates_cleared: u64,
+
+    /// Migration status.
+    pub status: MigrationStatus,
+}
+
+/// Status of the migration.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum MigrationStatus {
+    /// Migration completed successfully.
+    Completed,
+    /// Migration was already completed before.
+    AlreadyCompleted,
+}
+
+/// Progress tracking during migration.
+#[derive(Debug, Clone, Default)]
+pub struct MigrationProgress {
+    pub total_documents: usize,
+    pub processed_documents: usize,
+    pub total_files: usize,
+    pub processed_files: usize,
+}
+
+impl MigrationProgress {
+    pub fn percent(&self) -> f32 {
+        let total = self.total_documents + self.total_files;
+        if total == 0 {
+            return 100.0;
+        }
+        let done = self.processed_documents + self.processed_files;
+        (done as f32 / total as f32) * 100.0
+    }
+}
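To make the request shape concrete, an illustrative construction for a single one-document workspace; every ID and key byte below is a placeholder, since real keys come from the client's key hierarchy:

fn example_request(workspace_id: Uuid, document_id: Uuid, member: Uuid) -> MigrationRequest {
    let kek = vec![0u8; 32]; // placeholder raw KEK (32 bytes)
    let dek = vec![0u8; 32]; // placeholder raw DEK (must be 32 bytes, as validated in the service)
    MigrationRequest {
        workspace_keks: HashMap::from([(workspace_id, kek)]),
        document_deks: HashMap::from([(document_id, dek)]),
        encrypted_workspace_keks: HashMap::from([(
            workspace_id,
            vec![MemberEncryptedKek { user_id: member, encrypted_kek: vec![0u8; 48] }],
        )]),
        encrypted_document_deks: HashMap::from([(
            document_id,
            EncryptedDek { encrypted_dek: vec![0u8; 48], nonce: vec![0u8; 24] },
        )]),
    }
}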
diff --git a/api/crates/application/src/identity/services/mod.rs b/api/crates/application/src/identity/services/mod.rs
index de652f14..59aa7bd8 100644
--- a/api/crates/application/src/identity/services/mod.rs
+++ b/api/crates/application/src/identity/services/mod.rs
@@ -1,3 +1,5 @@
 pub mod api_tokens;
 pub mod auth;
+pub mod migration;
+pub mod user_keys;
 pub mod user_shortcuts;
diff --git a/api/crates/application/src/identity/services/user_keys/mod.rs b/api/crates/application/src/identity/services/user_keys/mod.rs
new file mode 100644
index 00000000..97d0aabc
--- /dev/null
+++ b/api/crates/application/src/identity/services/user_keys/mod.rs
@@ -0,0 +1,197 @@
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use uuid::Uuid;
+
+use crate::core::services::errors::ServiceError;
+use crate::identity::dtos::{
+    UserEncryptedMasterKeyDto, UserEncryptedPrivateKeyDto, UserPublicKeyDto,
+};
+use crate::identity::ports::user_keys_repository::UserKeysRepository;
+use domain::identity::keys::{KdfParams, KdfType, KeyType};
+
+pub struct UserKeysService {
+    repo: Arc<dyn UserKeysRepository>,
+}
+
+#[async_trait]
+pub trait UserKeysServiceFacade: Send + Sync {
+    // Public keys
+    async fn get_public_key(&self, user_id: Uuid) -> Result<Option<UserPublicKeyDto>, ServiceError>;
+    async fn register_public_key(
+        &self,
+        user_id: Uuid,
+        public_key: Vec<u8>,
+        key_type: KeyType,
+    ) -> Result<UserPublicKeyDto, ServiceError>;
+
+    // Master key backup (for recovery)
+    async fn get_master_key_backup(
+        &self,
+        user_id: Uuid,
+    ) -> Result<Option<UserEncryptedMasterKeyDto>, ServiceError>;
+    async fn store_master_key_backup(
+        &self,
+        user_id: Uuid,
+        encrypted_key: Vec<u8>,
+        salt: Vec<u8>,
+        kdf_type: KdfType,
+        kdf_params: KdfParams,
+    ) -> Result<UserEncryptedMasterKeyDto, ServiceError>;
+
+    // Private key (encrypted with UMK)
+    async fn get_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+    ) -> Result<Option<UserEncryptedPrivateKeyDto>, ServiceError>;
+    async fn store_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+        encrypted_private_key: Vec<u8>,
+        nonce: Vec<u8>,
+    ) -> Result<UserEncryptedPrivateKeyDto, ServiceError>;
+
+    // Encryption setup status
+    async fn mark_encryption_setup_completed(&self, user_id: Uuid) -> Result<(), ServiceError>;
+    async fn is_encryption_setup_completed(&self, user_id: Uuid) -> Result<bool, ServiceError>;
+}
+
+impl UserKeysService {
+    pub fn new(repo: Arc<dyn UserKeysRepository>) -> Self {
+        Self { repo }
+    }
+}
+
+#[async_trait]
+impl UserKeysServiceFacade for UserKeysService {
+    async fn get_public_key(&self, user_id: Uuid) -> Result<Option<UserPublicKeyDto>, ServiceError> {
+        let row = self
+            .repo
+            .get_public_key(user_id)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(row.map(|r| UserPublicKeyDto {
+            user_id: r.user_id,
+            public_key: r.public_key,
+            key_type: r.key_type,
+            created_at: r.created_at,
+            updated_at: r.updated_at,
+        }))
+    }
+
+    async fn register_public_key(
+        &self,
+        user_id: Uuid,
+        public_key: Vec<u8>,
+        key_type: KeyType,
+    ) -> Result<UserPublicKeyDto, ServiceError> {
+        let row = self
+            .repo
+            .upsert_public_key(user_id, &public_key, key_type)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(UserPublicKeyDto {
+            user_id: row.user_id,
+            public_key: row.public_key,
+            key_type: row.key_type,
+            created_at: row.created_at,
+            updated_at: row.updated_at,
+        })
+    }
+
+    async fn get_master_key_backup(
+        &self,
+        user_id: Uuid,
+    ) -> Result<Option<UserEncryptedMasterKeyDto>, ServiceError> {
+        let row = self
+            .repo
+            .get_encrypted_master_key(user_id)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(row.map(|r| UserEncryptedMasterKeyDto {
+            user_id: r.user_id,
+            encrypted_key: r.encrypted_key,
+            salt: r.salt,
+            kdf_type: r.kdf_type,
+            kdf_params: r.kdf_params,
+            created_at: r.created_at,
+            updated_at: r.updated_at,
+        }))
+    }
+
+    async fn store_master_key_backup(
+        &self,
+        user_id: Uuid,
+        encrypted_key: Vec<u8>,
+        salt: Vec<u8>,
+        kdf_type: KdfType,
+        kdf_params: KdfParams,
+    ) -> Result<UserEncryptedMasterKeyDto, ServiceError> {
+        let row = self
+            .repo
+            .upsert_encrypted_master_key(user_id, &encrypted_key, &salt, kdf_type, &kdf_params)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(UserEncryptedMasterKeyDto {
+            user_id: row.user_id,
+            encrypted_key: row.encrypted_key,
+            salt: row.salt,
+            kdf_type: row.kdf_type,
+            kdf_params: row.kdf_params,
+            created_at: row.created_at,
+            updated_at: row.updated_at,
+        })
+    }
+
+    async fn get_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+    ) -> Result<Option<UserEncryptedPrivateKeyDto>, ServiceError> {
+        let row = self
+            .repo
+            .get_encrypted_private_key(user_id)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(row.map(|r| UserEncryptedPrivateKeyDto {
+            user_id: r.user_id,
+            encrypted_private_key: r.encrypted_private_key,
+            nonce: r.nonce,
+            created_at: r.created_at,
+            updated_at: r.updated_at,
+        }))
+    }
+
+    async fn store_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+        encrypted_private_key: Vec<u8>,
+        nonce: Vec<u8>,
+    ) -> Result<UserEncryptedPrivateKeyDto, ServiceError> {
+        let row = self
+            .repo
+            .upsert_encrypted_private_key(user_id, &encrypted_private_key, &nonce)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(UserEncryptedPrivateKeyDto {
+            user_id: row.user_id,
+            encrypted_private_key: row.encrypted_private_key,
+            nonce: row.nonce,
+            created_at: row.created_at,
+            updated_at: row.updated_at,
+        })
+    }
+
+    async fn mark_encryption_setup_completed(&self, user_id: Uuid) -> Result<(), ServiceError> {
+        self.repo
+            .mark_encryption_setup_completed(user_id)
+            .await
+            .map_err(ServiceError::from)
+    }
+
+    async fn is_encryption_setup_completed(&self, user_id: Uuid) -> Result<bool, ServiceError> {
+        self.repo
+            .is_encryption_setup_completed(user_id)
+            .await
+            .map_err(ServiceError::from)
+    }
+}
diff --git a/api/crates/application/src/identity/use_cases/auth/delete_account.rs b/api/crates/application/src/identity/use_cases/auth/delete_account.rs
index 6527d671..ec10cdaf 100644
--- a/api/crates/application/src/identity/use_cases/auth/delete_account.rs
+++ b/api/crates/application/src/identity/use_cases/auth/delete_account.rs
@@ -9,7 +9,6 @@ use crate::core::ports::storage::storage_projection_queue::{
 use crate::documents::ports::document_repository::DocumentRepository;
 use crate::documents::ports::files::files_repository::FilesRepository;
 use crate::git::ports::git_repository::GitRepository;
-use crate::git::ports::git_workspace::GitWorkspacePort;
 use crate::identity::ports::user_repository::UserRepository;
 use crate::plugins::ports::plugin_asset_store::PluginAssetStore;
 use crate::plugins::ports::plugin_installation_repository::PluginInstallationRepository;
@@ -18,14 +17,13 @@ use domain::access::permissions::PermissionSet;
 use domain::documents::doc_type::DocumentType;
 use domain::plugins::scope::{PluginRecordScope, PluginScope};
 
-pub struct DeleteAccount<'a, UR, DR, PIR, PR, GR, GW, SJ, FR>
+pub struct DeleteAccount<'a, UR, DR, PIR, PR, GR, SJ, FR>
 where
     UR: UserRepository + ?Sized,
     DR: DocumentRepository + ?Sized,
     PIR: PluginInstallationRepository + ?Sized,
     PR: PluginRepository + ?Sized,
     GR: GitRepository + ?Sized,
-    GW: GitWorkspacePort + ?Sized,
     SJ: StorageProjectionQueue + ?Sized,
     FR: FilesRepository + ?Sized,
 {
@@ -35,19 +33,17 @@ where
     pub plugin_repo: &'a PR,
     pub plugin_assets: Arc<dyn PluginAssetStore>,
     pub git_repo: &'a GR,
-    pub git_workspace: &'a GW,
     pub storage_jobs: &'a SJ,
     pub files_repo: &'a FR,
 }
 
-impl<'a, UR, DR, PIR, PR, GR, GW, SJ, FR> DeleteAccount<'a, UR, DR, PIR, PR, GR, GW, SJ, FR>
+impl<'a, UR, DR, PIR, PR, GR, SJ, FR> DeleteAccount<'a, UR, DR, PIR, PR, GR, SJ, FR>
 where
     UR: UserRepository + ?Sized,
     DR: DocumentRepository + ?Sized,
     PIR: PluginInstallationRepository + ?Sized,
     PR: PluginRepository + ?Sized,
     GR: GitRepository + ?Sized,
-    GW: GitWorkspacePort + ?Sized,
     SJ: StorageProjectionQueue + ?Sized,
     FR: FilesRepository + ?Sized,
 {
@@ -148,12 +144,8 @@ where
             }
         }
 
-        self.git_repo.delete_sync_logs(user_id).await?;
-        if let Err(err) = self.git_workspace.remove_repository(user_id).await {
-            tracing::warn!(user_id = %user_id, error = ?err, "failed to remove git workspace during account deletion");
-        }
+        // Delete git config (git repository data is stored client-side in IndexedDB for E2EE)
         let _ = self.git_repo.delete_config(user_id).await?;
-        self.git_repo.delete_repository_state(user_id).await?;
 
         let deleted = self.user_repo.delete_user(user_id).await?;
         anyhow::ensure!(deleted, "user not found");
diff --git a/api/crates/application/src/plugins/services/management.rs b/api/crates/application/src/plugins/services/management.rs
index 36e09465..0aca3049 100644
--- a/api/crates/application/src/plugins/services/management.rs
+++ b/api/crates/application/src/plugins/services/management.rs
@@ -27,9 +27,11 @@ pub struct PluginManifestItem {
     pub scope: PluginScope,
     pub mounts: Vec<Value>,
     pub frontend: Value,
+    pub backend: Value,
     pub permissions: Vec<String>,
     pub config: Value,
     pub ui: Value,
+    pub renderers: Value,
     pub author: Option<String>,
     pub repository: Option<String>,
 }
@@ -433,6 +435,67 @@ impl PluginManagementService {
             None => Value::Null,
         };
 
+        // Build backend with signed WASM URL
+        let backend_value = manifest.get("backend");
+        let backend = match backend_value {
+            Some(v) => {
+                let wasm_path = v
+                    .get("wasm")
+                    .and_then(|x| x.as_str())
+                    .unwrap_or("backend/plugin.wasm");
+                let normalized = match normalize_manifest_path(wasm_path) {
+                    Ok(path) => path,
+                    Err(err) => {
+                        warn!(error = ?err, plugin = id, version = version, "backend_wasm_path_invalid");
+                        return None;
+                    }
+                };
+                let signed = self.asset_signer.sign_url(
+                    signer_scope,
+                    id,
+                    version,
+                    &normalized,
+                    self.manifest_ttl_secs,
+                );
+                serde_json::json!({
+                    "wasm": signed,
+                })
+            }
+            None => Value::Null,
+        };
+
+        // Build renderers with signed hydrate URLs
+        let renderers = manifest
+            .get("renderers")
+            .and_then(|r| r.as_array())
+            .map(|arr| {
+                arr.iter()
+                    .filter_map(|renderer| {
+                        let mut r = renderer.clone();
+                        if let Some(hydrate) = r.get_mut("hydrate") {
+                            if let Some(module) = hydrate.get("module").and_then(|m| m.as_str()) {
+                                // Normalize and sign the module path
+                                if let Ok(normalized) = normalize_manifest_path(module) {
+                                    let signed = self.asset_signer.sign_url(
+                                        signer_scope,
+                                        id,
+                                        version,
+                                        &normalized,
+                                        self.manifest_ttl_secs,
+                                    );
+                                    if let Some(obj) = hydrate.as_object_mut() {
+                                        obj.insert("module".to_string(), json!(signed));
+                                    }
+                                }
+                            }
+                        }
+                        Some(r)
+                    })
+                    .collect::<Vec<_>>()
+            })
+            .map(|v| json!(v))
+            .unwrap_or_else(|| json!([]));
+
         Some(PluginManifestItem {
             id: id.to_string(),
             name,
@@ -440,9 +503,11 @@ impl PluginManagementService {
             scope: scope.as_plugin_scope(),
             mounts,
             frontend,
+            backend,
             permissions,
             config,
             ui,
+            renderers,
             author,
             repository,
         })
diff --git a/api/crates/application/src/workspaces/dtos/keys.rs b/api/crates/application/src/workspaces/dtos/keys.rs
new file mode 100644
index 00000000..add648b1
--- /dev/null
+++ b/api/crates/application/src/workspaces/dtos/keys.rs
@@ -0,0 +1,11 @@
+use uuid::Uuid;
+
+#[derive(Debug, Clone)]
+pub struct WorkspaceEncryptedKeyDto {
+    pub id: Uuid,
+    pub workspace_id: Uuid,
+    pub user_id: Uuid,
+    pub encrypted_kek: Vec<u8>,
+    pub key_version: i32,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
diff --git a/api/crates/application/src/workspaces/dtos/mod.rs b/api/crates/application/src/workspaces/dtos/mod.rs
index 33573843..1da1a6fc 100644
--- a/api/crates/application/src/workspaces/dtos/mod.rs
+++ b/api/crates/application/src/workspaces/dtos/mod.rs
@@ -1 +1,3 @@
-// Intentionally left empty for now.
+mod keys;
+
+pub use keys::*;
diff --git a/api/crates/application/src/workspaces/ports/mod.rs b/api/crates/application/src/workspaces/ports/mod.rs
index 895c5f07..a15a601f 100644
--- a/api/crates/application/src/workspaces/ports/mod.rs
+++ b/api/crates/application/src/workspaces/ports/mod.rs
@@ -1 +1,2 @@
+pub mod workspace_keys_repository;
 pub mod workspace_repository;
diff --git a/api/crates/application/src/workspaces/ports/workspace_keys_repository.rs b/api/crates/application/src/workspaces/ports/workspace_keys_repository.rs
new file mode 100644
index 00000000..07ecf8df
--- /dev/null
+++ b/api/crates/application/src/workspaces/ports/workspace_keys_repository.rs
@@ -0,0 +1,52 @@
+use async_trait::async_trait;
+use uuid::Uuid;
+
+use crate::core::ports::errors::PortResult;
+
+#[derive(Debug, Clone)]
+pub struct WorkspaceEncryptedKeyRow {
+    pub id: Uuid,
+    pub workspace_id: Uuid,
+    pub user_id: Uuid,
+    pub encrypted_kek: Vec<u8>,
+    pub key_version: i32,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[async_trait]
+pub trait WorkspaceKeysRepository: Send + Sync {
+    /// Get the encrypted KEK for a user in a workspace
+    async fn get_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+    ) -> PortResult<Option<WorkspaceEncryptedKeyRow>>;
+
+    /// Get all encrypted KEKs for a workspace (for re-encryption during key rotation)
+    async fn list_encrypted_keks(
+        &self,
+        workspace_id: Uuid,
+    ) -> PortResult<Vec<WorkspaceEncryptedKeyRow>>;
+
+    /// Store or update an encrypted KEK for a user
+    async fn upsert_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+        encrypted_kek: &[u8],
+        key_version: i32,
+    ) -> PortResult<WorkspaceEncryptedKeyRow>;
+
+    /// Delete an encrypted KEK (when user is removed from workspace)
+    async fn delete_encrypted_kek(&self, workspace_id: Uuid, user_id: Uuid) -> PortResult<bool>;
+
+    /// Delete a specific key version for a workspace (for key rotation cleanup)
+    async fn delete_encrypted_kek_version(
+        &self,
+        workspace_id: Uuid,
+        key_version: i32,
+    ) -> PortResult<u64>;
+
+    /// Get the current key version for a workspace
+    async fn get_current_key_version(&self, workspace_id: Uuid) -> PortResult<Option<i32>>;
+}
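As a sketch of how an adapter could satisfy one of these methods, a minimal sqlx example; the `workspace_encrypted_keys` table and its columns are assumptions for illustration, not schema taken from this patch:

// Hypothetical adapter internals for get_current_key_version.
async fn current_key_version_sketch(
    pool: &sqlx::PgPool,
    workspace_id: Uuid,
) -> anyhow::Result<Option<i32>> {
    // MAX() over zero rows yields SQL NULL, which maps cleanly onto Option<i32>.
    let version: Option<i32> = sqlx::query_scalar(
        "SELECT MAX(key_version) FROM workspace_encrypted_keys WHERE workspace_id = $1",
    )
    .bind(workspace_id)
    .fetch_one(pool)
    .await?;
    Ok(version)
}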
diff --git a/api/crates/application/src/workspaces/ports/workspace_repository.rs b/api/crates/application/src/workspaces/ports/workspace_repository.rs
index c892c914..15ceb534 100644
--- a/api/crates/application/src/workspaces/ports/workspace_repository.rs
+++ b/api/crates/application/src/workspaces/ports/workspace_repository.rs
@@ -90,6 +90,9 @@ pub struct WorkspaceInvitationRecord {
     pub accepted_at: Option<DateTime<Utc>>,
     pub revoked_at: Option<DateTime<Utc>>,
     pub created_at: DateTime<Utc>,
+    // E2EE fields
+    pub encrypted_kek_for_invite: Option<String>,
+    pub kek_version: Option<i32>,
 }
 
 #[derive(Debug)]
@@ -238,5 +241,14 @@ pub trait WorkspaceRepository: Send + Sync {
         invitation_id: Uuid,
     ) -> PortResult<Option<WorkspaceInvitationRecord>>;
 
+    /// Update invitation with encrypted KEK for E2EE
+    async fn update_invitation_kek(
+        &self,
+        workspace_id: Uuid,
+        invitation_id: Uuid,
+        encrypted_kek_for_invite: &str,
+        kek_version: i32,
+    ) -> PortResult<Option<WorkspaceInvitationRecord>>;
+
     async fn list_all_workspace_ids(&self) -> PortResult<Vec<Uuid>>;
 }
diff --git a/api/crates/application/src/workspaces/services/mod.rs b/api/crates/application/src/workspaces/services/mod.rs
index d52b15cc..8af2fafa 100644
--- a/api/crates/application/src/workspaces/services/mod.rs
+++ b/api/crates/application/src/workspaces/services/mod.rs
@@ -12,6 +12,7 @@ use domain::workspaces::roles::{WorkspaceBaseRole, WorkspaceRoleKind, WorkspaceS...
 
 pub mod permission_snapshot;
 mod slug;
+pub mod workspace_keys;
 
 use crate::core::services::errors::ServiceError;
 use crate::workspaces::ports::workspace_repository::{
     WorkspaceInvitationRecord, WorkspaceListItem, WorkspaceMemberDetail, WorkspaceMemberRow,
@@ -160,6 +161,15 @@ pub trait WorkspaceServiceFacade: Send + Sync {
         user_email: &str,
     ) -> Result<WorkspaceInvitationRecord, ServiceError>;
 
+    /// Update invitation with encrypted KEK for E2EE
+    async fn update_invitation_kek(
+        &self,
+        workspace_id: Uuid,
+        invitation_id: Uuid,
+        encrypted_kek_for_invite: &str,
+        kek_version: i32,
+    ) -> Result<WorkspaceInvitationRecord, ServiceError>;
+
     async fn ensure_owner_membership(
         &self,
         workspace_id: Uuid,
@@ -374,6 +384,17 @@ impl WorkspaceServiceFacade for WorkspaceService {
         self.accept_invitation(token, user_id, user_email).await
     }
 
+    async fn update_invitation_kek(
+        &self,
+        workspace_id: Uuid,
+        invitation_id: Uuid,
+        encrypted_kek_for_invite: &str,
+        kek_version: i32,
+    ) -> Result<WorkspaceInvitationRecord, ServiceError> {
+        self.update_invitation_kek(workspace_id, invitation_id, encrypted_kek_for_invite, kek_version)
+            .await
+    }
+
     async fn ensure_owner_membership(
         &self,
         workspace_id: Uuid,
@@ -584,6 +605,25 @@ impl WorkspaceService {
         Ok(record)
     }
 
+    /// Update invitation with encrypted KEK for E2EE
+    pub async fn update_invitation_kek(
+        &self,
+        workspace_id: Uuid,
+        invitation_id: Uuid,
+        encrypted_kek_for_invite: &str,
+        kek_version: i32,
+    ) -> Result<WorkspaceInvitationRecord, ServiceError> {
+        let Some(record) = self
+            .repo
+            .update_invitation_kek(workspace_id, invitation_id, encrypted_kek_for_invite, kek_version)
+            .await
+            .map_err(ServiceError::from)?
+        else {
+            return Err(ServiceError::NotFound);
+        };
+        Ok(record)
+    }
+
     pub async fn set_default_workspace(
         &self,
         user_id: Uuid,
diff --git a/api/crates/application/src/workspaces/services/workspace_keys.rs b/api/crates/application/src/workspaces/services/workspace_keys.rs
new file mode 100644
index 00000000..ad41861d
--- /dev/null
+++ b/api/crates/application/src/workspaces/services/workspace_keys.rs
@@ -0,0 +1,165 @@
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use uuid::Uuid;
+
+use crate::core::services::errors::ServiceError;
+use crate::workspaces::dtos::WorkspaceEncryptedKeyDto;
+use crate::workspaces::ports::workspace_keys_repository::{
+    WorkspaceEncryptedKeyRow, WorkspaceKeysRepository,
+};
+
+pub struct WorkspaceKeysService {
+    repo: Arc<dyn WorkspaceKeysRepository>,
+}
+
+#[async_trait]
+pub trait WorkspaceKeysServiceFacade: Send + Sync {
+    /// Get the encrypted KEK for the current user in a workspace
+    async fn get_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+    ) -> Result<Option<WorkspaceEncryptedKeyDto>, ServiceError>;
+
+    /// Store an encrypted KEK for a user (used when sharing workspace key)
+    async fn store_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+        encrypted_kek: Vec<u8>,
+        key_version: i32,
+    ) -> Result<WorkspaceEncryptedKeyDto, ServiceError>;
+
+    /// Get all encrypted KEKs for a workspace (for key rotation)
+    async fn list_encrypted_keks(
+        &self,
+        workspace_id: Uuid,
+    ) -> Result<Vec<WorkspaceEncryptedKeyDto>, ServiceError>;
+
+    /// Get the current key version for a workspace
+    async fn get_current_key_version(&self, workspace_id: Uuid) -> Result<Option<i32>, ServiceError>;
+
+    /// Delete a specific key version (for key rotation cleanup)
+    async fn delete_key_version(
+        &self,
+        workspace_id: Uuid,
+        key_version: i32,
+    ) -> Result<u64, ServiceError>;
+
+    /// Rotate workspace KEK for all members
+    /// Returns the new key version and number of keys updated
+    async fn rotate_keys(
+        &self,
+        workspace_id: Uuid,
+        member_keys: Vec<(Uuid, Vec<u8>)>, // (user_id, encrypted_kek)
+    ) -> Result<(i32, usize), ServiceError>;
+}
+
+impl WorkspaceKeysService {
+    pub fn new(repo: Arc<dyn WorkspaceKeysRepository>) -> Self {
+        Self { repo }
+    }
+}
+
+fn row_to_dto(row: WorkspaceEncryptedKeyRow) -> WorkspaceEncryptedKeyDto {
+    WorkspaceEncryptedKeyDto {
+        id: row.id,
+        workspace_id: row.workspace_id,
+        user_id: row.user_id,
+        encrypted_kek: row.encrypted_kek,
+        key_version: row.key_version,
+        created_at: row.created_at,
+    }
+}
+
+#[async_trait]
+impl WorkspaceKeysServiceFacade for WorkspaceKeysService {
+    async fn get_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+    ) -> Result<Option<WorkspaceEncryptedKeyDto>, ServiceError> {
+        let row = self
+            .repo
+            .get_encrypted_kek(workspace_id, user_id)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(row.map(row_to_dto))
+    }
+
+    async fn store_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+        encrypted_kek: Vec<u8>,
+        key_version: i32,
+    ) -> Result<WorkspaceEncryptedKeyDto, ServiceError> {
+        let row = self
+            .repo
+            .upsert_encrypted_kek(workspace_id, user_id, &encrypted_kek, key_version)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(row_to_dto(row))
+    }
+
+    async fn list_encrypted_keks(
+        &self,
+        workspace_id: Uuid,
+    ) -> Result<Vec<WorkspaceEncryptedKeyDto>, ServiceError> {
+        let rows = self
+            .repo
+            .list_encrypted_keks(workspace_id)
+            .await
+            .map_err(ServiceError::from)?;
+        Ok(rows.into_iter().map(row_to_dto).collect())
+    }
+
+    async fn get_current_key_version(&self, workspace_id: Uuid) -> Result<Option<i32>, ServiceError> {
+        self.repo
+            .get_current_key_version(workspace_id)
+            .await
+            .map_err(ServiceError::from)
+    }
+
+    async fn delete_key_version(
+        &self,
+        workspace_id: Uuid,
+        key_version: i32,
+    ) -> Result<u64, ServiceError> {
+        self.repo
+            .delete_encrypted_kek_version(workspace_id, key_version)
+            .await
+            .map_err(ServiceError::from)
+    }
+
+    async fn rotate_keys(
+        &self,
+        workspace_id: Uuid,
+        member_keys: Vec<(Uuid, Vec<u8>)>,
+    ) -> Result<(i32, usize), ServiceError> {
+        if member_keys.is_empty() {
+            return Err(ServiceError::BadRequest("no_member_keys_provided"));
+        }
+
+        // Get current key version and increment
+        let current_version = self
+            .repo
+            .get_current_key_version(workspace_id)
+            .await
+            .map_err(ServiceError::from)?;
+        let new_version = current_version.unwrap_or(0) + 1;
+
+        // Store encrypted KEKs for all members with new version
+        let mut updated_count = 0;
+        for (user_id, encrypted_kek) in member_keys {
+            self.repo
+                .upsert_encrypted_kek(workspace_id, user_id, &encrypted_kek, new_version)
+                .await
+                .map_err(ServiceError::from)?;
+            updated_count += 1;
+        }
+
+        Ok((new_version, updated_count))
+    }
+}
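A sketch of a rotation call site: the client re-wraps the new KEK for every current member under that member's public key, then submits all the wrapped keys in one call. The wrapping step is client-side and out of scope here, so `members` arrives pre-wrapped; the function name is hypothetical:

async fn rotate_sketch(
    svc: &dyn WorkspaceKeysServiceFacade,
    workspace_id: Uuid,
    members: &[(Uuid, Vec<u8>)], // (user_id, KEK wrapped for that user)
) -> Result<(), ServiceError> {
    let (new_version, updated) = svc.rotate_keys(workspace_id, members.to_vec()).await?;
    tracing::info!(%workspace_id, new_version, updated, "workspace KEK rotated");
    Ok(())
}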
diff --git a/api/crates/bootstrap/src/app/build_runtime.rs b/api/crates/bootstrap/src/app/build_runtime.rs
index 167036f5..a390617f 100644
--- a/api/crates/bootstrap/src/app/build_runtime.rs
+++ b/api/crates/bootstrap/src/app/build_runtime.rs
@@ -20,6 +20,7 @@ use application::core::services::storage::reconcile_scheduler::StorageReconcileS...
 use application::documents::ports::doc_event_log::DocEventLog;
 use application::documents::services::DocumentService;
 use application::documents::services::files::FileService;
+use application::documents::services::keys::DocumentKeysService;
 use application::documents::services::publishing::PublicService;
 use application::documents::services::realtime::snapshot::MarkdownExportProvider;
 use application::documents::services::sharing::ShareService;
@@ -28,6 +29,8 @@ use application::identity::ports::secret_hasher::SecretHasher;
 use application::identity::services::api_tokens::ApiTokenService;
 use application::identity::services::auth::account::AccountService;
 use application::identity::services::auth::token_validation::TokenValidationService;
+use application::identity::services::migration::MigrationService;
+use application::identity::services::user_keys::UserKeysService;
 use application::identity::services::user_shortcuts::UserShortcutService;
 use application::plugins::ports::plugin_event_publisher::PluginEventPublisher;
 use application::plugins::ports::plugin_event_subscriber::PluginEventSubscriber;
@@ -36,6 +39,7 @@ use application::plugins::services::data::PluginDataService;
 use application::plugins::services::execution::PluginExecutionService;
 use application::plugins::services::management::PluginManagementService;
 use application::plugins::services::permissions::PluginPermissionService;
+use application::workspaces::services::workspace_keys::WorkspaceKeysService;
 use application::workspaces::services::{WorkspacePermissionResolver, WorkspaceService};
 use infrastructure::core::storage::{
     FsIngestWatcher, PgStorageIngestQueue, PgStorageReconcileJobs, StorageConsistencyMonitor,
@@ -43,7 +47,6 @@
 use infrastructure::documents::doc_event_log::PgDocEventLog;
 use infrastructure::documents::event_poller::DocEventPoller;
-use infrastructure::documents::exporter::DefaultDocumentExporter;
 use infrastructure::documents::git_dirty_subscriber::GitDirtyDocEventSubscriber;
 use infrastructure::identity::crypto::Argon2SecretHasher;
 use presentation::context::{
@@ -154,6 +157,20 @@
     let share_service = Arc::new(ShareService::new(shares_repo_impl.clone()));
+    let document_keys_repo = Arc::new(
+        infrastructure::documents::db::repositories::document_keys_repository_sqlx::SqlxDocumentKeysRepository::new(
+            pool.clone(),
+        ),
+    );
+    let share_keys_repo = Arc::new(
+        infrastructure::documents::db::repositories::share_keys_repository_sqlx::SqlxShareKeysRepository::new(
+            pool.clone(),
+        ),
+    );
+    let document_keys_service = Arc::new(DocumentKeysService::new(
+        document_keys_repo.clone(),
+        share_keys_repo.clone(),
+    ));
     let access_repo = Arc::new(
         infrastructure::documents::db::repositories::access_repository_sqlx::SqlxAccessRepository::new(
             pool.clone(),
@@ -192,6 +209,12 @@
     let workspace_service = Arc::new(WorkspaceService::new(workspace_repo.clone()));
+    let workspace_keys_repo = Arc::new(
+        infrastructure::workspaces::db::repositories::workspace_keys_repository_sqlx::SqlxWorkspaceKeysRepository::new(
+            pool.clone(),
+        ),
+    );
+    let workspace_keys_service = Arc::new(WorkspaceKeysService::new(workspace_keys_repo.clone()));
     let workspace_permissions: Arc<dyn WorkspacePermissionResolver> = workspace_service.clone();
     {
         let reconcile_service = Arc::new(StorageReconcileService::new(
@@ -213,12 +236,12 @@
             Duration::from_secs(60 * 60),
         );
     }
-    let tag_repo = Arc::new(
-        infrastructure::documents::db::repositories::tag_repository_sqlx::SqlxTagRepository::new(
+    let encrypted_tag_repo = Arc::new(
+        infrastructure::documents::db::repositories::encrypted_tag_repository_sqlx::SqlxEncryptedTagRepository::new(
             pool.clone(),
         ),
     );
-    let tag_service = Arc::new(TagService::new(tag_repo.clone()));
+    let tag_service = Arc::new(TagService::new(encrypted_tag_repo.clone()));
     let api_token_repo = Arc::new(
         infrastructure::identity::db::repositories::api_token_repository_sqlx::SqlxApiTokenRepository::new(
             pool.clone(),
@@ -259,6 +282,27 @@
     let user_shortcut_service =
         Arc::new(UserShortcutService::new(user_shortcuts.clone(), 32 * 1024));
+    let user_keys_repo = Arc::new(
+        infrastructure::identity::db::repositories::user_keys_repository_sqlx::SqlxUserKeysRepository::new(
+            pool.clone(),
+        ),
+    );
+    let user_keys_service = Arc::new(UserKeysService::new(user_keys_repo.clone()));
+    let migration_repo = Arc::new(
+        infrastructure::identity::db::repositories::migration_repository_sqlx::SqlxMigrationRepository::new(
+            pool.clone(),
+        ),
+    );
+    let migration_tx_runner: Arc<dyn MigrationTxRunner> = Arc::new(
+        infrastructure::identity::db::repositories::migration_tx_runner_sqlx::SqlxMigrationTxRunner::new(
+            pool.clone(),
+            migration_repo.clone(),
+        ),
+    );
+    let migration_service = Arc::new(MigrationService::new(
+        migration_repo.clone(),
+        migration_tx_runner,
+    ));
     let realtime_stack = realtime::build_realtime_stack(
         &cfg,
         &pool,
     }
 
     let crate::git::GitStack {
-        workspace: git_workspace,
         service: git_service,
         repo: git_repo,
-        rebuild,
-        rebuild_jobs: git_rebuild_jobs,
-    } = git::build_git_stack(
-        &cfg,
-        &pool,
-        storage_resolver.clone(),
-        snapshot_service_arc.clone(),
-        realtime_engine.clone(),
-        document_repo.clone(),
-        document_repo.clone(),
-        files_repo.clone(),
-        workspace_permissions.clone(),
-        metrics.clone(),
-    )
-    .await?;
-
-    jobs::spawn_git_rebuild_jobs(&mut jobs, spawn_background_tasks, rebuild);
+    } = git::build_git_stack(&cfg, &pool)?;
     let plugin_repo = Arc::new(
         infrastructure::plugins::db::repositories::plugin_repository_sqlx::SqlxPluginRepository::new(
             pool.clone(),
@@ -343,7 +370,6 @@
         plugin_repo.clone(),
         plugin_assets.clone(),
         git_repo.clone(),
-        git_workspace.clone(),
         storage_job_queue.clone(),
         workspace_service.clone(),
     ));
@@ -360,8 +386,6 @@
     let plugin_event_publisher: Arc<dyn PluginEventPublisher> = plugin_event_bus.clone();
     let plugin_event_subscriber: Arc<dyn PluginEventSubscriber> = plugin_event_bus.clone();
 
-    let document_exporter = Arc::new(DefaultDocumentExporter::new());
-
     let document_service = Arc::new(DocumentService::new(
         documents_tx_runner,
         document_repo.clone(),
@@ -373,7 +397,6 @@
         doc_event_log.clone(),
         realtime_engine.clone(),
         snapshot_service_arc.clone(),
-        document_exporter.clone(),
     ));
 
     {
@@ -406,6 +429,7 @@
     let public_service = Arc::new(PublicService::new(
         public_repo.clone(),
         realtime_engine.clone(),
+        storage_resolver.clone(),
     ));
     let plugin_management_service = Arc::new(PluginManagementService::new(
         plugin_installations.clone(),
@@ -442,6 +466,7 @@
         },
         documents: DocumentServicesDeps {
             document_service: document_service.clone(),
+            document_keys_service: document_keys_service.clone(),
             share_service: share_service.clone(),
             file_service: file_service.clone(),
             public_service: public_service.clone(),
@@ -454,6 +479,8 @@
         identity: IdentityServicesDeps {
             api_token_service: api_token_service.clone(),
             user_shortcut_service: user_shortcut_service.clone(),
+            user_keys_service: user_keys_service.clone(),
+            migration_service: migration_service.clone(),
             account_service: account_service.clone(),
             auth_service: auth_stack.auth_service.clone(),
             session_service: auth_stack.session_service.clone(),
@@ -468,6 +495,7 @@
         },
         workspaces: WorkspaceServicesDeps {
             workspace_service: workspace_service.clone(),
+            workspace_keys_service: workspace_keys_service.clone(),
         },
     });
 
@@ -487,7 +515,6 @@
         jobs,
         storage_job_queue,
         storage_reconcile_jobs,
-        git_rebuild_jobs,
         plugin_assets,
     })
 }
diff --git a/api/crates/bootstrap/src/app/mod.rs b/api/crates/bootstrap/src/app/mod.rs
index 7f929441..92e58eb8 100644
--- a/api/crates/bootstrap/src/app/mod.rs
+++ b/api/crates/bootstrap/src/app/mod.rs
@@ -8,7 +8,6 @@ use std::sync::Arc;
 
 use application::core::ports::storage::storage_projection_queue::StorageProjectionQueue;
 use application::core::ports::storage::storage_reconcile_jobs::StorageReconcileJobs;
-use application::git::ports::git_rebuild_job_queue::GitRebuildJobQueue;
 use application::plugins::ports::plugin_asset_store::PluginAssetStore;
 use infrastructure::core::db::PgPool;
 use presentation::context::AppContext;
@@ -21,7 +20,6 @@ pub struct AppRuntime {
     jobs: crate::jobs::Jobs,
     storage_job_queue: Arc<dyn StorageProjectionQueue>,
     storage_reconcile_jobs: Arc<dyn StorageReconcileJobs>,
-    git_rebuild_jobs: Arc<dyn GitRebuildJobQueue>,
     plugin_assets: Arc<dyn PluginAssetStore>,
 }
diff --git a/api/crates/bootstrap/src/app/runtime.rs b/api/crates/bootstrap/src/app/runtime.rs
index b4dc2709..1384e67b 100644
--- a/api/crates/bootstrap/src/app/runtime.rs
+++ b/api/crates/bootstrap/src/app/runtime.rs
@@ -5,7 +5,6 @@ use tracing::{error, info};
 
 use application::core::ports::storage::storage_projection_queue::StorageProjectionQueue;
 use application::core::ports::storage::storage_reconcile_jobs::StorageReconcileJobs;
-use application::git::ports::git_rebuild_job_queue::GitRebuildJobQueue;
 use application::plugins::ports::plugin_asset_store::PluginAssetStore;
 use infrastructure::core::db::PgPool;
 use presentation::context::AppContext;
@@ -21,7 +20,6 @@ type AppRuntimeParts = (
     Jobs,
     Arc<dyn StorageProjectionQueue>,
     Arc<dyn StorageReconcileJobs>,
-    Arc<dyn GitRebuildJobQueue>,
     Arc<dyn PluginAssetStore>,
 );
 
@@ -36,7 +34,6 @@ impl AppRuntime {
             self.jobs,
             self.storage_job_queue,
             self.storage_reconcile_jobs,
-            self.git_rebuild_jobs,
             self.plugin_assets,
         )
     }
@@ -50,7 +47,6 @@ impl AppRuntime {
             mut jobs,
             storage_job_queue: _,
             storage_reconcile_jobs: _,
-            git_rebuild_jobs: _,
             plugin_assets: _,
         } = self;
diff --git a/api/crates/bootstrap/src/git.rs b/api/crates/bootstrap/src/git.rs
index 229d224c..8eb7e536 100644
--- a/api/crates/bootstrap/src/git.rs
+++ b/api/crates/bootstrap/src/git.rs
@@ -1,144 +1,27 @@
 use std::sync::Arc;
 
-use anyhow::Context;
-use tracing::info;
-
-use crate::config::{Config, StorageBackend};
-use application::core::ports::storage::storage_port::StorageResolverPort;
-use application::core::services::metrics::MetricsRegistry;
-use application::documents::services::realtime::snapshot::SnapshotService;
-use application::git::ports::git_rebuild_job_queue::GitRebuildJobQueue;
 use application::git::services::GitService;
-use application::git::services::rebuild::GitRebuildService;
-use application::git::services::rebuild_scheduler::GitRebuildScheduler;
-use application::workspaces::services::WorkspacePermissionResolver;
 use infrastructure::core::db::PgPool;
-use infrastructure::git::PgGitRebuildJobQueue;
-use infrastructure::git::storage::{GitStorageDriverConfig, build_git_storage};
-use infrastructure::git::workspace::GitWorkspaceService;
-
-pub struct GitRebuildStack {
-    pub service: Arc<GitRebuildService>,
-    pub scheduler: GitRebuildScheduler,
-    pub interval: std::time::Duration,
-}
 
 pub struct GitStack {
-    pub workspace: Arc<GitWorkspaceService>,
     pub service: Arc<GitService>,
     pub repo: Arc<dyn application::git::ports::git_repository::GitRepository>,
-    pub rebuild: Option<GitRebuildStack>,
-    pub rebuild_jobs: Arc<dyn GitRebuildJobQueue>,
-}
-
-pub fn git_storage_driver_config(cfg: &Config) -> anyhow::Result<GitStorageDriverConfig> {
-    let uploads_root = std::path::PathBuf::from(&cfg.storage_root);
-    let config = match cfg.storage_backend {
-        StorageBackend::Filesystem => GitStorageDriverConfig::Filesystem {
-            root: uploads_root.clone(),
-        },
-        StorageBackend::S3 => {
-            let s3_settings = infrastructure::git::storage::S3GitStorageConfig {
-                storage_root_prefix: cfg.storage_root.clone(),
-                bucket: cfg
-                    .s3_bucket
-                    .clone()
-                    .context("S3_BUCKET must be configured when using S3 storage backend")?,
-                region: cfg.s3_region.clone(),
-                endpoint: cfg.s3_endpoint.clone(),
-                access_key: cfg.s3_access_key.clone(),
-                secret_key: cfg.s3_secret_key.clone(),
-                use_path_style: cfg.s3_use_path_style,
-            };
-            GitStorageDriverConfig::S3(s3_settings)
-        }
-    };
-    Ok(config)
 }
 
-#[allow(clippy::too_many_arguments)]
-pub async fn build_git_stack(
-    cfg: &Config,
+pub fn build_git_stack(
+    cfg: &crate::config::Config,
     pool: &PgPool,
-    storage_resolver: Arc<dyn StorageResolverPort>,
-    snapshot_service: Arc<SnapshotService>,
-    realtime_engine: Arc<
-        dyn application::documents::ports::realtime::realtime_port::RealtimeEngine,
-    >,
-    document_repo: Arc<dyn application::documents::ports::document_repository::DocumentRepository>,
-    document_paths: Arc<
-        dyn application::documents::ports::document_path_repository::DocumentPathRepository,
-    >,
-    files_repo: Arc<dyn application::documents::ports::files::files_repository::FilesRepository>,
-    workspace_permissions: Arc<dyn WorkspacePermissionResolver>,
-    metrics: Arc<MetricsRegistry>,
 ) -> anyhow::Result<GitStack> {
-    let git_rebuild_jobs: Arc<dyn GitRebuildJobQueue> =
-        Arc::new(PgGitRebuildJobQueue::new(pool.clone()));
-
     let git_repo = Arc::new(
         infrastructure::git::db::repositories::git_repository_sqlx::SqlxGitRepository::new(
             pool.clone(),
             cfg.encryption_key.clone(),
         ),
     );
-    let git_pull_sessions = Arc::new(
-        infrastructure::git::db::repositories::git_pull_session_repository_sqlx::GitPullSessionRepositorySqlx::new(
-            pool.clone(),
-        ),
-    );
-    let git_storage_cfg = git_storage_driver_config(cfg)?;
-    let git_storage = build_git_storage(git_storage_cfg).await?;
-    let gitignore_port = Arc::new(infrastructure::core::storage::gitignore::FsGitignorePort);
-    let git_workspace = Arc::new(GitWorkspaceService::new(
-        pool.clone(),
-        git_storage.clone(),
-        storage_resolver.clone(),
-        snapshot_service.clone(),
-        realtime_engine.clone(),
-        document_repo.clone(),
-        document_paths.clone(),
-    )?);
-    let git_service = Arc::new(GitService::new(
-        git_repo.clone(),
-        storage_resolver.clone(),
-        files_repo.clone(),
-        document_repo.clone(),
-        gitignore_port.clone(),
-        git_workspace.clone(),
-        git_pull_sessions.clone(),
-    ));
-
-    let rebuild = if cfg.git_rebuild_enabled {
-        let rebuild_service = Arc::new(GitRebuildService::new(
-            git_rebuild_jobs.clone(),
-            git_workspace.clone(),
-            git_repo.clone(),
-            metrics.clone(),
-            workspace_permissions,
-        ));
-        let interval = std::time::Duration::from_secs(cfg.git_rebuild_interval_secs);
-        let rebuild_scheduler = GitRebuildScheduler::new(
-            git_rebuild_jobs.clone(),
-            git_repo.clone(),
-            git_workspace.clone(),
-        );
-        info!("git_rebuild_scheduler_enabled");
-        Some(GitRebuildStack {
-            service: rebuild_service,
-            scheduler: rebuild_scheduler,
-            interval,
-        })
-    } else {
-        info!("git_rebuild_scheduler_disabled");
-        None
-    };
+    let git_service = Arc::new(GitService::new(git_repo.clone()));
 
     Ok(GitStack {
-        workspace: git_workspace,
         service: git_service,
         repo: git_repo,
-        rebuild,
-        rebuild_jobs: git_rebuild_jobs,
     })
 }
diff --git a/api/crates/bootstrap/src/http.rs b/api/crates/bootstrap/src/http.rs
index 2b682ae9..e3c50141 100644
--- a/api/crates/bootstrap/src/http.rs
+++ b/api/crates/bootstrap/src/http.rs
@@ -35,6 +35,10 @@ pub async fn build_api_router(cfg: &Config, ctx: AppContext) -> anyhow::Result<Router> {
         http::header::AUTHORIZATION,
         http::header::HeaderName::from_static("x-workspace-id"),
     ];
-    let cors_expose_headers = [http::header::WWW_AUTHENTICATE];
+    let cors_expose_headers = [
+        http::header::WWW_AUTHENTICATE,
+        http::header::HeaderName::from_static("x-encrypted-metadata"),
+        http::header::HeaderName::from_static("x-encrypted-metadata-nonce"),
+        http::header::HeaderName::from_static("x-encrypted-hash"),
+    ];
     let cors = if let Some(origin) = frontend_origin.clone() {
         CorsLayer::new()
             .allow_origin(origin)
diff --git a/api/crates/bootstrap/src/jobs.rs b/api/crates/bootstrap/src/jobs.rs
index 0b229fdc..ec9b1fe4 100644
--- a/api/crates/bootstrap/src/jobs.rs
+++ b/api/crates/bootstrap/src/jobs.rs
@@ -9,7 +9,6 @@ use tokio::time::sleep;
 use tracing::{debug, error, info, warn};
 
 use crate::config::Config;
-use crate::git::GitRebuildStack;
 use application::core::services::storage::reconcile::StorageReconcileService;
 use application::core::services::storage::reconcile_scheduler::StorageReconcileScheduler;
 use application::core::services::worker::WorkerTick;
@@ -204,38 +203,6 @@ pub fn spawn_storage_ingest_worker(
     }
 }
 
-pub fn spawn_git_rebuild_jobs(
-    jobs: &mut Jobs,
-    spawn_background_tasks: bool,
-    rebuild: Option<GitRebuildStack>,
-) {
-    if !spawn_background_tasks {
-        return;
-    }
-    if let Some(rebuild) = rebuild {
-        let svc = rebuild.service.clone();
-        jobs.spawn("git_rebuild_worker", async move {
-            let idle = Duration::from_secs(1);
-            loop {
-                match svc.tick().await {
-                    Ok(WorkerTick::Processed) => continue,
-                    Ok(WorkerTick::Idle) => sleep(idle).await,
-                    Err(err) => {
-                        error!(error = ?err, "git_rebuild_worker_tick_failed");
-                        sleep(idle).await;
-                    }
-                }
-            }
-        });
-        jobs.spawn("git_rebuild_scheduler", async move {
-            loop {
-                rebuild.scheduler.tick().await;
-                sleep(rebuild.interval).await;
-            }
-        });
-    }
-}
-
 pub fn spawn_plugin_prefetch(
     jobs: &mut Jobs,
     spawn_background_tasks: bool,
diff --git a/api/crates/bootstrap/src/realtime.rs b/api/crates/bootstrap/src/realtime.rs
index 21fe9f0a..f843f332 100644
--- a/api/crates/bootstrap/src/realtime.rs
+++ b/api/crates/bootstrap/src/realtime.rs
@@ -77,12 +77,6 @@ pub async fn build_realtime_stack(
             pool.clone(),
         ),
     );
-    let tagging_repo: Arc<dyn TaggingRepository> =
-        Arc::new(
-            infrastructure::documents::db::repositories::tagging_repository_sqlx::SqlxTaggingRepository::new(
-                pool.clone(),
-            ),
-        );
     let hydration_service = Arc::new(DocHydrationService::new(
         doc_state_reader.clone(),
         backlog_reader,
@@ -92,7 +86,6 @@ pub async fn build_realtime_stack(
         doc_state_reader.clone(),
         doc_persistence.clone(),
         linkgraph_repo,
-        tagging_repo,
         snapshot_archive_repo.clone(),
         storage_job_queue.clone(),
     ));
diff --git a/api/crates/bootstrap/src/telemetry.rs b/api/crates/bootstrap/src/telemetry.rs
index 67c1262b..21d6a99c 100644
--- a/api/crates/bootstrap/src/telemetry.rs
+++ b/api/crates/bootstrap/src/telemetry.rs
@@ -1,10 +1,17 @@
 pub fn init_tracing() {
+    use std::io;
+    use tracing_subscriber::fmt::writer::MakeWriterExt;
+
+    let filter = std::env::var("RUST_LOG")
+        .unwrap_or_else(|_| "api=debug,warp=info,axum=info,tower_http=info".into());
+
     tracing_subscriber::fmt()
-        .with_env_filter(
-            std::env::var("RUST_LOG")
-                .unwrap_or_else(|_| "api=debug,warp=info,axum=info,tower_http=info".into()),
-        )
+        .with_env_filter(&filter)
+        .with_writer(io::stderr.with_max_level(tracing::Level::TRACE))
+        .with_ansi(false)
         .init();
+
+    eprintln!("[telemetry] tracing initialized with filter: {filter}");
 }
 
 #[cfg(test)]
diff --git a/api/crates/cli/src/cli.rs b/api/crates/cli/src/cli.rs
index 7974d5a9..3b5e25e2 100644
--- a/api/crates/cli/src/cli.rs
+++ b/api/crates/cli/src/cli.rs
@@ -31,11 +31,6 @@ pub(crate) enum Command {
         #[command(subcommand)]
         command: WorkspaceCommand,
     },
-    /// Git workspace helpers
-    Git {
-        #[command(subcommand)]
-        command: GitCommand,
-    },
     /// Plugin asset utilities
     Plugins {
         #[command(subcommand)]
@@ -123,11 +118,6 @@ pub(crate) enum JobsCommand {
         #[command(subcommand)]
         command: ReconcileCommand,
     },
-    /// Git rebuild job operations
-    GitRebuild {
-        #[command(subcommand)]
-        command: GitRebuildCommand,
-    },
 }
 
 #[derive(Subcommand)]
@@ -188,19 +178,6 @@ pub(crate) enum ReconcileCommand {
     },
 }
 
-#[derive(Subcommand)]
-pub(crate) enum GitRebuildCommand {
-    /// Print git rebuild queue metrics
-    Stats,
-    /// Enqueue a git rebuild job for a workspace
-    Enqueue {
-        #[arg(long)]
-        workspace_id: Uuid,
-        #[arg(long)]
-        actor_id: Option<Uuid>,
-    },
-}
-
 #[derive(Subcommand)]
 pub(crate) enum WorkspaceCommand {
     /// List all workspaces
@@ -260,25 +237,6 @@ pub(crate) enum ShareCommand {
     },
 }
 
-#[derive(Subcommand)]
-pub(crate) enum GitCommand {
-    /// Show git workspace status summary
-    Status {
-        #[arg(long)]
-        workspace_id: Uuid,
-    },
-    /// List dirty changes tracked for a workspace
-    Changes {
-        #[arg(long)]
-        workspace_id: Uuid,
-    },
-    /// Remove git workspace data (DB + storage)
-    Remove {
-        #[arg(long)]
-        workspace_id: Uuid,
-    },
-}
-
 #[derive(Subcommand)]
 pub(crate) enum PluginCommand {
     /// List latest global plugin manifests
diff --git a/api/crates/cli/src/commands/git.rs b/api/crates/cli/src/commands/git.rs
deleted file mode 100644
index 99c94a90..00000000
--- a/api/crates/cli/src/commands/git.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-use anyhow::Result;
-
-use application::git::ports::git_workspace::GitWorkspacePort;
-use bootstrap::application;
-
-use crate::cli::GitCommand;
-use crate::deps::Deps;
-
-pub(crate) async fn handle(deps: &Deps, cmd: GitCommand) -> Result<()> {
-    match cmd {
-        GitCommand::Status { workspace_id } => {
-            let status = deps.git_workspace.status(workspace_id).await?;
-            println!(
-                "initialized={} branch={:?} uncommitted_changes={} untracked_files={}",
-                status.repository_initialized,
-                status.current_branch,
-                status.uncommitted_changes,
-                status.untracked_files
-            );
-            Ok(())
-        }
-        GitCommand::Changes { workspace_id } => {
-            let changes = deps.git_workspace.list_changes(workspace_id).await?;
-            println!("{} change(s)", changes.len());
-            for c in changes {
-                println!("{} {}", c.status, c.path);
-            }
-            Ok(())
-        }
-        GitCommand::Remove { workspace_id } => {
-            deps.git_workspace.remove_repository(workspace_id).await?;
-            println!("removed git workspace {}", workspace_id);
-            Ok(())
-        }
-    }
-}
diff --git a/api/crates/cli/src/commands/jobs.rs b/api/crates/cli/src/commands/jobs.rs
index 57b667d4..8621135a 100644
--- a/api/crates/cli/src/commands/jobs.rs
+++ b/api/crates/cli/src/commands/jobs.rs
@@ -11,10 +11,7 @@ use infrastructure::core::db::PgPool;
 
 use application::core::ports::storage::storage_ingest_queue::StorageIngestQueue;
 
-use crate::cli::{
-    GitRebuildCommand, IngestCommand, IngestKindArg, JobsCommand, ProjectionCommand,
-    ReconcileCommand,
-};
+use crate::cli::{IngestCommand, IngestKindArg, JobsCommand, ProjectionCommand, ReconcileCommand};
 use crate::deps::Deps;
 
 pub(crate) async fn handle(deps: &Deps, cmd: JobsCommand) -> Result<()> {
@@ -62,23 +59,6 @@ pub(crate) async fn handle(deps: &Deps, cmd: JobsCommand) -> Result<()> {
                 Ok(())
             }
         },
-        JobsCommand::GitRebuild { command } => match command {
-            GitRebuildCommand::Stats => print_git_rebuild_stats(&deps.pool).await,
-            GitRebuildCommand::Enqueue {
-                workspace_id,
-                actor_id,
-            } => {
-                let permissions = PermissionSet::all().to_vec();
-                deps.git_rebuild_jobs
-                    .enqueue(workspace_id, actor_id, &permissions)
-                    .await?;
-                println!(
-                    "enqueued git rebuild workspace={} actor_id={:?}",
-                    workspace_id, actor_id
-                );
-                Ok(())
-            }
-        },
     }
 }
 
@@ -199,45 +179,3 @@ async fn print_reconcile_stats(pool: &PgPool) -> Result<()> {
     }
     Ok(())
 }
-
-async fn print_git_rebuild_stats(pool: &PgPool) -> Result<()> {
-    let row = sqlx::query(
-        r#"SELECT
-            COUNT(*) FILTER (WHERE locked_at IS NULL) AS pending,
-            COUNT(*) FILTER (WHERE locked_at IS NOT NULL) AS locked,
-            COUNT(*) FILTER (WHERE pending_retry) AS retrying,
-            COUNT(*) AS total,
-            MIN(updated_at) FILTER (WHERE locked_at IS NOT NULL) AS oldest_locked_at,
-            MIN(created_at) FILTER (WHERE locked_at IS NULL) AS oldest_pending_created
-        FROM git_rebuild_jobs"#,
-    )
-    .fetch_one(pool)
-    .await?;
-
-    let pending: i64 = row.try_get("pending").unwrap_or(0);
-    let locked: i64 = row.try_get("locked").unwrap_or(0);
-    let retrying: i64 = row.try_get("retrying").unwrap_or(0);
-    let total: i64 = row.try_get("total").unwrap_or(0);
-    let oldest_locked_at: Option<DateTime<Utc>> = row.try_get("oldest_locked_at").ok();
-    let oldest_pending: Option<DateTime<Utc>> = row.try_get("oldest_pending_created").ok();
-
-    println!("git_rebuild.total={total}");
-    println!("git_rebuild.pending={pending}");
-    println!("git_rebuild.locked={locked}");
-    println!("git_rebuild.retrying={retrying}");
-    match oldest_pending {
-        Some(ts) => println!(
-            "git_rebuild.oldest_pending_age_secs={}",
-            (Utc::now() - ts).num_seconds()
-        ),
-        None => println!("git_rebuild.oldest_pending_age_secs=-"),
-    }
-    match oldest_locked_at {
-        Some(ts) => println!(
-            "git_rebuild.oldest_locked_age_secs={}",
-            (Utc::now() - ts).num_seconds()
-        ),
-        None => println!("git_rebuild.oldest_locked_age_secs=-"),
-    }
-    Ok(())
-}
diff --git a/api/crates/cli/src/commands/mod.rs
b/api/crates/cli/src/commands/mod.rs index 899dbf04..f8a89460 100644 --- a/api/crates/cli/src/commands/mod.rs +++ b/api/crates/cli/src/commands/mod.rs @@ -3,7 +3,6 @@ use anyhow::Result; use super::cli::Command; use super::deps::Deps; -mod git; mod jobs; mod openapi; mod plugins; @@ -21,7 +20,6 @@ pub(crate) async fn run(deps: &Deps, command: Command) -> Result<()> { Command::Users { command } => users::handle(deps, command).await, Command::Jobs { command } => jobs::handle(deps, command).await, Command::Workspaces { command } => workspaces::handle(deps, command).await, - Command::Git { command } => git::handle(deps, command).await, Command::Plugins { command } => plugins::handle(deps, command).await, Command::Tokens { command } => tokens::handle(deps, command).await, Command::Shares { command } => shares::handle(deps, command).await, diff --git a/api/crates/cli/src/commands/users.rs b/api/crates/cli/src/commands/users.rs index 32f11613..5418d730 100644 --- a/api/crates/cli/src/commands/users.rs +++ b/api/crates/cli/src/commands/users.rs @@ -164,7 +164,6 @@ async fn delete_user(deps: &Deps, user_id: Uuid) -> Result<()> { plugin_repo: &deps.plugin_repo, plugin_assets: deps.plugin_assets.clone(), git_repo: &deps.git_repo, - git_workspace: deps.git_workspace.as_ref(), storage_jobs: deps.storage_jobs.as_ref(), files_repo: &deps.files_repo, }; diff --git a/api/crates/cli/src/deps.rs b/api/crates/cli/src/deps.rs index 006a0977..7721eaa5 100644 --- a/api/crates/cli/src/deps.rs +++ b/api/crates/cli/src/deps.rs @@ -6,31 +6,25 @@ use bootstrap::app::AppBuilder; use bootstrap::application::core::ports::storage::storage_ingest_queue::StorageIngestQueue; use bootstrap::application::core::ports::storage::storage_projection_queue::StorageProjectionQueue; use bootstrap::application::core::ports::storage::storage_reconcile_jobs::StorageReconcileJobs; -use bootstrap::application::git::ports::git_rebuild_job_queue::GitRebuildJobQueue; use bootstrap::application::plugins::ports::plugin_asset_store::PluginAssetStore; use bootstrap::application::workspaces::services::WorkspaceServiceFacade; use bootstrap::config::Config; -use bootstrap::git::git_storage_driver_config; use bootstrap::infrastructure::core::db::PgPool; use bootstrap::infrastructure::documents::db::repositories::document_repository_sqlx::SqlxDocumentRepository; use bootstrap::infrastructure::documents::db::repositories::files_repository_sqlx::SqlxFilesRepository; use bootstrap::infrastructure::documents::db::repositories::shares_repository_sqlx::SqlxSharesRepository; -use bootstrap::infrastructure::git::storage::build_git_storage; use bootstrap::infrastructure::identity::db::repositories::api_token_repository_sqlx::SqlxApiTokenRepository; use bootstrap::infrastructure::identity::db::repositories::user_repository_sqlx::SqlxUserRepository; use bootstrap::infrastructure::identity::db::repositories::user_session_repository_sqlx::SqlxUserSessionRepository; use bootstrap::infrastructure::plugins::db::repositories::plugin_installation_repository_sqlx::SqlxPluginInstallationRepository; use bootstrap::infrastructure::plugins::db::repositories::plugin_repository_sqlx::SqlxPluginRepository; -use super::git_workspace::CliGitWorkspace; - pub(crate) struct Deps { pub(crate) pool: PgPool, pub(crate) user_repo: SqlxUserRepository, pub(crate) workspace_service: Arc, pub(crate) ingest_queue: Arc, pub(crate) reconcile_jobs: Arc, - pub(crate) git_rebuild_jobs: Arc, pub(crate) session_repo: SqlxUserSessionRepository, pub(crate) document_repo: SqlxDocumentRepository, 
pub(crate) files_repo: SqlxFilesRepository, @@ -42,7 +36,6 @@ pub(crate) struct Deps { pub(crate) git_repo: bootstrap::infrastructure::git::db::repositories::git_repository_sqlx::SqlxGitRepository, pub(crate) storage_jobs: Arc, - pub(crate) git_workspace: Arc, } pub(crate) async fn build(database_url: Option) -> Result { @@ -63,7 +56,6 @@ pub(crate) async fn build(database_url: Option) -> Result { _jobs, storage_jobs, reconcile_jobs, - git_rebuild_jobs, plugin_assets, ) = runtime.into_parts(); @@ -82,9 +74,6 @@ pub(crate) async fn build(database_url: Option) -> Result { pool.clone(), cfg.encryption_key.clone(), ); - let git_storage_cfg = git_storage_driver_config(&cfg)?; - let git_storage = build_git_storage(git_storage_cfg).await?; - let git_workspace = Arc::new(CliGitWorkspace::new(pool.clone(), git_storage.clone())); Ok(Deps { pool, @@ -93,7 +82,6 @@ pub(crate) async fn build(database_url: Option) -> Result { ingest_queue, storage_jobs, reconcile_jobs, - git_rebuild_jobs, session_repo, document_repo, files_repo, @@ -103,6 +91,5 @@ pub(crate) async fn build(database_url: Option) -> Result { shares_repo, plugin_assets, git_repo, - git_workspace, }) } diff --git a/api/crates/cli/src/git_workspace.rs b/api/crates/cli/src/git_workspace.rs deleted file mode 100644 index 72e0a872..00000000 --- a/api/crates/cli/src/git_workspace.rs +++ /dev/null @@ -1,365 +0,0 @@ -use std::sync::Arc; - -use chrono::{DateTime, Utc}; -use sqlx::{Row, types::Json}; -use uuid::Uuid; - -use bootstrap::{application, infrastructure}; - -use application::core::ports::errors::PortResult; -use application::git::ports::git_storage::GitStorage; -use application::git::ports::git_workspace::GitWorkspacePort; -use infrastructure::core::db::PgPool; - -pub(crate) struct CliGitWorkspace { - pool: PgPool, - git_storage: Arc, -} - -impl CliGitWorkspace { - pub(crate) fn new(pool: PgPool, git_storage: Arc) -> Self { - Self { pool, git_storage } - } - - async fn load_repository_state( - &self, - workspace_id: Uuid, - ) -> anyhow::Result> { - let row = sqlx::query( - "SELECT initialized, default_branch FROM git_repository_state WHERE workspace_id = $1", - ) - .bind(workspace_id) - .fetch_optional(&self.pool) - .await?; - Ok(row.map(|r| (r.get("initialized"), r.get("default_branch")))) - } - - async fn latest_commit_meta( - &self, - workspace_id: Uuid, - ) -> anyhow::Result> { - let row = sqlx::query( - r#"SELECT commit_id, parent_commit_id, message, author_name, author_email, - committed_at, pack_key, file_hash_index - FROM git_commits - WHERE workspace_id = $1 - ORDER BY committed_at DESC - LIMIT 1"#, - ) - .bind(workspace_id) - .fetch_optional(&self.pool) - .await?; - - row.map(row_to_commit_meta).transpose() - } - - async fn fetch_dirty(&self, workspace_id: Uuid) -> anyhow::Result> { - let rows = sqlx::query( - r#"SELECT path, is_text, op, content_hash - FROM git_dirty_files - WHERE workspace_id = $1 - ORDER BY created_at ASC"#, - ) - .bind(workspace_id) - .fetch_all(&self.pool) - .await?; - - let mut out = Vec::new(); - for r in rows { - let path: String = r.get("path"); - let op: String = r.get("op"); - let content_hash: Option = r.try_get("content_hash").ok(); - out.push(DirtyRow { - path, - op, - content_hash, - }); - } - Ok(out) - } -} - -struct DirtyRow { - path: String, - op: String, - content_hash: Option, -} - -#[async_trait::async_trait] -impl GitWorkspacePort for CliGitWorkspace { - async fn ensure_repository( - &self, - _workspace_id: Uuid, - _default_branch: &str, - ) -> PortResult<()> { - 
Err(anyhow::anyhow!("ensure_repository not supported in refmd CLI").into()) - } - - async fn remove_repository(&self, workspace_id: Uuid) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let mut tx = self.pool.begin().await?; - sqlx::query("DELETE FROM git_dirty_files WHERE workspace_id = $1") - .bind(workspace_id) - .execute(&mut *tx) - .await?; - sqlx::query("DELETE FROM git_commits WHERE workspace_id = $1") - .bind(workspace_id) - .execute(&mut *tx) - .await?; - sqlx::query( - "UPDATE git_repository_state SET initialized = false, updated_at = now() WHERE workspace_id = $1", - ) - .bind(workspace_id) - .execute(&mut *tx) - .await?; - tx.commit().await?; - self.git_storage.delete_all(workspace_id).await?; - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn status( - &self, - workspace_id: Uuid, - ) -> PortResult { - let out: anyhow::Result = async { - let state = self.load_repository_state(workspace_id).await?; - let Some((initialized, branch)) = state else { - return Ok(application::git::dtos::GitWorkspaceStatus { - repository_initialized: false, - current_branch: None, - uncommitted_changes: 0, - untracked_files: 0, - }); - }; - if !initialized { - return Ok(application::git::dtos::GitWorkspaceStatus { - repository_initialized: false, - current_branch: Some(branch), - uncommitted_changes: 0, - untracked_files: 0, - }); - } - - let latest = self.latest_commit_meta(workspace_id).await?; - let previous_index: std::collections::HashMap = latest - .as_ref() - .map(|c| c.file_hash_index.clone()) - .unwrap_or_default(); - - let dirty = self.fetch_dirty(workspace_id).await?; - let mut added: u32 = 0; - let mut modified: u32 = 0; - let mut deleted: u32 = 0; - - for d in dirty.iter() { - match d.op.as_str() { - "upsert" => { - if let Some(prev_hash) = previous_index.get(&d.path) { - match d.content_hash.as_ref() { - Some(h) if h == prev_hash => {} - _ => modified += 1, - } - } else { - added += 1; - } - } - "delete" => { - deleted += 1; - } - _ => {} - } - } - - Ok(application::git::dtos::GitWorkspaceStatus { - repository_initialized: true, - current_branch: Some(branch), - uncommitted_changes: modified + deleted, - untracked_files: added, - }) - } - .await; - out.map_err(Into::into) - } - - async fn list_changes( - &self, - workspace_id: Uuid, - ) -> PortResult> { - let out: anyhow::Result> = async { - if let Some((initialized, _)) = self.load_repository_state(workspace_id).await? 
{ - if !initialized { - return Ok(Vec::new()); - } - } else { - return Ok(Vec::new()); - } - - let latest = self.latest_commit_meta(workspace_id).await?; - let previous_index: std::collections::HashMap = latest - .as_ref() - .map(|c| c.file_hash_index.clone()) - .unwrap_or_default(); - let dirty = self.fetch_dirty(workspace_id).await?; - - let mut out = Vec::new(); - for d in dirty { - let status = match d.op.as_str() { - "delete" => "deleted", - "upsert" => { - if previous_index.contains_key(&d.path) { - "modified" - } else { - "added" - } - } - _ => "unknown", - }; - out.push(application::git::dtos::GitChangeItem { - path: d.path, - status: status.to_string(), - }); - } - Ok(out) - } - .await; - out.map_err(Into::into) - } - - async fn working_diff( - &self, - _workspace_id: Uuid, - ) -> PortResult> { - Err(anyhow::anyhow!("working_diff not supported in refmd CLI").into()) - } - - async fn commit_diff( - &self, - _workspace_id: Uuid, - _from: &str, - _to: &str, - ) -> PortResult> { - Err(anyhow::anyhow!("commit_diff not supported in refmd CLI").into()) - } - - async fn history( - &self, - _workspace_id: Uuid, - ) -> PortResult> { - Err(anyhow::anyhow!("history not supported in refmd CLI").into()) - } - - async fn sync( - &self, - _workspace_id: Uuid, - _req: &application::git::dtos::GitSyncRequestDto, - _cfg: Option<&application::git::ports::git_repository::UserGitCfg>, - ) -> PortResult { - Err(anyhow::anyhow!("sync not supported in refmd CLI").into()) - } - - async fn pull( - &self, - _workspace_id: Uuid, - _actor_id: Uuid, - _req: &application::git::dtos::GitPullRequestDto, - _cfg: &application::git::ports::git_repository::UserGitCfg, - ) -> PortResult { - Err(anyhow::anyhow!("pull not supported in refmd CLI").into()) - } - - async fn import_repository( - &self, - _workspace_id: Uuid, - _actor_id: Uuid, - _cfg: &application::git::ports::git_repository::UserGitCfg, - ) -> PortResult { - Err(anyhow::anyhow!("import not supported in refmd CLI").into()) - } - - async fn head_commit(&self, workspace_id: Uuid) -> PortResult>> { - let out: anyhow::Result>> = async { - Ok(self - .latest_commit_meta(workspace_id) - .await? - .map(|m| m.commit_id)) - } - .await; - out.map_err(Into::into) - } - - async fn remote_head( - &self, - _workspace_id: Uuid, - _cfg: &application::git::ports::git_repository::UserGitCfg, - ) -> PortResult>> { - Ok(None) - } - - async fn has_pending_changes(&self, workspace_id: Uuid) -> PortResult { - let out: anyhow::Result = async { - let dirty_rows = self.fetch_dirty(workspace_id).await?; - Ok(!dirty_rows.is_empty()) - } - .await; - out.map_err(Into::into) - } - - async fn drift_since_commit(&self, workspace_id: Uuid, base_commit: &[u8]) -> PortResult { - let out: anyhow::Result = async { - // CLI helper: fallback to dirty check when full state comparison is not available. - if self.has_pending_changes(workspace_id).await? { - return Ok(true); - } - // If the base commit is not the latest, consider it stale. 
- let latest = self.latest_commit_meta(workspace_id).await?; - if let Some(meta) = latest - && meta.commit_id.as_slice() != base_commit - { - return Ok(true); - } - Ok(false) - } - .await; - out.map_err(Into::into) - } - - async fn check_remote( - &self, - _workspace_id: Uuid, - _cfg: &application::git::ports::git_repository::UserGitCfg, - ) -> PortResult { - Ok(application::git::dtos::GitRemoteCheckDto { - ok: false, - message: "remote check not supported in CLI".to_string(), - reason: Some("unsupported".to_string()), - }) - } -} - -fn row_to_commit_meta( - row: sqlx::postgres::PgRow, -) -> anyhow::Result { - let commit_id: Vec = row.get("commit_id"); - let parent_commit_id: Option> = row.try_get("parent_commit_id").ok(); - let message: Option = row.try_get("message").ok(); - let author_name: Option = row.try_get("author_name").ok(); - let author_email: Option = row.try_get("author_email").ok(); - let committed_at: DateTime = row.get("committed_at"); - let pack_key: String = row.get("pack_key"); - let file_hash_index: Json> = - row.get("file_hash_index"); - - Ok(application::git::ports::git_storage::CommitMeta { - commit_id, - parent_commit_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index: file_hash_index.0, - }) -} diff --git a/api/crates/cli/src/lib.rs b/api/crates/cli/src/lib.rs index ae058049..ef8569f4 100644 --- a/api/crates/cli/src/lib.rs +++ b/api/crates/cli/src/lib.rs @@ -2,7 +2,6 @@ mod cli; mod commands; mod deps; -mod git_workspace; use anyhow::Result; use clap::Parser; diff --git a/api/crates/domain/src/documents/document.rs b/api/crates/domain/src/documents/document.rs index 6faea782..edfd6790 100644 --- a/api/crates/domain/src/documents/document.rs +++ b/api/crates/domain/src/documents/document.rs @@ -22,6 +22,9 @@ pub struct Document { archived_at: Option>, archived_by: Option, archived_parent_id: Option, + // E2EE fields + encrypted_title: Option>, + encrypted_title_nonce: Option>, } impl Document { @@ -43,6 +46,8 @@ impl Document { archived_at: Option>, archived_by: Option, archived_parent_id: Option, + encrypted_title: Option>, + encrypted_title_nonce: Option>, ) -> Self { Self { id, @@ -61,6 +66,8 @@ impl Document { archived_at, archived_by, archived_parent_id, + encrypted_title, + encrypted_title_nonce, } } @@ -127,6 +134,14 @@ impl Document { pub fn archived_parent_id(&self) -> Option { self.archived_parent_id } + + pub fn encrypted_title(&self) -> Option<&[u8]> { + self.encrypted_title.as_deref() + } + + pub fn encrypted_title_nonce(&self) -> Option<&[u8]> { + self.encrypted_title_nonce.as_deref() + } } #[derive(Debug, Clone)] diff --git a/api/crates/domain/src/documents/keys.rs b/api/crates/domain/src/documents/keys.rs new file mode 100644 index 00000000..2ad254ea --- /dev/null +++ b/api/crates/domain/src/documents/keys.rs @@ -0,0 +1,55 @@ +//! 
E2EE key types for documents
+
+use chrono::{DateTime, Utc};
+use uuid::Uuid;
+
+use crate::identity::keys::KdfParams;
+
+/// Document encrypted key (DEK encrypted with workspace KEK)
+#[derive(Debug, Clone)]
+pub struct DocumentEncryptedKey {
+    pub document_id: Uuid,
+    pub encrypted_dek: Vec<u8>,
+    pub nonce: Vec<u8>,
+    pub key_version: i32,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: DateTime<Utc>,
+}
+
+/// Share encrypted key (DEK encrypted for share access)
+#[derive(Debug, Clone)]
+pub struct ShareEncryptedKey {
+    pub share_id: Uuid,
+    pub encrypted_dek: Vec<u8>,
+    /// Salt for password-protected shares (optional)
+    pub salt: Option<Vec<u8>>,
+    /// KDF params for password-protected shares (optional)
+    pub kdf_params: Option<KdfParams>,
+    pub created_at: DateTime<Utc>,
+}
+
+impl ShareEncryptedKey {
+    pub fn is_password_protected(&self) -> bool {
+        self.salt.is_some()
+    }
+}
+
+/// Public document content (plaintext for published documents)
+#[derive(Debug, Clone)]
+pub struct PublicDocumentContent {
+    pub document_id: Uuid,
+    pub content: String,
+    pub title: String,
+    pub content_hash: String,
+    pub updated_at: DateTime<Utc>,
+}
+
+/// Encrypted tag index entry (deterministic encryption for searchable tags)
+#[derive(Debug, Clone)]
+pub struct EncryptedTagIndex {
+    pub id: Uuid,
+    pub workspace_id: Uuid,
+    pub document_id: Uuid,
+    pub encrypted_tag: Vec<u8>,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/api/crates/domain/src/documents/mod.rs b/api/crates/domain/src/documents/mod.rs
index a2d007e2..a2fd50cb 100644
--- a/api/crates/domain/src/documents/mod.rs
+++ b/api/crates/domain/src/documents/mod.rs
@@ -3,6 +3,7 @@ pub mod delete_plan;
 pub mod doc_type;
 pub mod document;
 pub mod hierarchy;
+pub mod keys;
 pub mod meta;
 pub mod path;
 pub mod permissions;
diff --git a/api/crates/domain/src/git/mod.rs b/api/crates/domain/src/git/mod.rs
index f1c614ea..c9a36371 100644
--- a/api/crates/domain/src/git/mod.rs
+++ b/api/crates/domain/src/git/mod.rs
@@ -1,6 +1,3 @@
-// Git sync/pull/ignore/rebuild domain lives here.
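The key types above encode a conventional envelope-encryption hierarchy: each document gets its own random DEK, and only the DEK wrapped under the workspace KEK is ever persisted. A minimal sketch of how a `DocumentEncryptedKey` would be produced, assuming the `encrypt_dek` helper and `CryptoError` type from the `xchacha20` module added later in this diff; `wrap_document_key` itself is illustrative, not part of the change:

```rust
// Sketch only: wrap a freshly generated document DEK under the workspace KEK
// and record it as a DocumentEncryptedKey. Paths follow this diff's crate
// layout; the function and its call site are hypothetical.
use chrono::Utc;
use uuid::Uuid;

use domain::documents::keys::DocumentEncryptedKey;
use infrastructure::core::crypto::xchacha20::{encrypt_dek, CryptoError};

fn wrap_document_key(
    kek: &[u8; 32], // workspace KEK
    dek: &[u8; 32], // random per-document DEK
    document_id: Uuid,
) -> Result<DocumentEncryptedKey, CryptoError> {
    // encrypt_dek draws a fresh 24-byte XChaCha20 nonce internally and
    // returns it alongside the wrapped key; both are persisted together.
    let (encrypted_dek, nonce) = encrypt_dek(kek, dek)?;
    let now = Utc::now();
    Ok(DocumentEncryptedKey {
        document_id,
        encrypted_dek,
        nonce: nonce.to_vec(),
        key_version: 1,
        created_at: now,
        updated_at: now,
    })
}
```

Unwrapping is the mirror image: `decrypt_dek(kek, &row.encrypted_dek, &row.nonce)` recovers the 32-byte DEK, so the database only ever sees ciphertext and nonces.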
+// Git authentication domain pub mod auth; -pub mod policy; -pub mod pull_session; -pub mod sync_log; diff --git a/api/crates/domain/src/git/policy.rs b/api/crates/domain/src/git/policy.rs deleted file mode 100644 index 80700d5a..00000000 --- a/api/crates/domain/src/git/policy.rs +++ /dev/null @@ -1,31 +0,0 @@ -use crate::access::permissions::{PERM_GIT_SYNC, PermissionSet}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum GitPolicyError { - Forbidden, -} - -pub fn ensure_git_sync_allowed(permissions: &PermissionSet) -> Result<(), GitPolicyError> { - if permissions.allows(PERM_GIT_SYNC) { - Ok(()) - } else { - Err(GitPolicyError::Forbidden) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::access::permissions::PERM_GIT_SYNC; - - #[test] - fn git_sync_requires_permission() { - let perms = PermissionSet::default(); - assert_eq!( - ensure_git_sync_allowed(&perms), - Err(GitPolicyError::Forbidden) - ); - let perms = PermissionSet::from_slice(&[PERM_GIT_SYNC]); - assert_eq!(ensure_git_sync_allowed(&perms), Ok(())); - } -} diff --git a/api/crates/domain/src/git/pull_session.rs b/api/crates/domain/src/git/pull_session.rs deleted file mode 100644 index f06e3a0d..00000000 --- a/api/crates/domain/src/git/pull_session.rs +++ /dev/null @@ -1,110 +0,0 @@ -use std::fmt; -use std::str::FromStr; - -use serde::{Deserialize, Serialize}; - -pub const GIT_PULL_STATUS_PENDING: &str = "pending"; -pub const GIT_PULL_STATUS_RESOLVING: &str = "resolving"; -pub const GIT_PULL_STATUS_MERGED: &str = "merged"; -pub const GIT_PULL_STATUS_STALE: &str = "stale"; -pub const GIT_PULL_STATUS_ERROR: &str = "error"; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum GitPullSessionStatus { - Pending, - Resolving, - Merged, - Stale, - Error, -} - -impl GitPullSessionStatus { - pub fn parse(value: &str) -> Option { - match value.trim() { - GIT_PULL_STATUS_PENDING => Some(Self::Pending), - GIT_PULL_STATUS_RESOLVING => Some(Self::Resolving), - GIT_PULL_STATUS_MERGED => Some(Self::Merged), - GIT_PULL_STATUS_STALE => Some(Self::Stale), - GIT_PULL_STATUS_ERROR => Some(Self::Error), - _ => None, - } - } - - pub const fn as_str(self) -> &'static str { - match self { - Self::Pending => GIT_PULL_STATUS_PENDING, - Self::Resolving => GIT_PULL_STATUS_RESOLVING, - Self::Merged => GIT_PULL_STATUS_MERGED, - Self::Stale => GIT_PULL_STATUS_STALE, - Self::Error => GIT_PULL_STATUS_ERROR, - } - } - - pub const fn is_in_progress(self) -> bool { - matches!(self, Self::Pending | Self::Resolving) - } -} - -impl fmt::Display for GitPullSessionStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct InvalidGitPullSessionStatus; - -impl fmt::Display for InvalidGitPullSessionStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("invalid git pull session status") - } -} - -impl std::error::Error for InvalidGitPullSessionStatus {} - -impl FromStr for GitPullSessionStatus { - type Err = InvalidGitPullSessionStatus; - - fn from_str(s: &str) -> Result { - Self::parse(s).ok_or(InvalidGitPullSessionStatus) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parses_formats_and_in_progress() { - assert_eq!( - GitPullSessionStatus::parse(" pending "), - Some(GitPullSessionStatus::Pending) - ); - assert_eq!( - GitPullSessionStatus::parse("resolving"), - Some(GitPullSessionStatus::Resolving) - ); - assert_eq!( - 
GitPullSessionStatus::parse("merged"), - Some(GitPullSessionStatus::Merged) - ); - assert_eq!( - GitPullSessionStatus::parse("stale"), - Some(GitPullSessionStatus::Stale) - ); - assert_eq!( - GitPullSessionStatus::parse("error"), - Some(GitPullSessionStatus::Error) - ); - assert_eq!(GitPullSessionStatus::parse("nope"), None); - - assert!(GitPullSessionStatus::Pending.is_in_progress()); - assert!(GitPullSessionStatus::Resolving.is_in_progress()); - assert!(!GitPullSessionStatus::Merged.is_in_progress()); - assert_eq!( - GitPullSessionStatus::Merged.as_str(), - GIT_PULL_STATUS_MERGED - ); - } -} diff --git a/api/crates/domain/src/git/sync_log.rs b/api/crates/domain/src/git/sync_log.rs deleted file mode 100644 index 53cd82a2..00000000 --- a/api/crates/domain/src/git/sync_log.rs +++ /dev/null @@ -1,145 +0,0 @@ -use std::fmt; -use std::str::FromStr; - -use serde::{Deserialize, Serialize}; - -pub const GIT_SYNC_OPERATION_PUSH: &str = "push"; -pub const GIT_SYNC_OPERATION_PULL: &str = "pull"; -pub const GIT_SYNC_OPERATION_COMMIT: &str = "commit"; -pub const GIT_SYNC_OPERATION_INIT: &str = "init"; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum GitSyncOperation { - Push, - Pull, - Commit, - Init, -} - -impl GitSyncOperation { - pub fn parse(value: &str) -> Option { - match value.trim() { - GIT_SYNC_OPERATION_PUSH => Some(Self::Push), - GIT_SYNC_OPERATION_PULL => Some(Self::Pull), - GIT_SYNC_OPERATION_COMMIT => Some(Self::Commit), - GIT_SYNC_OPERATION_INIT => Some(Self::Init), - _ => None, - } - } - - pub const fn as_str(self) -> &'static str { - match self { - Self::Push => GIT_SYNC_OPERATION_PUSH, - Self::Pull => GIT_SYNC_OPERATION_PULL, - Self::Commit => GIT_SYNC_OPERATION_COMMIT, - Self::Init => GIT_SYNC_OPERATION_INIT, - } - } -} - -impl fmt::Display for GitSyncOperation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct InvalidGitSyncOperation; - -impl fmt::Display for InvalidGitSyncOperation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("invalid git sync operation") - } -} - -impl std::error::Error for InvalidGitSyncOperation {} - -impl FromStr for GitSyncOperation { - type Err = InvalidGitSyncOperation; - - fn from_str(s: &str) -> Result { - Self::parse(s).ok_or(InvalidGitSyncOperation) - } -} - -pub const GIT_SYNC_STATUS_SUCCESS: &str = "success"; -pub const GIT_SYNC_STATUS_ERROR: &str = "error"; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum GitSyncStatus { - Success, - Error, -} - -impl GitSyncStatus { - pub fn parse(value: &str) -> Option { - match value.trim() { - GIT_SYNC_STATUS_SUCCESS => Some(Self::Success), - GIT_SYNC_STATUS_ERROR => Some(Self::Error), - _ => None, - } - } - - pub const fn as_str(self) -> &'static str { - match self { - Self::Success => GIT_SYNC_STATUS_SUCCESS, - Self::Error => GIT_SYNC_STATUS_ERROR, - } - } -} - -impl fmt::Display for GitSyncStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct InvalidGitSyncStatus; - -impl fmt::Display for InvalidGitSyncStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("invalid git sync status") - } -} - -impl std::error::Error for InvalidGitSyncStatus {} - -impl FromStr for GitSyncStatus { - type Err = 
InvalidGitSyncStatus;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        Self::parse(s).ok_or(InvalidGitSyncStatus)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn parses_and_formats() {
-        assert_eq!(
-            GitSyncOperation::parse(" push "),
-            Some(GitSyncOperation::Push)
-        );
-        assert_eq!(
-            GitSyncOperation::parse("commit"),
-            Some(GitSyncOperation::Commit)
-        );
-        assert_eq!(GitSyncOperation::parse("nope"), None);
-        assert_eq!(GitSyncOperation::Init.as_str(), "init");
-        assert_eq!(GitSyncOperation::Pull.to_string(), "pull");
-
-        assert_eq!(
-            GitSyncStatus::parse(" success "),
-            Some(GitSyncStatus::Success)
-        );
-        assert_eq!(GitSyncStatus::parse("error"), Some(GitSyncStatus::Error));
-        assert_eq!(GitSyncStatus::parse("nope"), None);
-        assert_eq!(GitSyncStatus::Success.as_str(), "success");
-        assert_eq!(GitSyncStatus::Error.to_string(), "error");
-    }
-}
diff --git a/api/crates/domain/src/identity/keys.rs b/api/crates/domain/src/identity/keys.rs
new file mode 100644
index 00000000..c151ac7a
--- /dev/null
+++ b/api/crates/domain/src/identity/keys.rs
@@ -0,0 +1,132 @@
+//! E2EE key types for user identity
+
+use chrono::{DateTime, Utc};
+use uuid::Uuid;
+
+pub const KDF_TYPE_ARGON2ID: &str = "argon2id";
+pub const KDF_TYPE_PBKDF2: &str = "pbkdf2";
+pub const KEY_TYPE_ECDH_P256: &str = "ecdh-p256";
+pub const KEY_TYPE_X25519: &str = "x25519";
+pub const KEY_TYPE_ED25519: &str = "ed25519";
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum KdfType {
+    Argon2id,
+    Pbkdf2,
+}
+
+impl KdfType {
+    pub fn parse(s: &str) -> Option<Self> {
+        match s {
+            KDF_TYPE_ARGON2ID => Some(Self::Argon2id),
+            KDF_TYPE_PBKDF2 => Some(Self::Pbkdf2),
+            _ => None,
+        }
+    }
+
+    pub fn as_str(self) -> &'static str {
+        match self {
+            Self::Argon2id => KDF_TYPE_ARGON2ID,
+            Self::Pbkdf2 => KDF_TYPE_PBKDF2,
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum KeyType {
+    EcdhP256,
+    X25519,
+    Ed25519,
+}
+
+impl KeyType {
+    pub fn parse(s: &str) -> Option<Self> {
+        match s {
+            KEY_TYPE_ECDH_P256 => Some(Self::EcdhP256),
+            KEY_TYPE_X25519 => Some(Self::X25519),
+            KEY_TYPE_ED25519 => Some(Self::Ed25519),
+            _ => None,
+        }
+    }
+
+    pub fn as_str(self) -> &'static str {
+        match self {
+            Self::EcdhP256 => KEY_TYPE_ECDH_P256,
+            Self::X25519 => KEY_TYPE_X25519,
+            Self::Ed25519 => KEY_TYPE_ED25519,
+        }
+    }
+}
+
+/// KDF parameters for key derivation
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct KdfParams {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub memory: Option<u32>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub iterations: Option<u32>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub parallelism: Option<u32>,
+}
+
+impl Default for KdfParams {
+    fn default() -> Self {
+        Self {
+            memory: Some(65536),
+            iterations: Some(3),
+            parallelism: Some(4),
+        }
+    }
+}
+
+/// User's public key for ECDH key exchange
+#[derive(Debug, Clone)]
+pub struct UserPublicKey {
+    pub user_id: Uuid,
+    pub public_key: Vec<u8>,
+    pub key_type: KeyType,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: DateTime<Utc>,
+}
+
+/// User's encrypted master key (for recovery via passphrase)
+#[derive(Debug, Clone)]
+pub struct UserEncryptedMasterKey {
+    pub user_id: Uuid,
+    pub encrypted_key: Vec<u8>,
+    pub salt: Vec<u8>,
+    pub kdf_type: KdfType,
+    pub kdf_params: KdfParams,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: DateTime<Utc>,
+}
+
+/// User's encrypted private key (encrypted with UMK)
+#[derive(Debug, Clone)]
+pub struct UserEncryptedPrivateKey {
+    pub user_id: Uuid,
+    pub encrypted_private_key: Vec<u8>,
+    pub nonce: Vec<u8>,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: DateTime<Utc>,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn kdf_type_parses() {
+        assert_eq!(KdfType::parse("argon2id"), Some(KdfType::Argon2id));
+        assert_eq!(KdfType::parse("pbkdf2"), Some(KdfType::Pbkdf2));
+        assert_eq!(KdfType::parse("unknown"), None);
+    }
+
+    #[test]
+    fn key_type_parses() {
+        assert_eq!(KeyType::parse("ecdh-p256"), Some(KeyType::EcdhP256));
+        assert_eq!(KeyType::parse("x25519"), Some(KeyType::X25519));
+        assert_eq!(KeyType::parse("ed25519"), Some(KeyType::Ed25519));
+        assert_eq!(KeyType::parse("unknown"), None);
+    }
+}
diff --git a/api/crates/domain/src/identity/mod.rs b/api/crates/domain/src/identity/mod.rs
index a178a951..8148e7ad 100644
--- a/api/crates/domain/src/identity/mod.rs
+++ b/api/crates/domain/src/identity/mod.rs
@@ -1,4 +1,5 @@
 // Identity (auth/sessions/api_tokens) domain lives here.
 pub mod api_token;
+pub mod keys;
 pub mod policy;
diff --git a/api/crates/domain/src/workspaces/keys.rs b/api/crates/domain/src/workspaces/keys.rs
new file mode 100644
index 00000000..c5691313
--- /dev/null
+++ b/api/crates/domain/src/workspaces/keys.rs
@@ -0,0 +1,15 @@
+//! E2EE key types for workspaces
+
+use chrono::{DateTime, Utc};
+use uuid::Uuid;
+
+/// Workspace encrypted key (KEK encrypted with user's public key)
+#[derive(Debug, Clone)]
+pub struct WorkspaceEncryptedKey {
+    pub id: Uuid,
+    pub workspace_id: Uuid,
+    pub user_id: Uuid,
+    pub encrypted_kek: Vec<u8>,
+    pub key_version: i32,
+    pub created_at: DateTime<Utc>,
+}
diff --git a/api/crates/domain/src/workspaces/mod.rs b/api/crates/domain/src/workspaces/mod.rs
index e6f32be6..e2f4f774 100644
--- a/api/crates/domain/src/workspaces/mod.rs
+++ b/api/crates/domain/src/workspaces/mod.rs
@@ -1,2 +1,3 @@
+pub mod keys;
 pub mod permissions;
 pub mod roles;
diff --git a/api/crates/infrastructure/Cargo.toml b/api/crates/infrastructure/Cargo.toml
index 25f964fe..892551da 100644
--- a/api/crates/infrastructure/Cargo.toml
+++ b/api/crates/infrastructure/Cargo.toml
@@ -13,7 +13,6 @@ chrono = { version = "0.4", features = ["serde", "clock"] }
 dotenvy = "0.15"
 futures-core = "0.3"
 futures-util = { version = "0.3", features = ["sink"] }
-git2 = { version = "0.18", default-features = true, features = ["vendored-libgit2"] }
 htmlescape = "0.3"
 hex = "0.4"
 hmac = "0.12"
@@ -52,12 +51,13 @@ password-hash = "0.5"
 aes-gcm = "0.10"
 aead = "0.5"
+chacha20poly1305 = "0.10"
+zeroize = { version = "1.6", features = ["derive"] }
 aws-config = { version = "1", features = ["behavior-version-latest"] }
 aws-sdk-s3 = "1"
 extism = { version = "1" }
-pandoc = "0.8"
-
 base64 = "0.21"
+ed25519-dalek = { version = "2", features = ["std"] }
diff --git a/api/crates/infrastructure/src/core/crypto/ed25519.rs b/api/crates/infrastructure/src/core/crypto/ed25519.rs
new file mode 100644
index 00000000..4219a03a
--- /dev/null
+++ b/api/crates/infrastructure/src/core/crypto/ed25519.rs
@@ -0,0 +1,130 @@
+//! Ed25519 signature verification for E2EE messages
+//!
+//! This module provides signature verification for E2EE realtime messages.
+//! The server verifies signatures to ensure message integrity and authenticity,
+//! but does not decrypt the message content.
+//!
+//! Signature format follows secsync specification:
+//! `domain + canonicalize({nonce, ciphertext, publicData})`
+
+use anyhow::Result;
+use ed25519_dalek::{Signature, Verifier, VerifyingKey};
+
+/// Ed25519 signature verifier for E2EE messages
+pub struct Ed25519Verifier;
+
+impl Ed25519Verifier {
+    /// Verify an Ed25519 signature
+    ///
+    /// # Arguments
+    /// * `public_key` - 32-byte Ed25519 public key
+    /// * `message` - Message bytes to verify
+    /// * `signature` - 64-byte Ed25519 signature
+    ///
+    /// # Returns
+    /// * `Ok(true)` if signature is valid
+    /// * `Ok(false)` if signature is invalid
+    /// * `Err` if key/signature format is invalid
+    pub fn verify(public_key: &[u8], message: &[u8], signature: &[u8]) -> Result<bool> {
+        // Validate key length (32 bytes)
+        if public_key.len() != 32 {
+            anyhow::bail!(
+                "invalid public key length: expected 32, got {}",
+                public_key.len()
+            );
+        }
+
+        // Validate signature length (64 bytes)
+        if signature.len() != 64 {
+            anyhow::bail!(
+                "invalid signature length: expected 64, got {}",
+                signature.len()
+            );
+        }
+
+        let verifying_key = VerifyingKey::from_bytes(
+            public_key
+                .try_into()
+                .map_err(|_| anyhow::anyhow!("invalid public key"))?,
+        )
+        .map_err(|e| anyhow::anyhow!("failed to parse public key: {}", e))?;
+
+        let sig = Signature::from_bytes(
+            signature
+                .try_into()
+                .map_err(|_| anyhow::anyhow!("invalid signature"))?,
+        );
+
+        Ok(verifying_key.verify(message, &sig).is_ok())
+    }
+
+    /// Build message bytes for signature verification (secsync format)
+    ///
+    /// Format: `domain + canonicalize({nonce, ciphertext, publicData})`
+    ///
+    /// Where canonicalize produces deterministic JSON (RFC 8785 JSON Canonicalization Scheme).
+    /// The publicData is passed as a Base64-encoded string (already canonicalized by client).
+    ///
+    /// # Arguments
+    /// * `domain` - Signature domain (e.g., "refmd_update", "refmd_snapshot", "refmd_ephemeral")
+    /// * `nonce` - Base64-encoded nonce string
+    /// * `ciphertext` - Base64-encoded ciphertext string
+    /// * `public_data` - Base64-encoded canonicalized publicData string
+    pub fn build_signing_message(
+        domain: &str,
+        nonce: &str,
+        ciphertext: &str,
+        public_data: &str,
+    ) -> Vec<u8> {
+        // Canonicalize {nonce, ciphertext, publicData} as JSON
+        // Keys must be sorted alphabetically per RFC 8785
+        let canonical_json = format!(
+            r#"{{"ciphertext":"{}","nonce":"{}","publicData":"{}"}}"#,
+            ciphertext, nonce, public_data
+        );
+
+        // domain + canonicalized JSON
+        let mut message = Vec::with_capacity(domain.len() + canonical_json.len());
+        message.extend_from_slice(domain.as_bytes());
+        message.extend_from_slice(canonical_json.as_bytes());
+        message
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_build_signing_message_secsync_format() {
+        let msg = Ed25519Verifier::build_signing_message(
+            "refmd_update",
+            "bm9uY2U=",         // "nonce" in base64
+            "Y2lwaGVy",         // "cipher" in base64
+            "cHVibGljRGF0YQ==", // "publicData" in base64
+        );
+
+        let expected = r#"refmd_update{"ciphertext":"Y2lwaGVy","nonce":"bm9uY2U=","publicData":"cHVibGljRGF0YQ=="}"#;
+        assert_eq!(String::from_utf8(msg).unwrap(), expected);
+    }
+
+    #[test]
+    fn test_verify_invalid_key_length() {
+        let result = Ed25519Verifier::verify(&[0u8; 16], &[0u8; 32], &[0u8; 64]);
+        assert!(result.is_err());
+        assert!(result
+            .unwrap_err()
.to_string() + .contains("invalid signature length")); + } +} diff --git a/api/crates/infrastructure/src/core/crypto/mod.rs b/api/crates/infrastructure/src/core/crypto/mod.rs index 989b68a4..2c5595f6 100644 --- a/api/crates/infrastructure/src/core/crypto/mod.rs +++ b/api/crates/infrastructure/src/core/crypto/mod.rs @@ -1,3 +1,9 @@ +pub mod ed25519; +pub mod xchacha20; + +pub use ed25519::Ed25519Verifier; +pub use xchacha20::{encrypt, decrypt, encrypt_dek, decrypt_dek, generate_nonce, SecretKey, CryptoError}; + use aes_gcm::aead::{Aead, KeyInit}; use aes_gcm::{Aes256Gcm, Key, Nonce}; use base64::Engine as _; @@ -56,6 +62,12 @@ pub fn decrypt_string(secret: &str, ciphertext: &str) -> anyhow::Result pub fn encrypt_auth_data(secret: &str, auth_data: &serde_json::Value) -> serde_json::Value { match auth_data { serde_json::Value::Object(map) => { + // Check if this is E2EE encrypted data from client + // If e2ee: true, store as-is without server-side encryption + if map.get("e2ee").and_then(|v| v.as_bool()).unwrap_or(false) { + return auth_data.clone(); + } + let mut out = serde_json::Map::new(); for (k, v) in map { if (k == "token" || k == "private_key" || k == "passphrase") && v.is_string() { diff --git a/api/crates/infrastructure/src/core/crypto/xchacha20.rs b/api/crates/infrastructure/src/core/crypto/xchacha20.rs new file mode 100644 index 00000000..5db07b51 --- /dev/null +++ b/api/crates/infrastructure/src/core/crypto/xchacha20.rs @@ -0,0 +1,279 @@ +//! XChaCha20-Poly1305 encryption module for E2EE. +//! +//! This module provides AEAD encryption using XChaCha20-Poly1305, +//! which is the recommended cipher for E2EE document encryption. + +use chacha20poly1305::{ + aead::{Aead, KeyInit}, + XChaCha20Poly1305, XNonce, +}; +use rand::RngCore; +use zeroize::Zeroize; + +/// XChaCha20-Poly1305 nonce size (24 bytes). +pub const NONCE_SIZE: usize = 24; + +/// XChaCha20-Poly1305 key size (32 bytes). +pub const KEY_SIZE: usize = 32; + +/// Error type for encryption/decryption operations. +#[derive(Debug, thiserror::Error)] +pub enum CryptoError { + #[error("Invalid key length: expected {KEY_SIZE}, got {0}")] + InvalidKeyLength(usize), + + #[error("Invalid nonce length: expected {NONCE_SIZE}, got {0}")] + InvalidNonceLength(usize), + + #[error("Encryption failed")] + EncryptionFailed, + + #[error("Decryption failed: authentication tag mismatch or corrupted data")] + DecryptionFailed, +} + +/// Generate a random 24-byte nonce for XChaCha20-Poly1305. +/// +/// Each encryption operation MUST use a unique nonce. +/// Using the same nonce twice with the same key is catastrophic. +pub fn generate_nonce() -> [u8; NONCE_SIZE] { + let mut nonce = [0u8; NONCE_SIZE]; + rand::thread_rng().fill_bytes(&mut nonce); + nonce +} + +/// Encrypt plaintext using XChaCha20-Poly1305. +/// +/// # Arguments +/// * `key` - 32-byte encryption key (DEK) +/// * `plaintext` - Data to encrypt +/// +/// # Returns +/// A tuple of (ciphertext, nonce) on success. +/// The ciphertext includes the 16-byte Poly1305 authentication tag. 
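+///
+/// # Example
+///
+/// A round-trip sketch using this module's own API (illustrative; marked
+/// `ignore` since it is not wired up as a doctest). The `?` assumes a caller
+/// returning `Result<_, CryptoError>`.
+///
+/// ```ignore
+/// let key = [0x42u8; KEY_SIZE];
+/// let (ciphertext, nonce) = encrypt(&key, b"hello")?;
+/// assert_eq!(decrypt(&key, &ciphertext, &nonce)?, b"hello");
+/// ```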
+pub fn encrypt(key: &[u8], plaintext: &[u8]) -> Result<(Vec<u8>, [u8; NONCE_SIZE]), CryptoError> {
+    if key.len() != KEY_SIZE {
+        return Err(CryptoError::InvalidKeyLength(key.len()));
+    }
+
+    let cipher = XChaCha20Poly1305::new_from_slice(key)
+        .map_err(|_| CryptoError::InvalidKeyLength(key.len()))?;
+
+    let nonce = generate_nonce();
+    let xnonce = XNonce::from_slice(&nonce);
+
+    let ciphertext = cipher
+        .encrypt(xnonce, plaintext)
+        .map_err(|_| CryptoError::EncryptionFailed)?;
+
+    Ok((ciphertext, nonce))
+}
+
+/// Decrypt ciphertext using XChaCha20-Poly1305.
+///
+/// # Arguments
+/// * `key` - 32-byte encryption key (DEK)
+/// * `ciphertext` - Encrypted data (including auth tag)
+/// * `nonce` - 24-byte nonce used during encryption
+///
+/// # Returns
+/// The decrypted plaintext on success.
+pub fn decrypt(
+    key: &[u8],
+    ciphertext: &[u8],
+    nonce: &[u8],
+) -> Result<Vec<u8>, CryptoError> {
+    if key.len() != KEY_SIZE {
+        return Err(CryptoError::InvalidKeyLength(key.len()));
+    }
+    if nonce.len() != NONCE_SIZE {
+        return Err(CryptoError::InvalidNonceLength(nonce.len()));
+    }
+
+    let cipher = XChaCha20Poly1305::new_from_slice(key)
+        .map_err(|_| CryptoError::InvalidKeyLength(key.len()))?;
+
+    let xnonce = XNonce::from_slice(nonce);
+
+    cipher
+        .decrypt(xnonce, ciphertext)
+        .map_err(|_| CryptoError::DecryptionFailed)
+}
+
+/// Encrypt a DEK with a KEK.
+///
+/// Used for storing document encryption keys encrypted with workspace keys.
+///
+/// # Arguments
+/// * `kek` - 32-byte Key Encryption Key
+/// * `dek` - 32-byte Data Encryption Key to encrypt
+///
+/// # Returns
+/// A tuple of (encrypted_dek, nonce) on success.
+pub fn encrypt_dek(kek: &[u8], dek: &[u8]) -> Result<(Vec<u8>, [u8; NONCE_SIZE]), CryptoError> {
+    if dek.len() != KEY_SIZE {
+        return Err(CryptoError::InvalidKeyLength(dek.len()));
+    }
+    encrypt(kek, dek)
+}
+
+/// Decrypt a DEK with a KEK.
+///
+/// # Arguments
+/// * `kek` - 32-byte Key Encryption Key
+/// * `encrypted_dek` - Encrypted DEK (including auth tag)
+/// * `nonce` - 24-byte nonce used during encryption
+///
+/// # Returns
+/// The decrypted 32-byte DEK on success.
+pub fn decrypt_dek(
+    kek: &[u8],
+    encrypted_dek: &[u8],
+    nonce: &[u8],
+) -> Result<[u8; KEY_SIZE], CryptoError> {
+    let decrypted = decrypt(kek, encrypted_dek, nonce)?;
+    if decrypted.len() != KEY_SIZE {
+        return Err(CryptoError::InvalidKeyLength(decrypted.len()));
+    }
+
+    let mut dek = [0u8; KEY_SIZE];
+    dek.copy_from_slice(&decrypted);
+    Ok(dek)
+}
+
+/// A wrapper for sensitive key material that zeroizes on drop.
+#[derive(Zeroize)]
+#[zeroize(drop)]
+pub struct SecretKey {
+    key: [u8; KEY_SIZE],
+}
+
+impl SecretKey {
+    /// Create a new SecretKey from bytes.
+    pub fn new(key: [u8; KEY_SIZE]) -> Self {
+        Self { key }
+    }
+
+    /// Create from a slice, returning error if length is invalid.
+    pub fn from_slice(slice: &[u8]) -> Result<Self, CryptoError> {
+        if slice.len() != KEY_SIZE {
+            return Err(CryptoError::InvalidKeyLength(slice.len()));
+        }
+        let mut key = [0u8; KEY_SIZE];
+        key.copy_from_slice(slice);
+        Ok(Self { key })
+    }
+
+    /// Get the key bytes.
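+    /// (The borrow is tied to this `SecretKey`; thanks to `#[zeroize(drop)]`,
+    /// the underlying buffer is wiped when the value is dropped.)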
+ pub fn as_bytes(&self) -> &[u8; KEY_SIZE] { + &self.key + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_encrypt_decrypt_roundtrip() { + let key = [0x42u8; KEY_SIZE]; + let plaintext = b"Hello, E2EE World!"; + + let (ciphertext, nonce) = encrypt(&key, plaintext).unwrap(); + let decrypted = decrypt(&key, &ciphertext, &nonce).unwrap(); + + assert_eq!(plaintext.as_slice(), decrypted.as_slice()); + } + + #[test] + fn test_encrypt_decrypt_empty() { + let key = [0x42u8; KEY_SIZE]; + let plaintext = b""; + + let (ciphertext, nonce) = encrypt(&key, plaintext).unwrap(); + let decrypted = decrypt(&key, &ciphertext, &nonce).unwrap(); + + assert_eq!(plaintext.as_slice(), decrypted.as_slice()); + } + + #[test] + fn test_encrypt_decrypt_large_data() { + let key = [0x42u8; KEY_SIZE]; + let plaintext = vec![0xABu8; 1024 * 1024]; // 1MB + + let (ciphertext, nonce) = encrypt(&key, &plaintext).unwrap(); + let decrypted = decrypt(&key, &ciphertext, &nonce).unwrap(); + + assert_eq!(plaintext, decrypted); + } + + #[test] + fn test_nonce_uniqueness() { + let nonce1 = generate_nonce(); + let nonce2 = generate_nonce(); + assert_ne!(nonce1, nonce2); + } + + #[test] + fn test_invalid_key_length() { + let short_key = [0x42u8; 16]; + let plaintext = b"test"; + + let result = encrypt(&short_key, plaintext); + assert!(matches!(result, Err(CryptoError::InvalidKeyLength(16)))); + } + + #[test] + fn test_invalid_nonce_length() { + let key = [0x42u8; KEY_SIZE]; + let ciphertext = vec![0u8; 32]; + let short_nonce = [0u8; 12]; + + let result = decrypt(&key, &ciphertext, &short_nonce); + assert!(matches!(result, Err(CryptoError::InvalidNonceLength(12)))); + } + + #[test] + fn test_corrupted_ciphertext() { + let key = [0x42u8; KEY_SIZE]; + let plaintext = b"Hello, E2EE World!"; + + let (mut ciphertext, nonce) = encrypt(&key, plaintext).unwrap(); + // Corrupt the ciphertext + ciphertext[0] ^= 0xFF; + + let result = decrypt(&key, &ciphertext, &nonce); + assert!(matches!(result, Err(CryptoError::DecryptionFailed))); + } + + #[test] + fn test_wrong_key() { + let key1 = [0x42u8; KEY_SIZE]; + let key2 = [0x43u8; KEY_SIZE]; + let plaintext = b"Secret message"; + + let (ciphertext, nonce) = encrypt(&key1, plaintext).unwrap(); + let result = decrypt(&key2, &ciphertext, &nonce); + + assert!(matches!(result, Err(CryptoError::DecryptionFailed))); + } + + #[test] + fn test_dek_encrypt_decrypt() { + let kek = [0x42u8; KEY_SIZE]; + let dek = [0x55u8; KEY_SIZE]; + + let (encrypted_dek, nonce) = encrypt_dek(&kek, &dek).unwrap(); + let decrypted_dek = decrypt_dek(&kek, &encrypted_dek, &nonce).unwrap(); + + assert_eq!(dek, decrypted_dek); + } + + #[test] + fn test_secret_key_zeroize() { + let key_bytes = [0x42u8; KEY_SIZE]; + let secret_key = SecretKey::new(key_bytes); + assert_eq!(secret_key.as_bytes(), &key_bytes); + // SecretKey will zeroize on drop + } +} diff --git a/api/crates/infrastructure/src/core/storage/fs_ingest_watcher.rs b/api/crates/infrastructure/src/core/storage/fs_ingest_watcher.rs index c2e5583c..11bccd46 100644 --- a/api/crates/infrastructure/src/core/storage/fs_ingest_watcher.rs +++ b/api/crates/infrastructure/src/core/storage/fs_ingest_watcher.rs @@ -12,7 +12,7 @@ use uuid::Uuid; use application::core::ports::storage::storage_ingest_queue::{ StorageIngestKind, StorageIngestQueue, }; -use application::core::services::storage::ingest::normalize_repo_path; +use application::core::services::storage::ingest::{normalize_repo_path, RME1_MAGIC}; use application::core::services::utils::hash::sha256_hex; use 
domain::access::permissions::PermissionSet; use domain::storage::ingest_backend::StorageIngestBackend; @@ -177,6 +177,8 @@ impl FsIngestWatcher { Ok(()) } + /// E2EE: Capture metadata for encrypted files (RME1 format) + /// Hash is computed on encrypted bytes - content is not interpreted async fn capture_file_metadata( &self, path: &Path, @@ -184,13 +186,24 @@ impl FsIngestWatcher { ) -> (Option, Option) { match tokio::fs::read(path).await { Ok(bytes) => { - let hash = sha256_hex(&bytes); + // E2EE: Validate RME1 magic number + let is_valid_rme1 = bytes.len() >= 4 && &bytes[0..4] == RME1_MAGIC; + if !is_valid_rme1 { + warn!( + repo_path = repo_path, + size = bytes.len(), + "fs_ingest_invalid_rme1_format" + ); + } + + // E2EE: Hash computed on encrypted bytes (used as encrypted_hash) + let encrypted_hash = sha256_hex(&bytes); let payload = serde_json::json!({ "file_kind": file_kind(repo_path), - "is_text": repo_path.ends_with(".md"), + "is_encrypted": is_valid_rme1, "size": bytes.len(), }); - (Some(hash), Some(payload)) + (Some(encrypted_hash), Some(payload)) } Err(err) => { warn!(error = ?err, repo_path = repo_path, "fs_ingest_metadata_failed"); diff --git a/api/crates/infrastructure/src/core/storage/gitignore_port_impl.rs b/api/crates/infrastructure/src/core/storage/gitignore_port_impl.rs deleted file mode 100644 index 9d143be0..00000000 --- a/api/crates/infrastructure/src/core/storage/gitignore_port_impl.rs +++ /dev/null @@ -1,117 +0,0 @@ -use application::core::ports::errors::PortResult; -use application::git::ports::gitignore_port::GitignorePort; - -pub struct FsGitignorePort; - -#[async_trait::async_trait] -impl GitignorePort for FsGitignorePort { - async fn ensure_gitignore(&self, dir: &str) -> PortResult { - let out: anyhow::Result = async { - use tokio::io::AsyncWriteExt; - let path = std::path::Path::new(dir).join(".gitignore"); - if let Some(parent) = path.parent() { - tokio::fs::create_dir_all(parent).await?; - } - let defaults = vec![ - "# RefMD auto-generated .gitignore", - "*.md.tmp", - ".DS_Store", - "Thumbs.db", - ".env", - ".env.local", - ]; - let mut created_or_updated = false; - if tokio::fs::try_exists(&path).await.unwrap_or(false) { - let existing = tokio::fs::read_to_string(&path).await.unwrap_or_default(); - let mut lines: std::collections::BTreeSet = - existing.lines().map(|s| s.to_string()).collect(); - let mut changed = false; - for d in &defaults { - if !lines.contains(*d) { - lines.insert((*d).to_string()); - changed = true; - } - } - if changed { - let mut buf = String::new(); - for l in lines { - buf.push_str(&l); - buf.push('\n'); - } - let mut f = tokio::fs::File::create(&path).await?; - f.write_all(buf.as_bytes()).await?; - created_or_updated = true; - } - } else { - let mut f = tokio::fs::File::create(&path).await?; - for d in &defaults { - f.write_all(d.as_bytes()).await?; - f.write_all(b"\n").await?; - } - created_or_updated = true; - } - Ok(created_or_updated) - } - .await; - out.map_err(Into::into) - } - - async fn upsert_gitignore_patterns(&self, dir: &str, patterns: &[String]) -> PortResult { - let out: anyhow::Result = async { - use tokio::io::AsyncWriteExt; - let path = std::path::Path::new(dir).join(".gitignore"); - if let Some(parent) = path.parent() { - tokio::fs::create_dir_all(parent).await?; - } - let mut set: std::collections::BTreeSet = - if tokio::fs::try_exists(&path).await.unwrap_or(false) { - tokio::fs::read_to_string(&path) - .await - .unwrap_or_default() - .lines() - .map(|s| s.to_string()) - .collect() - } else { - Default::default() 
- }; - let before = set.len(); - for p in patterns { - if !p.trim().is_empty() { - set.insert(p.trim().to_string()); - } - } - if set.len() != before { - let mut buf = String::new(); - for l in &set { - buf.push_str(l); - buf.push('\n'); - } - let mut f = tokio::fs::File::create(&path).await?; - f.write_all(buf.as_bytes()).await?; - return Ok(set.len() - before); - } - Ok(0) - } - .await; - out.map_err(Into::into) - } - - async fn read_gitignore_patterns(&self, dir: &str) -> PortResult> { - let out: anyhow::Result> = async { - let path = std::path::Path::new(dir).join(".gitignore"); - let content = if tokio::fs::try_exists(&path).await.unwrap_or(false) { - tokio::fs::read_to_string(&path).await.unwrap_or_default() - } else { - String::new() - }; - let patterns: Vec = content - .lines() - .map(|s| s.trim().to_string()) - .filter(|s| !s.is_empty() && !s.starts_with('#')) - .collect(); - Ok(patterns) - } - .await; - out.map_err(Into::into) - } -} diff --git a/api/crates/infrastructure/src/core/storage/mod.rs b/api/crates/infrastructure/src/core/storage/mod.rs index 75d528b2..00f226a1 100644 --- a/api/crates/infrastructure/src/core/storage/mod.rs +++ b/api/crates/infrastructure/src/core/storage/mod.rs @@ -1,6 +1,5 @@ mod dirty; mod fs_ingest_watcher; -mod gitignore_port_impl; mod ingest_queue; mod ingest_worker; mod job_queue; @@ -25,9 +24,6 @@ pub use worker::StorageProjectionWorker; pub mod port_impl { pub use super::storage_port_impl::*; } -pub mod gitignore { - pub use super::gitignore_port_impl::*; -} pub mod s3 { pub use super::s3_port_impl::*; } diff --git a/api/crates/infrastructure/src/core/storage/paths.rs b/api/crates/infrastructure/src/core/storage/paths.rs index f809445d..18e4bf18 100644 --- a/api/crates/infrastructure/src/core/storage/paths.rs +++ b/api/crates/infrastructure/src/core/storage/paths.rs @@ -103,24 +103,16 @@ pub async fn build_doc_file_path( uploads_root: &Path, doc_id: Uuid, ) -> anyhow::Result { - let row = sqlx::query( - "SELECT owner_id, desired_path, type, archived_at FROM documents WHERE id = $1", - ) - .bind(doc_id) - .fetch_one(pool) - .await?; + let row = sqlx::query("SELECT workspace_id, type FROM documents WHERE id = $1") + .bind(doc_id) + .fetch_one(pool) + .await?; let dtype: String = row.get("type"); if dtype == DOC_TYPE_FOLDER { - anyhow::bail!("folder_has_no_markdown_path"); + anyhow::bail!("folder_has_no_file_path"); } - let owner_id: Uuid = row.get("owner_id"); - let desired_path: String = row.get("desired_path"); - let archived = row - .try_get::>, _>("archived_at") - .ok() - .flatten() - .is_some(); - let rel = owner_relative_buf(owner_id, &desired_path, archived); + let workspace_id: Uuid = row.get("workspace_id"); + let rel = format!("{}/{}.md", workspace_id, doc_id); Ok(uploads_root.join(rel)) } @@ -247,7 +239,7 @@ pub async fn move_doc_paths( doc_id: Uuid, ) -> anyhow::Result<()> { let row = sqlx::query( - "SELECT owner_id, type, path, desired_path, archived_at FROM documents WHERE id = $1", + "SELECT workspace_id, owner_id, type, path, desired_path, archived_at FROM documents WHERE id = $1", ) .bind(doc_id) .fetch_optional(pool) @@ -256,6 +248,7 @@ pub async fn move_doc_paths( Some(r) => r, None => return Ok(()), }; + let workspace_id: Uuid = row.get("workspace_id"); let owner_id: Uuid = row.get("owner_id"); let dtype: String = row.get("type"); if dtype == DOC_TYPE_FOLDER { @@ -268,8 +261,11 @@ pub async fn move_doc_paths( .ok() .flatten() .is_some(); - let target_rel = owner_relative_from_desired(owner_id, &desired_path, archived); + // 
@@ -247,7 +239,7 @@ pub async fn move_doc_paths(
     doc_id: Uuid,
 ) -> anyhow::Result<()> {
     let row = sqlx::query(
-        "SELECT owner_id, type, path, desired_path, archived_at FROM documents WHERE id = $1",
+        "SELECT workspace_id, owner_id, type, path, desired_path, archived_at FROM documents WHERE id = $1",
     )
     .bind(doc_id)
     .fetch_optional(pool)
     .await?;
@@ -256,6 +248,7 @@
         Some(r) => r,
         None => return Ok(()),
     };
+    let workspace_id: Uuid = row.get("workspace_id");
     let owner_id: Uuid = row.get("owner_id");
     let dtype: String = row.get("type");
     if dtype == DOC_TYPE_FOLDER {
@@ -268,8 +261,11 @@
         .ok()
         .flatten()
         .is_some();
-    let target_rel = owner_relative_from_desired(owner_id, &desired_path, archived);
+    // E2EE: Document uses new path format
+    let target_rel = format!("{}/{}.md", workspace_id, doc_id);
     let target_abs = uploads_root.join(&target_rel);
+    // Attachments keep original path logic
+    let attachment_parent_rel = owner_relative_parent_from_desired(owner_id, &desired_path, archived);
 
     if let Some(parent) = target_abs.parent() {
         let _ = tokio::fs::create_dir_all(parent).await;
@@ -287,44 +283,42 @@
         }
     }
 
-    // Move only attachments belonging to this document
-    let new_dir = target_abs.parent().map(|p| p.to_path_buf());
-    if let Some(nd) = new_dir {
-        // Get list of files belonging to this document from DB
-        let files = sqlx::query("SELECT filename, storage_path FROM files WHERE document_id = $1")
-            .bind(doc_id)
-            .fetch_all(pool)
-            .await?;
-
-        if !files.is_empty() {
-            let dst_attachments = nd.join("attachments");
-            let _ = tokio::fs::create_dir_all(&dst_attachments).await;
-
-            for row in files {
-                let filename: String = row.get("filename");
-                let old_path: String = row.get("storage_path");
-                let old_full = uploads_root.join(&old_path);
-
-                // Only move if file exists
-                if tokio::fs::try_exists(&old_full).await.unwrap_or(false) {
-                    let new_path = dst_attachments.join(&filename);
-                    if let Some(parent) = new_path.parent() {
-                        let _ = tokio::fs::create_dir_all(parent).await;
-                    }
-                    let _ = tokio::fs::rename(&old_full, &new_path).await;
-
-                    // Update DB with new path
-                    let new_rel = relative_from_uploads(uploads_root, &new_path);
-                    let _ = sqlx::query("UPDATE files SET storage_path = $2 WHERE document_id = $1 AND filename = $3")
-                        .bind(doc_id)
-                        .bind(&new_rel)
-                        .bind(&filename)
-                        .execute(pool).await;
-
-                    // Mark move: old delete, new upsert (binary)
-                    let _ = mark_dirty_delete_relative(pool, &old_path).await;
-                    let _ = mark_dirty_upsert_relative(pool, &new_rel, false, None).await;
+    // Move only attachments belonging to this document (using original path logic)
+    let attachment_dir = uploads_root.join(&attachment_parent_rel);
+    // Get list of files belonging to this document from DB
+    let files = sqlx::query("SELECT filename, storage_path FROM files WHERE document_id = $1")
+        .bind(doc_id)
+        .fetch_all(pool)
+        .await?;
+
+    if !files.is_empty() {
+        let dst_attachments = attachment_dir.join("attachments");
+        let _ = tokio::fs::create_dir_all(&dst_attachments).await;
+
+        for row in files {
+            let filename: String = row.get("filename");
+            let old_path: String = row.get("storage_path");
+            let old_full = uploads_root.join(&old_path);
+
+            // Only move if file exists
+            if tokio::fs::try_exists(&old_full).await.unwrap_or(false) {
+                let new_path = dst_attachments.join(&filename);
+                if let Some(parent) = new_path.parent() {
+                    let _ = tokio::fs::create_dir_all(parent).await;
                 }
+                let _ = tokio::fs::rename(&old_full, &new_path).await;
+
+                // Update DB with new path
+                let new_rel = relative_from_uploads(uploads_root, &new_path);
+                let _ = sqlx::query("UPDATE files SET storage_path = $2 WHERE document_id = $1 AND filename = $3")
+                    .bind(doc_id)
+                    .bind(&new_rel)
+                    .bind(&filename)
+                    .execute(pool).await;
+
+                // Mark move: old delete, new upsert (binary)
+                let _ = mark_dirty_delete_relative(pool, &old_path).await;
+                let _ = mark_dirty_upsert_relative(pool, &new_rel, false, None).await;
             }
         }
     }
@@ -339,8 +333,8 @@
         .execute(pool)
         .await;
 
-    // Mark new path as upsert (text)
-    let _ = mark_dirty_upsert_relative(pool, &target_rel, true, None).await;
+    // Mark new path as upsert (binary for encrypted data)
+    let _ = mark_dirty_upsert_relative(pool, &target_rel, false, None).await;
 
     Ok(())
 }
diff --git a/api/crates/infrastructure/src/core/storage/s3_port_impl.rs b/api/crates/infrastructure/src/core/storage/s3_port_impl.rs
index 90443765..01d38bda 100644
--- a/api/crates/infrastructure/src/core/storage/s3_port_impl.rs
+++ b/api/crates/infrastructure/src/core/storage/s3_port_impl.rs
@@ -186,7 +186,7 @@ impl S3StoragePort {
         use sqlx::Row;
 
         let row = sqlx::query(
-            "SELECT owner_id, type, path, desired_path, archived_at FROM documents WHERE id = $1",
+            "SELECT workspace_id, owner_id, type, path, desired_path, archived_at FROM documents WHERE id = $1",
         )
         .bind(doc_id)
         .fetch_optional(&self.pool)
@@ -195,6 +195,7 @@ impl S3StoragePort {
             Some(row) => row,
             None => return Ok(()),
         };
+        let workspace_id: Uuid = row.get("workspace_id");
         let owner_id: Uuid = row.get("owner_id");
         let dtype: String = row.get("type");
         if dtype == DOC_TYPE_FOLDER {
@@ -202,19 +203,8 @@ impl S3StoragePort {
         }
 
         let old_rel: Option<String> = row.try_get("path").ok();
-        let desired_path: String = row.get("desired_path");
-        let archived = row
-            .try_get::<Option<chrono::DateTime<chrono::Utc>>, _>("archived_at")
-            .ok()
-            .flatten()
-            .is_some();
-        let target_rel =
-            crate::core::storage::owner_relative_from_desired(owner_id, &desired_path, archived);
-        let target_parent_rel = crate::core::storage::owner_relative_parent_from_desired(
-            owner_id,
-            &desired_path,
-            archived,
-        );
+        // E2EE: Use {workspace_id}/{doc_id}.md path format for document
+        let target_rel = format!("{}/{}.md", workspace_id, doc_id);
 
         if let Some(old_rel) = old_rel.clone() {
             if old_rel != target_rel {
@@ -229,6 +219,18 @@ impl S3StoragePort {
             }
         }
 
+        // Attachments: Keep original path logic (owner_id based)
+        let desired_path: String = row.get("desired_path");
+        let archived = row
+            .try_get::<Option<chrono::DateTime<chrono::Utc>>, _>("archived_at")
+            .ok()
+            .flatten()
+            .is_some();
+        let target_parent_rel = crate::core::storage::owner_relative_parent_from_desired(
+            owner_id,
+            &desired_path,
+            archived,
+        );
         let new_dir = self.root.join(&target_parent_rel);
 
         let files = sqlx::query("SELECT filename, storage_path FROM files WHERE document_id = $1")
@@ -281,8 +283,9 @@ impl S3StoragePort {
             .execute(&self.pool)
             .await?;
 
+        // E2EE: Mark as binary (not text) since content is encrypted
         let _ =
-            crate::core::storage::mark_dirty_upsert_relative(&self.pool, &target_rel, true, None)
+            crate::core::storage::mark_dirty_upsert_relative(&self.pool, &target_rel, false, None)
                 .await;
 
         Ok(())
     }
@@ -302,21 +305,6 @@ fn normalize_prefix(root: &Path) -> String {
     parts.join("/")
 }
 
-fn sanitize_filename(name: &str) -> String {
-    let mut s = name.trim().to_string();
-    let invalid = ['/', '\\', ':', '*', '?', '"', '<', '>', '|', '\0'];
-    for ch in invalid {
-        s = s.replace(ch, "-");
-    }
-    if s.is_empty() {
-        s = "attachment".into();
-    }
-    if s.len() > 120 {
-        s.truncate(120);
-    }
-    s
-}
-
 #[async_trait]
 impl StorageProjectionPort for S3StoragePort {
     async fn move_folder_subtree(&self, folder_id: Uuid) -> PortResult {
@@ -547,32 +535,14 @@ impl StorageResolverPort for S3StoragePort {
             let attachments_dir = base_dir.join("attachments");
             let _ = fs::create_dir_all(&attachments_dir).await;
 
-            let sanitized = sanitize_filename(original_filename.unwrap_or("attachment"));
-            let mut target = attachments_dir.join(&sanitized);
-            let mut relative =
+            // Use UUID for storage filename to hide the original filename (E2EE)
+            // Original filename is stored in encrypted_metadata
+            let _original = original_filename; // Kept for API compatibility, but not used in path
+            let file_uuid = Uuid::new_v4();
+            let safe = file_uuid.to_string();
+            let target = attachments_dir.join(&safe);
+            let relative =
                 crate::core::storage::relative_from_uploads(&self.root, &target).replace('\\', "/");
-            let mut counter = 1;
-            loop {
-                let key = self.relative_to_key(&relative);
-                if !self.object_exists(&key).await? {
-                    break;
-                }
-                let stem = target
-                    .file_stem()
-                    .and_then(|s| s.to_str())
-                    .unwrap_or("attachment");
-                let ext = target
-                    .extension()
-                    .and_then(|s| s.to_str())
-                    .filter(|s| !s.is_empty())
-                    .map(|s| format!(".{s}"))
-                    .unwrap_or_default();
-                let new_name = format!("{stem}-{counter}{ext}");
-                target = attachments_dir.join(&new_name);
-                relative = crate::core::storage::relative_from_uploads(&self.root, &target)
-                    .replace('\\', "/");
-                counter += 1;
-            }
 
             if let Some(parent) = target.parent() {
                 let _ = fs::create_dir_all(parent).await;
@@ -582,11 +552,7 @@ impl StorageResolverPort for S3StoragePort {
             let size = bytes.len() as i64;
             let hash = sha256_hex(bytes);
             Ok(StoredAttachment {
-                filename: target
-                    .file_name()
-                    .and_then(|s| s.to_str())
-                    .unwrap_or("attachment")
-                    .to_string(),
+                filename: safe,
                 relative_path: relative,
                 size,
                 content_hash: hash,
             })
         }
         .await;
         out.map_err(Into::into)
     }
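With random UUID v4 storage names the old collision loop (`name-1`, `name-2`, ...) becomes unnecessary: two fresh v4 UUIDs colliding is negligible in practice, and the user-supplied filename now lives only inside `encrypted_metadata`. A sketch of the naming rule shared by both backends:

```rust
use std::path::{Path, PathBuf};
use uuid::Uuid;

/// The storage name is a fresh UUID: no sanitising, no dedup counter, and no
/// hint of the original filename on disk or in object keys.
fn attachment_target(attachments_dir: &Path) -> (String, PathBuf) {
    let safe = Uuid::new_v4().to_string();
    let target = attachments_dir.join(&safe);
    (safe, target)
}
```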
+
+    // --- Public file storage ---
+
+    async fn store_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+        bytes: &[u8],
+    ) -> PortResult<String> {
+        let out: anyhow::Result<String> = async {
+            // Path: public/{workspace_id}/{document_id}/{file_id}
+            let storage_path = format!("public/{}/{}/{}", workspace_id, document_id, file_id);
+            let key = self.relative_to_key(&storage_path);
+            self.put_object(&key, bytes).await?;
+            Ok(storage_path)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn read_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<Vec<u8>> {
+        let out: anyhow::Result<Vec<u8>> = async {
+            let storage_path = format!("public/{}/{}/{}", workspace_id, document_id, file_id);
+            let key = self.relative_to_key(&storage_path);
+
+            let resp = self
+                .client
+                .get_object()
+                .bucket(&self.bucket)
+                .key(&key)
+                .send()
+                .await;
+
+            let object = match resp {
+                Ok(obj) => obj,
+                Err(SdkError::ServiceError(service_err)) => {
+                    if service_err.err().is_no_such_key() {
+                        let err = io::Error::new(
+                            io::ErrorKind::NotFound,
+                            format!("public file {key} not found"),
+                        );
+                        return Err(err.into());
+                    }
+                    return Err(anyhow!("failed to get public file {key}: {}", service_err.err()));
+                }
+                Err(err) => {
+                    return Err(anyhow!("failed to get public file {key}: {err}"));
+                }
+            };
+
+            let mut reader = object.body.into_async_read();
+            let mut data = Vec::new();
+            reader.read_to_end(&mut data).await?;
+            Ok(data)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            let storage_path = format!("public/{}/{}/{}", workspace_id, document_id, file_id);
+            let key = self.relative_to_key(&storage_path);
+            let _ = self.delete_object(&key).await;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_public_files_for_document(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            let prefix = format!("public/{}/{}/", workspace_id, document_id);
+            self.delete_children_with_prefix(&prefix).await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
 }
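Published (plaintext) copies live under a fixed three-level prefix, which is what lets `delete_public_files_for_document` reduce to a single prefix delete. A sketch of the key scheme shared by the S3 and filesystem backends:

```rust
use uuid::Uuid;

/// public/{workspace_id}/{document_id}/{file_id} - everything a document has
/// published can then be removed with one delete of the second-level prefix.
fn public_file_key(workspace_id: Uuid, document_id: Uuid, file_id: Uuid) -> String {
    format!("public/{}/{}/{}", workspace_id, document_id, file_id)
}

fn public_doc_prefix(workspace_id: Uuid, document_id: Uuid) -> String {
    format!("public/{}/{}/", workspace_id, document_id)
}
```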
 impl S3StoragePort {
-    #[allow(dead_code)]
+    /// Delete all objects with the given prefix
     async fn delete_children_with_prefix(&self, rel: &str) -> anyhow::Result<()> {
         let mut key_prefix = self.relative_to_key(rel);
         if key_prefix.is_empty() {
diff --git a/api/crates/infrastructure/src/core/storage/storage_port_impl.rs b/api/crates/infrastructure/src/core/storage/storage_port_impl.rs
index 50bf2897..da14c2a5 100644
--- a/api/crates/infrastructure/src/core/storage/storage_port_impl.rs
+++ b/api/crates/infrastructure/src/core/storage/storage_port_impl.rs
@@ -246,47 +246,12 @@ impl StorageResolverPort for FsStoragePort {
             let attachments_dir = base_dir.join("attachments");
             let _ = fs::create_dir_all(&attachments_dir).await;
 
-            let original = original_filename.unwrap_or("file.bin");
-            let mut safe = crate::core::storage::sanitize_title(original);
-
-            let ts = chrono::Utc::now().format("%Y%m%d-%H%M%S");
-            let (stem, ext) = {
-                let p = Path::new(&safe);
-                let stem = p
-                    .file_stem()
-                    .and_then(|s| s.to_str())
-                    .filter(|s| !s.is_empty())
-                    .unwrap_or("file")
-                    .to_string();
-                let ext = p
-                    .extension()
-                    .and_then(|s| s.to_str())
-                    .unwrap_or("")
-                    .to_string();
-                (stem, ext)
-            };
-
-            safe = if ext.is_empty() {
-                format!("{}_{}", stem, ts)
-            } else {
-                format!("{}_{}.{}", stem, ts, ext)
-            };
-
-            let mut candidate = attachments_dir.join(&safe);
-            let mut counter = 1;
-            while fs::try_exists(&candidate).await.unwrap_or(false) {
-                let p = Path::new(&safe);
-                let stem = p.file_stem().and_then(|s| s.to_str()).unwrap_or("file");
-                let ext = p.extension().and_then(|s| s.to_str()).unwrap_or("");
-                let new_name = if ext.is_empty() {
-                    format!("{}-{}", stem, counter)
-                } else {
-                    format!("{}-{}.{}", stem, counter, ext)
-                };
-                candidate = attachments_dir.join(&new_name);
-                safe = new_name;
-                counter += 1;
-            }
+            // Use UUID for storage filename to hide the original filename (E2EE)
+            // Original filename is stored in encrypted_metadata
+            let _original = original_filename; // Kept for API compatibility, but not used in path
+            let file_uuid = Uuid::new_v4();
+            let safe = file_uuid.to_string();
+            let candidate = attachments_dir.join(&safe);
 
             fs::write(&candidate, bytes).await?;
             let relative = crate::core::storage::relative_from_uploads(
@@ -308,4 +273,104 @@ impl StorageResolverPort for FsStoragePort {
         .await;
         out.map_err(Into::into)
     }
+
+    // --- Public file storage ---
+
+    async fn store_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+        bytes: &[u8],
+    ) -> PortResult<String> {
+        let out: anyhow::Result<String> = async {
+            use tokio::fs;
+
+            // Path: public/{workspace_id}/{document_id}/{file_id}
+            let public_dir = self
+                .uploads_root
+                .join("public")
+                .join(workspace_id.to_string())
+                .join(document_id.to_string());
+            fs::create_dir_all(&public_dir).await?;
+
+            let file_path = public_dir.join(file_id.to_string());
+            fs::write(&file_path, bytes).await?;
+
+            let storage_path = format!("public/{}/{}/{}", workspace_id, document_id, file_id);
+            Ok(storage_path)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn read_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<Vec<u8>> {
+        let out: anyhow::Result<Vec<u8>> = async {
+            let file_path = self
+                .uploads_root
+                .join("public")
+                .join(workspace_id.to_string())
+                .join(document_id.to_string())
+                .join(file_id.to_string());
+            let data = tokio::fs::read(&file_path).await?;
+            Ok(data)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_public_file(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            use std::io::ErrorKind;
+
+            let file_path = self
+                .uploads_root
+                .join("public")
+                .join(workspace_id.to_string())
+                .join(document_id.to_string())
+                .join(file_id.to_string());
+
+            match tokio::fs::remove_file(&file_path).await {
+                Ok(()) => Ok(()),
+                Err(e) if e.kind() == ErrorKind::NotFound => Ok(()),
+                Err(e) => Err(e.into()),
+            }
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_public_files_for_document(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            use std::io::ErrorKind;
+
+            let doc_dir = self
+                .uploads_root
+                .join("public")
+                .join(workspace_id.to_string())
+                .join(document_id.to_string());
+
+            match tokio::fs::remove_dir_all(&doc_dir).await {
+                Ok(()) => Ok(()),
+                Err(e) if e.kind() == ErrorKind::NotFound => Ok(()),
+                Err(e) => Err(e.into()),
+            }
+        }
+        .await;
+        out.map_err(Into::into)
+    }
 }
diff --git a/api/crates/infrastructure/src/documents/db/repositories/document_keys_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/document_keys_repository_sqlx/mod.rs
new file mode 100644
index 00000000..61e1a7fc
--- /dev/null
+++ b/api/crates/infrastructure/src/documents/db/repositories/document_keys_repository_sqlx/mod.rs
@@ -0,0 +1,101 @@
+use async_trait::async_trait;
+use sqlx::Row;
+use uuid::Uuid;
+
+use crate::core::db::PgPool;
+use application::core::ports::errors::PortResult;
+use application::documents::ports::document_keys_repository::{
+    DocumentEncryptedKeyRow, DocumentKeysRepository,
+};
+
+pub struct SqlxDocumentKeysRepository {
+    pool: PgPool,
+}
+
+impl SqlxDocumentKeysRepository {
+    pub fn new(pool: PgPool) -> Self {
+        Self { pool }
+    }
+}
+
+#[async_trait]
+impl DocumentKeysRepository for SqlxDocumentKeysRepository {
+    async fn get_encrypted_dek(
+        &self,
+        document_id: Uuid,
+    ) -> PortResult<Option<DocumentEncryptedKeyRow>> {
+        let out: anyhow::Result<Option<DocumentEncryptedKeyRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT document_id, encrypted_dek, nonce, key_version, created_at, updated_at
+                FROM document_encrypted_keys
+                WHERE document_id = $1"#,
+            )
+            .bind(document_id)
+            .fetch_optional(&self.pool)
+            .await?;
+
+            Ok(row.map(|row| DocumentEncryptedKeyRow {
+                document_id: row.get("document_id"),
+                encrypted_dek: row.get("encrypted_dek"),
+                nonce: row.get("nonce"),
+                key_version: row.get("key_version"),
+                created_at: row.get("created_at"),
+                updated_at: row.get("updated_at"),
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn upsert_encrypted_dek(
+        &self,
+        document_id: Uuid,
+        encrypted_dek: &[u8],
+        nonce: &[u8],
+        key_version: i32,
+    ) -> PortResult<DocumentEncryptedKeyRow> {
+        let out: anyhow::Result<DocumentEncryptedKeyRow> = async {
+            let row = sqlx::query(
+                r#"INSERT INTO document_encrypted_keys (document_id, encrypted_dek, nonce, key_version, created_at, updated_at)
+                VALUES ($1, $2, $3, $4, now(), now())
+                ON CONFLICT (document_id)
+                DO UPDATE SET
+                    encrypted_dek = EXCLUDED.encrypted_dek,
+                    nonce = EXCLUDED.nonce,
+                    key_version = EXCLUDED.key_version,
+                    updated_at = now()
+                RETURNING document_id, encrypted_dek, nonce, key_version, created_at, updated_at"#,
+            )
+            .bind(document_id)
+            .bind(encrypted_dek)
+            .bind(nonce)
+            .bind(key_version)
+            .fetch_one(&self.pool)
+            .await?;
+
+            Ok(DocumentEncryptedKeyRow {
+                document_id: row.get("document_id"),
+                encrypted_dek: row.get("encrypted_dek"),
+                nonce: row.get("nonce"),
+                key_version: row.get("key_version"),
+                created_at: row.get("created_at"),
+                updated_at: row.get("updated_at"),
+            })
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_encrypted_dek(&self, document_id: Uuid) -> PortResult<bool> {
+        let out: anyhow::Result<bool> = async {
+            let result = sqlx::query(r#"DELETE FROM document_encrypted_keys WHERE document_id = $1"#)
+                .bind(document_id)
+                .execute(&self.pool)
+                .await?;
+
+            Ok(result.rows_affected() > 0)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
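The `document_encrypted_keys` table stores a per-document DEK that arrives already wrapped by the client; the server only persists opaque `encrypted_dek` and `nonce` bytes plus a `key_version`. As an illustration only (the client cipher is not shown in this diff; ChaCha20-Poly1305 is an assumption), wrapping a DEK under a key-encryption key could look like:

```rust
use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305, Key,
};

/// Illustrative only: wrap a 32-byte document DEK under a KEK so the server
/// can store (ciphertext, nonce) without ever seeing the plaintext key.
fn wrap_dek(kek: &Key, dek: &[u8; 32]) -> Result<(Vec<u8>, Vec<u8>), chacha20poly1305::Error> {
    let cipher = ChaCha20Poly1305::new(kek);
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng); // 96-bit nonce
    let ciphertext = cipher.encrypt(&nonce, dek.as_slice())?;
    Ok((ciphertext, nonce.to_vec()))
}
```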
diff --git a/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/helpers.rs b/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/helpers.rs
index 240ab015..fd052782 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/helpers.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/helpers.rs
@@ -64,6 +64,8 @@ impl SqlxDocumentRepository {
             row.try_get("archived_at").ok(),
             row.try_get("archived_by").ok(),
             row.try_get("archived_parent_id").ok(),
+            row.try_get("encrypted_title").ok(),
+            row.try_get("encrypted_title_nonce").ok(),
         ))
     }
diff --git a/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/repository.rs b/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/repository.rs
index 3ea889b1..d63c4419 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/repository.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/document_repository_sqlx/repository.rs
@@ -10,7 +10,7 @@ use application::documents::ports::document_repository::{
     SubtreeDocument,
 };
 use domain::documents::doc_type::DocumentType;
-use domain::documents::document::{Document as DomainDocument, SearchHit};
+use domain::documents::document::Document as DomainDocument;
 use domain::documents::path as doc_path;
 use domain::documents::title::Title;
@@ -25,7 +25,6 @@ impl DocumentRepository for SqlxDocumentRepository {
     async fn list_for_user(
         &self,
         workspace_id: Uuid,
-        query: Option<String>,
         tag: Option<String>,
         state: DocumentListState,
     ) -> DocumentRepoResult<Vec<DomainDocument>> {
@@ -50,20 +49,6 @@
             .fetch_all(&self.pool)
             .await
             .map_err(unexpected_sqlx)?
-        } else if let Some(ref qq) = query.as_ref().filter(|s| !s.trim().is_empty()) {
-            let like = format!("%{}%", qq);
-            let sql = format!(
-                r#"SELECT d.*
-                FROM documents d
-                WHERE d.workspace_id = $1 AND {archived_condition} AND d.title ILIKE $2
-                ORDER BY d.updated_at DESC LIMIT 100"#,
-            );
-            sqlx::query(&sql)
-                .bind(workspace_id)
-                .bind(like)
-                .fetch_all(&self.pool)
-                .await
-                .map_err(unexpected_sqlx)?
         } else {
             let sql = format!(
                 r#"SELECT d.*
@@ -119,61 +104,6 @@
             .map_err(DocumentRepositoryError::from)
     }
 
-    async fn search_for_user(
-        &self,
-        workspace_id: Uuid,
-        query: Option<String>,
-        limit: i64,
-    ) -> DocumentRepoResult<Vec<SearchHit>> {
-        let q = query.unwrap_or_default();
-        let like = format!("%{}%", q);
-        let rows = if q.trim().is_empty() {
-            sqlx::query(
-                r#"SELECT id, title, type, path, updated_at, archived_at
-                FROM documents WHERE workspace_id = $1
-                AND archived_at IS NULL
-                ORDER BY updated_at DESC
-                LIMIT $2"#,
-            )
-            .bind(workspace_id)
-            .bind(limit)
-            .fetch_all(&self.pool)
-            .await
-            .map_err(unexpected_sqlx)?
-        } else {
-            sqlx::query(
-                r#"SELECT id, title, type, path, updated_at, archived_at FROM documents
-                WHERE workspace_id = $1 AND archived_at IS NULL
-                AND (LOWER(title) LIKE LOWER($2) OR title ILIKE $2)
-                ORDER BY CASE WHEN LOWER(title) = LOWER($3) THEN 0 ELSE 1 END, LENGTH(title), updated_at DESC
-                LIMIT $4"#,
-            )
-            .bind(workspace_id)
-            .bind(like)
-            .bind(&q)
-            .bind(limit)
-            .fetch_all(&self.pool)
-            .await
-            .map_err(unexpected_sqlx)?
-        };
-        rows.into_iter()
-            .map(|r| {
-                let doc_type_str: String = r.get("type");
-                let doc_type = DocumentType::try_from(doc_type_str.as_str())
-                    .context("invalid_document_type")?;
-                let title: String = r.get("title");
-                Ok(SearchHit {
-                    id: r.get("id"),
-                    title: Title::new(title),
-                    doc_type,
-                    path: r.try_get("path").ok(),
-                    updated_at: r.get("updated_at"),
-                })
-            })
-            .collect::<anyhow::Result<Vec<SearchHit>>>()
-            .map_err(DocumentRepositoryError::from)
-    }
-
     async fn create_for_user(
         &self,
         workspace_id: Uuid,
@@ -326,6 +256,26 @@
             .collect::<anyhow::Result<Vec<_>>>()
             .map_err(DocumentRepositoryError::from)
     }
+
+    async fn update_encrypted_title(
+        &self,
+        doc_id: Uuid,
+        encrypted_title: Vec<u8>,
+        encrypted_title_nonce: Vec<u8>,
+    ) -> DocumentRepoResult<()> {
+        sqlx::query(
+            r#"UPDATE documents
+            SET encrypted_title = $2, encrypted_title_nonce = $3, updated_at = now()
+            WHERE id = $1"#,
+        )
+        .bind(doc_id)
+        .bind(&encrypted_title)
+        .bind(&encrypted_title_nonce)
+        .execute(&self.pool)
+        .await
+        .map_err(unexpected_sqlx)?;
+        Ok(())
+    }
 }
 
 #[async_trait]
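`encrypted_title` and `encrypted_title_nonce` are opaque byte columns: the server can no longer sort or match on titles, which is also why the title-based `search_for_user`/ILIKE paths were deleted above. Client-side, producing the pair bound by `update_encrypted_title` could look like this (illustrative; the client code is not part of this diff, and the AEAD choice is an assumption):

```rust
use chacha20poly1305::aead::{Aead, AeadCore, KeyInit, OsRng};
use chacha20poly1305::ChaCha20Poly1305;

/// Illustrative: encrypt a title under the document DEK, yielding the two
/// values stored by update_encrypted_title (ciphertext, nonce).
fn encrypt_title(
    cipher: &ChaCha20Poly1305,
    title: &str,
) -> Result<(Vec<u8>, Vec<u8>), chacha20poly1305::Error> {
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);
    let ciphertext = cipher.encrypt(&nonce, title.as_bytes())?;
    Ok((ciphertext, nonce.to_vec()))
}
```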
diff --git a/api/crates/infrastructure/src/documents/db/repositories/document_snapshot_archive_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/document_snapshot_archive_repository_sqlx/mod.rs
index 577e6fd6..26fbc350 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/document_snapshot_archive_repository_sqlx/mod.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/document_snapshot_archive_repository_sqlx/mod.rs
@@ -47,7 +47,9 @@ impl DocumentSnapshotArchiveRepository for SqlxDocumentSnapshotArchiveRepository
                 created_at,
                 created_by,
                 byte_size,
-                content_hash"#,
+                content_hash,
+                nonce,
+                signature"#,
         )
         .bind(input.document_id)
         .bind(input.version as i32)
@@ -75,7 +77,9 @@
                 created_at,
                 created_by,
                 byte_size,
-                content_hash
+                content_hash,
+                nonce,
+                signature
             FROM document_snapshot_archives
             WHERE document_id = $1 AND version = $2"#,
         )
@@ -97,6 +101,8 @@
                 created_by: row.try_get("created_by").ok(),
                 byte_size: row.get("byte_size"),
                 content_hash: row.get("content_hash"),
+                nonce: row.try_get("nonce").ok(),
+                signature: row.try_get("signature").ok(),
             })
         }
         .await;
@@ -117,7 +123,9 @@
                 created_at,
                 created_by,
                 byte_size,
-                content_hash
+                content_hash,
+                nonce,
+                signature
             FROM document_snapshot_archives
             WHERE id = $1"#,
         )
@@ -137,6 +145,8 @@
                     created_by: row.try_get("created_by").ok(),
                     byte_size: row.get("byte_size"),
                     content_hash: row.get("content_hash"),
+                    nonce: row.try_get("nonce").ok(),
+                    signature: row.try_get("signature").ok(),
                 },
                 bytes: row.get("snapshot"),
             }))
@@ -163,7 +173,9 @@
                 created_at,
                 created_by,
                 byte_size,
-                content_hash
+                content_hash,
+                nonce,
+                signature
             FROM document_snapshot_archives
             WHERE document_id = $1
             ORDER BY created_at DESC
@@ -188,6 +200,8 @@
                     created_by: row.try_get("created_by").ok(),
                     byte_size: row.get("byte_size"),
                     content_hash: row.get("content_hash"),
+                    nonce: row.try_get("nonce").ok(),
+                    signature: row.try_get("signature").ok(),
                 })
                 .collect())
         }
@@ -213,7 +227,9 @@
                 created_at,
                 created_by,
                 byte_size,
-                content_hash
+                content_hash,
+                nonce,
+                signature
             FROM document_snapshot_archives
             WHERE document_id = $1 AND version < $2
             ORDER BY version DESC
@@ -236,6 +252,8 @@
                     created_by: row.try_get("created_by").ok(),
                     byte_size: row.get("byte_size"),
                     content_hash: row.get("content_hash"),
+                    nonce: row.try_get("nonce").ok(),
+                    signature: row.try_get("signature").ok(),
                 },
                 bytes: row.get("snapshot"),
             }))
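Snapshot rows now carry an optional `nonce` (for the encrypted snapshot blob) and an opaque `signature`. Verifying such a signature might look like the following sketch; ed25519 is an assumption here, since the diff only shows the bytes being stored, not who signs them or with what scheme:

```rust
use ed25519_dalek::{Signature, Verifier, VerifyingKey};

/// Illustrative: check a detached signature over the encrypted snapshot
/// bytes. Which key signs snapshots is not specified by this diff.
fn verify_snapshot(key: &VerifyingKey, snapshot: &[u8], signature: &[u8; 64]) -> bool {
    let sig = Signature::from_bytes(signature);
    key.verify(snapshot, &sig).is_ok()
}
```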
diff --git a/api/crates/infrastructure/src/documents/db/repositories/encrypted_tag_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/encrypted_tag_repository_sqlx/mod.rs
new file mode 100644
index 00000000..bf3d3d3c
--- /dev/null
+++ b/api/crates/infrastructure/src/documents/db/repositories/encrypted_tag_repository_sqlx/mod.rs
@@ -0,0 +1,189 @@
+use async_trait::async_trait;
+use sqlx::Row;
+use uuid::Uuid;
+
+use crate::core::db::PgPool;
+use application::core::ports::errors::PortResult;
+use application::documents::ports::tagging::encrypted_tag_repository::{
+    EncryptedTagEntry, EncryptedTagRepository, EncryptedTagSummary,
+};
+
+pub struct SqlxEncryptedTagRepository {
+    pool: PgPool,
+}
+
+impl SqlxEncryptedTagRepository {
+    pub fn new(pool: PgPool) -> Self {
+        Self { pool }
+    }
+}
+
+#[async_trait]
+impl EncryptedTagRepository for SqlxEncryptedTagRepository {
+    async fn list_encrypted_tags(
+        &self,
+        workspace_id: Uuid,
+    ) -> PortResult<Vec<EncryptedTagSummary>> {
+        let out: anyhow::Result<Vec<EncryptedTagSummary>> = async {
+            let rows = sqlx::query(
+                r#"SELECT encrypted_tag, COUNT(*) as count
+                FROM encrypted_tag_index
+                WHERE workspace_id = $1
+                GROUP BY encrypted_tag
+                ORDER BY count DESC"#,
+            )
+            .bind(workspace_id)
+            .fetch_all(&self.pool)
+            .await?;
+
+            Ok(rows
+                .into_iter()
+                .map(|row| EncryptedTagSummary {
+                    encrypted_tag: row.get("encrypted_tag"),
+                    count: row.get("count"),
+                })
+                .collect())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn list_document_encrypted_tags(
+        &self,
+        document_id: Uuid,
+    ) -> PortResult<Vec<EncryptedTagEntry>> {
+        let out: anyhow::Result<Vec<EncryptedTagEntry>> = async {
+            let rows = sqlx::query(
+                r#"SELECT id, workspace_id, document_id, encrypted_tag, created_at
+                FROM encrypted_tag_index
+                WHERE document_id = $1
+                ORDER BY created_at"#,
+            )
+            .bind(document_id)
+            .fetch_all(&self.pool)
+            .await?;
+
+            Ok(rows
+                .into_iter()
+                .map(|row| EncryptedTagEntry {
+                    id: row.get("id"),
+                    workspace_id: row.get("workspace_id"),
+                    document_id: row.get("document_id"),
+                    encrypted_tag: row.get("encrypted_tag"),
+                    created_at: row.get("created_at"),
+                })
+                .collect())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn replace_document_encrypted_tags(
+        &self,
+        workspace_id: Uuid,
+        document_id: Uuid,
+        encrypted_tags: &[Vec<u8>],
+    ) -> PortResult<Vec<EncryptedTagEntry>> {
+        let out: anyhow::Result<Vec<EncryptedTagEntry>> = async {
+            // Use a transaction for atomicity
+            let mut tx = self.pool.begin().await?;
+
+            // Delete existing tags for this document
+            sqlx::query(r#"DELETE FROM encrypted_tag_index WHERE document_id = $1"#)
+                .bind(document_id)
+                .execute(&mut *tx)
+                .await?;
+
+            // Insert new tags
+            let mut entries = Vec::with_capacity(encrypted_tags.len());
+            for encrypted_tag in encrypted_tags {
+                let row = sqlx::query(
+                    r#"INSERT INTO encrypted_tag_index (workspace_id, document_id, encrypted_tag)
+                    VALUES ($1, $2, $3)
+                    RETURNING id, workspace_id, document_id, encrypted_tag, created_at"#,
+                )
+                .bind(workspace_id)
+                .bind(document_id)
+                .bind(encrypted_tag)
+                .fetch_one(&mut *tx)
+                .await?;
+
+                entries.push(EncryptedTagEntry {
+                    id: row.get("id"),
+                    workspace_id: row.get("workspace_id"),
+                    document_id: row.get("document_id"),
+                    encrypted_tag: row.get("encrypted_tag"),
+                    created_at: row.get("created_at"),
+                });
+            }
+
+            tx.commit().await?;
+            Ok(entries)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn find_documents_by_encrypted_tag(
+        &self,
+        workspace_id: Uuid,
+        encrypted_tag: &[u8],
+    ) -> PortResult<Vec<Uuid>> {
+        let out: anyhow::Result<Vec<Uuid>> = async {
+            let rows = sqlx::query(
+                r#"SELECT DISTINCT document_id
+                FROM encrypted_tag_index
+                WHERE workspace_id = $1 AND encrypted_tag = $2"#,
+            )
+            .bind(workspace_id)
+            .bind(encrypted_tag)
+            .fetch_all(&self.pool)
+            .await?;
+
+            Ok(rows.into_iter().map(|row| row.get("document_id")).collect())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn find_encrypted_tag(
+        &self,
+        workspace_id: Uuid,
+        encrypted_tag: &[u8],
+    ) -> PortResult<Vec<EncryptedTagSummary>> {
+        let out: anyhow::Result<Vec<EncryptedTagSummary>> = async {
+            let rows = sqlx::query(
+                r#"SELECT encrypted_tag, COUNT(*) as count
+                FROM encrypted_tag_index
+                WHERE workspace_id = $1 AND encrypted_tag = $2
+                GROUP BY encrypted_tag"#,
+            )
+            .bind(workspace_id)
+            .bind(encrypted_tag)
+            .fetch_all(&self.pool)
+            .await?;
+
+            Ok(rows
+                .into_iter()
+                .map(|row| EncryptedTagSummary {
+                    encrypted_tag: row.get("encrypted_tag"),
+                    count: row.get("count"),
+                })
+                .collect())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_document_encrypted_tags(&self, document_id: Uuid) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(r#"DELETE FROM encrypted_tag_index WHERE document_id = $1"#)
+                .bind(document_id)
+                .execute(&self.pool)
+                .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
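`encrypted_tag_index` only ever compares `encrypted_tag` for byte equality (`WHERE encrypted_tag = $2`, `GROUP BY encrypted_tag`), so the client must emit a deterministic token per tag rather than a randomized ciphertext. One common construction for such a blind index, an assumption here rather than anything shown in this diff, is an HMAC over the normalized tag under a workspace-scoped key:

```rust
use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

/// Illustrative blind index: the same (key, tag) pair always yields the same
/// bytes, enabling equality search and counting without revealing the tag.
fn tag_token(tag_index_key: &[u8], tag: &str) -> Vec<u8> {
    let mut mac = HmacSha256::new_from_slice(tag_index_key)
        .expect("HMAC accepts keys of any length");
    mac.update(tag.trim().to_lowercase().as_bytes());
    mac.finalize().into_bytes().to_vec()
}
```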
diff --git a/api/crates/infrastructure/src/documents/db/repositories/files_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/files_repository_sqlx/mod.rs
index ece2503c..e2a930f0 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/files_repository_sqlx/mod.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/files_repository_sqlx/mod.rs
@@ -5,7 +5,7 @@ use uuid::Uuid;
 use crate::core::db::PgPool;
 use application::core::ports::errors::PortResult;
 use application::documents::ports::files::files_repository::{
-    FileMeta, FilePathMeta, FileRecord, FilesRepository, StoredFileScope,
+    FileInsert, FileMeta, FileRecord, FilesRepository, StoredFileScope,
 };
 
 pub struct SqlxFilesRepository {
@@ -50,27 +50,23 @@
         out.map_err(Into::into)
     }
 
-    async fn insert_file(
-        &self,
-        doc_id: Uuid,
-        filename: &str,
-        content_type: Option<&str>,
-        size: i64,
-        storage_path: &str,
-        content_hash: &str,
-    ) -> PortResult<Uuid> {
+    async fn insert_file(&self, input: FileInsert<'_>) -> PortResult<Uuid> {
         let out: anyhow::Result<Uuid> = async {
+            // E2EE files: filename is stored in encrypted_metadata, use placeholder for DB
             let row = sqlx::query(
-                r#"INSERT INTO files (document_id, filename, content_type, size, storage_path, content_hash)
-                VALUES ($1, $2, $3, $4, $5, $6)
-                RETURNING id"#,
+                r#"INSERT INTO files (
+                    document_id, size, storage_path, filename,
+                    encrypted_metadata, encrypted_metadata_nonce, encrypted_hash
+                )
+                VALUES ($1, $2, $3, '[encrypted]', $4, $5, $6)
+                RETURNING id"#,
             )
-            .bind(doc_id)
-            .bind(filename)
-            .bind(content_type)
-            .bind(size)
-            .bind(storage_path)
-            .bind(content_hash)
+            .bind(input.doc_id)
+            .bind(input.size)
+            .bind(input.storage_path)
+            .bind(input.encrypted_metadata)
+            .bind(input.encrypted_metadata_nonce)
+            .bind(input.encrypted_hash)
             .fetch_one(&self.pool)
             .await?;
             Ok(row.get("id"))
@@ -82,7 +78,8 @@
     async fn get_file_meta(&self, file_id: Uuid) -> PortResult<Option<FileMeta>> {
         let out: anyhow::Result<Option<FileMeta>> = async {
             let row = sqlx::query(
-                r#"SELECT f.storage_path, f.content_type, f.document_id, d.workspace_id
+                r#"SELECT f.storage_path, f.document_id, d.workspace_id,
+                    f.encrypted_metadata, f.encrypted_metadata_nonce, f.encrypted_hash
                 FROM files f JOIN documents d ON f.document_id = d.id
                 WHERE f.id = $1"#,
             )
@@ -91,31 +88,11 @@
             .await?;
             Ok(row.map(|r| FileMeta {
                 storage_path: r.get("storage_path"),
-                content_type: r.try_get("content_type").ok(),
                 document_id: r.get("document_id"),
                 workspace_id: r.get("workspace_id"),
-            }))
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn get_file_path_by_doc_and_name(
-        &self,
-        doc_id: Uuid,
-        filename: &str,
-    ) -> PortResult<Option<FilePathMeta>> {
-        let out: anyhow::Result<Option<FilePathMeta>> = async {
-            let row = sqlx::query(
-                r#"SELECT storage_path, content_type FROM files WHERE document_id = $1 AND filename = $2"#,
-            )
-            .bind(doc_id)
-            .bind(filename)
-            .fetch_optional(&self.pool)
-            .await?;
-            Ok(row.map(|r| FilePathMeta {
-                storage_path: r.get("storage_path"),
-                content_type: r.try_get("content_type").ok(),
+                encrypted_metadata: r.get::<Option<Vec<u8>>, _>("encrypted_metadata"),
+                encrypted_metadata_nonce: r.get::<Option<Vec<u8>>, _>("encrypted_metadata_nonce"),
+                encrypted_hash: r.get::<Option<String>, _>("encrypted_hash"),
             }))
         }
         .await;
@@ -140,7 +117,8 @@
     async fn list_files_for_document(&self, doc_id: Uuid) -> PortResult<Vec<FileRecord>> {
         let out: anyhow::Result<Vec<FileRecord>> = async {
             let rows = sqlx::query(
-                r#"SELECT id, filename, content_type, size, storage_path, content_hash
+                r#"SELECT id, size, storage_path,
+                    encrypted_metadata, encrypted_metadata_nonce, encrypted_hash
                 FROM files
                 WHERE document_id = $1"#,
             )
@@ -151,11 +129,11 @@
                 .into_iter()
                 .map(|r| FileRecord {
                     id: r.get("id"),
-                    filename: r.get("filename"),
-                    content_type: r.try_get("content_type").ok(),
                     size: r.get("size"),
                     storage_path: r.get("storage_path"),
-                    content_hash: r.get("content_hash"),
+                    encrypted_metadata: r.get::<Option<Vec<u8>>, _>("encrypted_metadata"),
+                    encrypted_metadata_nonce: r.get::<Option<Vec<u8>>, _>("encrypted_metadata_nonce"),
+                    encrypted_hash: r.get::<Option<String>, _>("encrypted_hash"),
                 })
                 .collect())
         }
@@ -229,20 +207,20 @@
         out.map_err(Into::into)
     }
 
-    async fn update_hash_and_size(
+    async fn update_size_and_hash(
         &self,
         file_id: Uuid,
         size: i64,
-        content_hash: &str,
+        encrypted_hash: &str,
     ) -> PortResult<()> {
         let out: anyhow::Result<()> = async {
             sqlx::query(
-                r#"UPDATE files SET size = $2, content_hash = $3, updated_at = now()
+                r#"UPDATE files SET size = $2, encrypted_hash = $3, updated_at = now()
                 WHERE id = $1"#,
             )
            .bind(file_id)
            .bind(size)
-            .bind(content_hash)
+            .bind(encrypted_hash)
            .execute(&self.pool)
            .await?;
            Ok(())
@@ -262,4 +240,5 @@
         .await;
         out.map_err(Into::into)
     }
+
 }
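A `files` row now keeps only what the server needs for bookkeeping (`size`, `storage_path`, a hash of the ciphertext) while the descriptive fields (filename, MIME type, a plaintext hash) move into the `encrypted_metadata` blob with its own nonce. A sketch of what a client might seal into that blob; the field set is an assumption, not something this diff defines:

```rust
use serde::{Deserialize, Serialize};

/// Illustrative shape of the sealed attachment metadata; the real field set
/// is defined by the client, not by this diff.
#[derive(Serialize, Deserialize)]
struct AttachmentMetadata {
    filename: String,
    content_type: Option<String>,
    /// Hash of the plaintext so clients can verify after decryption;
    /// the DB's encrypted_hash covers the ciphertext instead.
    plaintext_sha256: String,
}
```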
diff --git a/api/crates/infrastructure/src/documents/db/repositories/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/mod.rs
index ee581066..a931c1c3 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/mod.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/mod.rs
@@ -1,9 +1,10 @@
 pub mod access_repository_sqlx;
+pub mod document_keys_repository_sqlx;
 pub mod document_repository_sqlx;
 pub mod document_snapshot_archive_repository_sqlx;
+pub mod encrypted_tag_repository_sqlx;
 pub mod files_repository_sqlx;
 pub mod linkgraph_repository_sqlx;
 pub mod public_repository_sqlx;
+pub mod share_keys_repository_sqlx;
 pub mod shares_repository_sqlx;
-pub mod tag_repository_sqlx;
-pub mod tagging_repository_sqlx;
diff --git a/api/crates/infrastructure/src/documents/db/repositories/public_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/public_repository_sqlx/mod.rs
index 002ce43c..c3700ff2 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/public_repository_sqlx/mod.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/public_repository_sqlx/mod.rs
@@ -6,7 +6,8 @@ use uuid::Uuid;
 use crate::core::db::PgPool;
 use application::core::ports::errors::PortResult;
 use application::documents::ports::publishing::public_repository::{
-    PublicDocumentSummaryRow, PublicRepository, PublishStatusRow, WorkspaceTitleAndSlug,
+    PublicContentRow, PublicDocumentSummaryRow, PublicFileRow, PublicRepository, PublishStatusRow,
+    StorePublicFileInput, WorkspaceTitleAndSlug,
 };
 use domain::documents::doc_type::DocumentType;
 use domain::documents::document::Document;
@@ -47,11 +48,12 @@
         out.map_err(Into::into)
     }
 
-    async fn upsert_public_document(&self, doc_id: Uuid, slug: &str) -> PortResult<()> {
+    async fn upsert_public_document(&self, doc_id: Uuid, slug: &str, noindex: bool) -> PortResult<()> {
         let out: anyhow::Result<()> = async {
-            let _ = sqlx::query("INSERT INTO public_documents (document_id, slug, published_at) VALUES ($1, $2, now()) ON CONFLICT (document_id) DO UPDATE SET slug = EXCLUDED.slug, published_at = now()")
+            let _ = sqlx::query("INSERT INTO public_documents (document_id, slug, noindex, published_at) VALUES ($1, $2, $3, now()) ON CONFLICT (document_id) DO UPDATE SET slug = EXCLUDED.slug, noindex = EXCLUDED.noindex, published_at = now()")
                 .bind(doc_id)
                 .bind(slug)
+                .bind(noindex)
                 .execute(&self.pool)
                 .await?;
             Ok(())
@@ -60,6 +62,19 @@
         out.map_err(Into::into)
     }
 
+    async fn update_noindex(&self, doc_id: Uuid, noindex: bool) -> PortResult<bool> {
+        let out: anyhow::Result<bool> = async {
+            let res = sqlx::query("UPDATE public_documents SET noindex = $1 WHERE document_id = $2")
+                .bind(noindex)
+                .bind(doc_id)
+                .execute(&self.pool)
+                .await?;
+            Ok(res.rows_affected() > 0)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
     async fn slug_exists(&self, slug: &str) -> PortResult<bool> {
         let out: anyhow::Result<bool> = async {
             let n = sqlx::query_scalar::<_, i64>(
@@ -108,7 +123,7 @@
     ) -> PortResult<Option<PublishStatusRow>> {
         let out: anyhow::Result<Option<PublishStatusRow>> = async {
             let row = sqlx::query(
-                r#"SELECT p.slug, w.slug as workspace_slug
+                r#"SELECT p.slug, p.noindex, w.slug as workspace_slug
                 FROM public_documents p
                 JOIN documents d ON p.document_id = d.id
                 JOIN workspaces w ON d.workspace_id = w.id
@@ -121,6 +136,7 @@
             Ok(row.map(|r| PublishStatusRow {
                 slug: r.get("slug"),
                 workspace_slug: r.get("workspace_slug"),
+                noindex: r.get("noindex"),
             }))
         }
         .await;
@@ -214,6 +230,8 @@
                         r.try_get("archived_at").ok(),
                         r.try_get("archived_by").ok(),
                         r.try_get("archived_parent_id").ok(),
+                        r.try_get("encrypted_title").ok(),
+                        r.try_get("encrypted_title_nonce").ok(),
                     ))
                 })
                 .transpose()
@@ -250,4 +268,244 @@
         .await;
         out.map_err(Into::into)
     }
+
+    async fn get_noindex_by_workspace_and_id(
+        &self,
+        workspace_slug: &str,
+        doc_id: Uuid,
+    ) -> PortResult<Option<bool>> {
+        let out: anyhow::Result<Option<bool>> = async {
+            let row = sqlx::query_scalar::<_, bool>(
+                r#"SELECT p.noindex
+                FROM public_documents p
+                JOIN documents d ON p.document_id = d.id
+                JOIN workspaces w ON d.workspace_id = w.id
+                WHERE (w.slug = $1
+                    OR (w.is_personal AND EXISTS (
+                        SELECT 1
+                        FROM users u
+                        WHERE u.id = w.id AND lower(u.name) = lower($1)
+                    )))
+                AND d.id = $2"#,
+            )
+            .bind(workspace_slug)
+            .bind(doc_id)
+            .fetch_optional(&self.pool)
+            .await?;
+            Ok(row)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn store_public_content(
+        &self,
+        doc_id: Uuid,
+        title: &str,
+        content: &str,
+        content_hash: &str,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(
+                r#"INSERT INTO public_document_contents (document_id, title, content, content_hash, updated_at)
+                VALUES ($1, $2, $3, $4, now())
+                ON CONFLICT (document_id) DO UPDATE SET
+                    title = EXCLUDED.title,
+                    content = EXCLUDED.content,
+                    content_hash = EXCLUDED.content_hash,
+                    updated_at = now()"#,
+            )
+            .bind(doc_id)
+            .bind(title)
+            .bind(content)
+            .bind(content_hash)
+            .execute(&self.pool)
+            .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_public_content(&self, doc_id: Uuid) -> PortResult<Option<PublicContentRow>> {
+        let out: anyhow::Result<Option<PublicContentRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT document_id, title, content, content_hash, updated_at
+                FROM public_document_contents
+                WHERE document_id = $1"#,
+            )
+            .bind(doc_id)
+            .fetch_optional(&self.pool)
+            .await?;
+            Ok(row.map(|r| PublicContentRow {
+                document_id: r.get("document_id"),
+                title: r.get("title"),
+                content: r.get("content"),
+                content_hash: r.get("content_hash"),
+                updated_at: r.get("updated_at"),
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_public_content(&self, doc_id: Uuid) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(r#"DELETE FROM public_document_contents WHERE document_id = $1"#)
+                .bind(doc_id)
+                .execute(&self.pool)
+                .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
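Publishing is the one deliberate exception to E2EE: `public_document_contents` stores plaintext `title`/`content` that the client decrypts and re-uploads, `noindex` toggles a robots hint, and `content_hash` makes republishing idempotent. A sketch of the hash guard, assuming SHA-256 over the rendered content (the hash algorithm is not named in this diff):

```rust
use sha2::{Digest, Sha256};

/// Illustrative: skip the upsert when the published content is unchanged.
/// Returns (needs_update, new_hash).
fn needs_republish(existing_hash: Option<&str>, content: &str) -> (bool, String) {
    let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
    let changed = existing_hash != Some(hash.as_str());
    (changed, hash)
}
```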
+
+    // --- Public file methods ---
+
+    async fn store_public_file(&self, input: StorePublicFileInput) -> PortResult<Uuid> {
+        let out: anyhow::Result<Uuid> = async {
+            let id: Uuid = sqlx::query_scalar(
+                r#"INSERT INTO public_document_files
+                    (document_id, workspace_id, file_id, original_filename, logical_filename, mime_type, size, storage_path, content_hash)
+                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
+                ON CONFLICT (document_id, file_id) DO UPDATE SET
+                    original_filename = EXCLUDED.original_filename,
+                    logical_filename = EXCLUDED.logical_filename,
+                    mime_type = EXCLUDED.mime_type,
+                    size = EXCLUDED.size,
+                    storage_path = EXCLUDED.storage_path,
+                    content_hash = EXCLUDED.content_hash,
+                    updated_at = now()
+                RETURNING id"#,
+            )
+            .bind(input.document_id)
+            .bind(input.workspace_id)
+            .bind(input.file_id)
+            .bind(&input.original_filename)
+            .bind(&input.logical_filename)
+            .bind(&input.mime_type)
+            .bind(input.size)
+            .bind(&input.storage_path)
+            .bind(&input.content_hash)
+            .fetch_one(&self.pool)
+            .await?;
+            Ok(id)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_public_files(&self, doc_id: Uuid) -> PortResult<Vec<PublicFileRow>> {
+        let out: anyhow::Result<Vec<PublicFileRow>> = async {
+            let rows = sqlx::query(
+                r#"SELECT id, document_id, workspace_id, file_id, original_filename, logical_filename,
+                    mime_type, size, storage_path, content_hash, created_at
+                FROM public_document_files
+                WHERE document_id = $1
+                ORDER BY created_at"#,
+            )
+            .bind(doc_id)
+            .fetch_all(&self.pool)
+            .await?;
+            Ok(rows
+                .into_iter()
+                .map(|r| PublicFileRow {
+                    id: r.get("id"),
+                    document_id: r.get("document_id"),
+                    workspace_id: r.get("workspace_id"),
+                    file_id: r.get("file_id"),
+                    original_filename: r.get("original_filename"),
+                    logical_filename: r.get("logical_filename"),
+                    mime_type: r.get("mime_type"),
+                    size: r.get("size"),
+                    storage_path: r.get("storage_path"),
+                    content_hash: r.get("content_hash"),
+                    created_at: r.get("created_at"),
+                })
+                .collect())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_public_file(
+        &self,
+        doc_id: Uuid,
+        file_id: Uuid,
+    ) -> PortResult<Option<PublicFileRow>> {
+        let out: anyhow::Result<Option<PublicFileRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT id, document_id, workspace_id, file_id, original_filename, logical_filename,
+                    mime_type, size, storage_path, content_hash, created_at
+                FROM public_document_files
+                WHERE document_id = $1 AND file_id = $2"#,
+            )
+            .bind(doc_id)
+            .bind(file_id)
+            .fetch_optional(&self.pool)
+            .await?;
+            Ok(row.map(|r| PublicFileRow {
+                id: r.get("id"),
+                document_id: r.get("document_id"),
+                workspace_id: r.get("workspace_id"),
+                file_id: r.get("file_id"),
+                original_filename: r.get("original_filename"),
+                logical_filename: r.get("logical_filename"),
+                mime_type: r.get("mime_type"),
+                size: r.get("size"),
+                storage_path: r.get("storage_path"),
+                content_hash: r.get("content_hash"),
+                created_at: r.get("created_at"),
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_public_file_by_logical_filename(
+        &self,
+        doc_id: Uuid,
+        logical_filename: &str,
+    ) -> PortResult<Option<PublicFileRow>> {
+        let out: anyhow::Result<Option<PublicFileRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT id, document_id, workspace_id, file_id, original_filename, logical_filename,
+                    mime_type, size, storage_path, content_hash, created_at
+                FROM public_document_files
+                WHERE document_id = $1 AND logical_filename = $2"#,
+            )
+            .bind(doc_id)
+            .bind(logical_filename)
+            .fetch_optional(&self.pool)
+            .await?;
+            Ok(row.map(|r| PublicFileRow {
+                id: r.get("id"),
+                document_id: r.get("document_id"),
+                workspace_id: r.get("workspace_id"),
+                file_id: r.get("file_id"),
+                original_filename: r.get("original_filename"),
+                logical_filename: r.get("logical_filename"),
+                mime_type: r.get("mime_type"),
+                size: r.get("size"),
+                storage_path: r.get("storage_path"),
+                content_hash: r.get("content_hash"),
+                created_at: r.get("created_at"),
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_public_files(&self, doc_id: Uuid) -> PortResult<usize> {
+        let out: anyhow::Result<usize> = async {
+            let res = sqlx::query(r#"DELETE FROM public_document_files WHERE document_id = $1"#)
+                .bind(doc_id)
+                .execute(&self.pool)
+                .await?;
+            Ok(res.rows_affected() as usize)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
 }
diff --git a/api/crates/infrastructure/src/documents/db/repositories/share_keys_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/share_keys_repository_sqlx/mod.rs
new file mode 100644
index 00000000..0325c30a
--- /dev/null
+++ b/api/crates/infrastructure/src/documents/db/repositories/share_keys_repository_sqlx/mod.rs
@@ -0,0 +1,165 @@
+use async_trait::async_trait;
+use sqlx::Row;
+use uuid::Uuid;
+
+use crate::core::db::PgPool;
+use application::core::ports::errors::PortResult;
+use application::documents::ports::share_keys_repository::{ShareEncryptedKeyRow, ShareKeysRepository};
+use domain::identity::keys::KdfParams;
+
+pub struct SqlxShareKeysRepository {
+    pool: PgPool,
+}
+
+impl SqlxShareKeysRepository {
+    pub fn new(pool: PgPool) -> Self {
+        Self { pool }
+    }
+}
+
+#[async_trait]
+impl ShareKeysRepository for SqlxShareKeysRepository {
+    async fn get_encrypted_dek(&self, share_id: Uuid) -> PortResult<Option<ShareEncryptedKeyRow>> {
+        let out: anyhow::Result<Option<ShareEncryptedKeyRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT share_id, encrypted_dek, salt, kdf_params,
+                    creator_encrypted_share_key, creator_share_key_nonce, created_at
+                FROM share_encrypted_keys
+                WHERE share_id = $1"#,
+            )
+            .bind(share_id)
+            .fetch_optional(&self.pool)
+            .await?;
+
+            Ok(row.map(|row| {
+                let kdf_params_json: Option<serde_json::Value> = row.get("kdf_params");
+                ShareEncryptedKeyRow {
+                    share_id: row.get("share_id"),
+                    encrypted_dek: row.get("encrypted_dek"),
+                    salt: row.get("salt"),
+                    kdf_params: kdf_params_json.and_then(|v| serde_json::from_value(v).ok()),
+                    creator_encrypted_share_key: row.get("creator_encrypted_share_key"),
+                    creator_share_key_nonce: row.get("creator_share_key_nonce"),
+                    created_at: row.get("created_at"),
+                }
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_salt(&self, share_id: Uuid) -> PortResult<Option<Vec<u8>>> {
+        let out: anyhow::Result<Option<Vec<u8>>> = async {
+            let row = sqlx::query(r#"SELECT salt FROM share_encrypted_keys WHERE share_id = $1"#)
+                .bind(share_id)
+                .fetch_optional(&self.pool)
+                .await?;
+
+            Ok(row.and_then(|r| r.get("salt")))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn store_encrypted_dek(
+        &self,
+        share_id: Uuid,
+        encrypted_dek: &[u8],
+        creator_encrypted_share_key: Option<&[u8]>,
+        creator_share_key_nonce: Option<&[u8]>,
+    ) -> PortResult<ShareEncryptedKeyRow> {
+        let out: anyhow::Result<ShareEncryptedKeyRow> = async {
+            let row = sqlx::query(
+                r#"INSERT INTO share_encrypted_keys (share_id, encrypted_dek, creator_encrypted_share_key, creator_share_key_nonce, created_at)
+                VALUES ($1, $2, $3, $4, now())
+                ON CONFLICT (share_id)
+                DO UPDATE SET
+                    encrypted_dek = EXCLUDED.encrypted_dek,
+                    salt = NULL,
+                    kdf_params = NULL,
+                    creator_encrypted_share_key = EXCLUDED.creator_encrypted_share_key,
+                    creator_share_key_nonce = EXCLUDED.creator_share_key_nonce
+                RETURNING share_id, encrypted_dek, salt, kdf_params, creator_encrypted_share_key, creator_share_key_nonce, created_at"#,
+            )
+            .bind(share_id)
+            .bind(encrypted_dek)
+            .bind(creator_encrypted_share_key)
+            .bind(creator_share_key_nonce)
+            .fetch_one(&self.pool)
+            .await?;
+
+            let kdf_params_json: Option<serde_json::Value> = row.get("kdf_params");
+            Ok(ShareEncryptedKeyRow {
+                share_id: row.get("share_id"),
+                encrypted_dek: row.get("encrypted_dek"),
+                salt: row.get("salt"),
+                kdf_params: kdf_params_json.and_then(|v| serde_json::from_value(v).ok()),
+                creator_encrypted_share_key: row.get("creator_encrypted_share_key"),
+                creator_share_key_nonce: row.get("creator_share_key_nonce"),
+                created_at: row.get("created_at"),
+            })
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn store_password_protected_dek(
+        &self,
+        share_id: Uuid,
+        encrypted_dek: &[u8],
+        salt: &[u8],
+        kdf_params: &KdfParams,
+        creator_encrypted_share_key: Option<&[u8]>,
+        creator_share_key_nonce: Option<&[u8]>,
+    ) -> PortResult<ShareEncryptedKeyRow> {
+        let out: anyhow::Result<ShareEncryptedKeyRow> = async {
+            let kdf_params_json = serde_json::to_value(kdf_params)?;
+            let row = sqlx::query(
+                r#"INSERT INTO share_encrypted_keys (share_id, encrypted_dek, salt, kdf_params, creator_encrypted_share_key, creator_share_key_nonce, created_at)
+                VALUES ($1, $2, $3, $4, $5, $6, now())
+                ON CONFLICT (share_id)
+                DO UPDATE SET
+                    encrypted_dek = EXCLUDED.encrypted_dek,
+                    salt = EXCLUDED.salt,
+                    kdf_params = EXCLUDED.kdf_params,
+                    creator_encrypted_share_key = EXCLUDED.creator_encrypted_share_key,
+                    creator_share_key_nonce = EXCLUDED.creator_share_key_nonce
+                RETURNING share_id, encrypted_dek, salt, kdf_params, creator_encrypted_share_key, creator_share_key_nonce, created_at"#,
+            )
+            .bind(share_id)
+            .bind(encrypted_dek)
+            .bind(salt)
+            .bind(&kdf_params_json)
+            .bind(creator_encrypted_share_key)
+            .bind(creator_share_key_nonce)
+            .fetch_one(&self.pool)
+            .await?;
+
+            let kdf_params_json: Option<serde_json::Value> = row.get("kdf_params");
+            Ok(ShareEncryptedKeyRow {
+                share_id: row.get("share_id"),
+                encrypted_dek: row.get("encrypted_dek"),
+                salt: row.get("salt"),
+                kdf_params: kdf_params_json.and_then(|v| serde_json::from_value(v).ok()),
+                creator_encrypted_share_key: row.get("creator_encrypted_share_key"),
+                creator_share_key_nonce: row.get("creator_share_key_nonce"),
+                created_at: row.get("created_at"),
+            })
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_encrypted_dek(&self, share_id: Uuid) -> PortResult<bool> {
+        let out: anyhow::Result<bool> = async {
+            let result = sqlx::query(r#"DELETE FROM share_encrypted_keys WHERE share_id = $1"#)
+                .bind(share_id)
+                .execute(&self.pool)
+                .await?;
+
+            Ok(result.rows_affected() > 0)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
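`share_encrypted_keys` supports two modes, mirrored by the two upserts above: a link-only share (salt and KDF params reset to NULL) and a password-protected one, where the DEK is wrapped under a key derived from the password with the stored `salt`/`kdf_params`. An illustrative derivation; Argon2id is an assumption, since the diff only shows opaque `kdf_params` JSON:

```rust
use argon2::{Algorithm, Argon2, Params, Version};

/// Illustrative: derive a 32-byte wrapping key from a share password using
/// parameters the server stored verbatim alongside the wrapped DEK.
fn derive_share_key(password: &str, salt: &[u8]) -> Result<[u8; 32], argon2::Error> {
    // m_cost in KiB, t_cost iterations, p_cost lanes - example values only
    let params = Params::new(64 * 1024, 3, 1, Some(32))?;
    let argon = Argon2::new(Algorithm::Argon2id, Version::V0x13, params);
    let mut key = [0u8; 32];
    argon.hash_password_into(password.as_bytes(), salt, &mut key)?;
    Ok(key)
}
```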
diff --git a/api/crates/infrastructure/src/documents/db/repositories/shares_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/shares_repository_sqlx/mod.rs
index ecaa2b6d..44ba591a 100644
--- a/api/crates/infrastructure/src/documents/db/repositories/shares_repository_sqlx/mod.rs
+++ b/api/crates/infrastructure/src/documents/db/repositories/shares_repository_sqlx/mod.rs
@@ -146,8 +146,11 @@ impl SharesRepository for SqlxSharesRepository {
         let out: anyhow::Result> = async {
             let rows = sqlx::query(
                 r#"SELECT s.id, s.token, s.permission, s.expires_at, s.parent_share_id, s.created_at,
-                    d.id as document_id, d.title as document_title, d.type as document_type
-                FROM shares s JOIN documents d ON d.id = s.document_id
+                    d.id as document_id, d.title as document_title, d.type as document_type,
+                    sek.creator_encrypted_share_key, sek.creator_share_key_nonce
+                FROM shares s
+                JOIN documents d ON d.id = s.document_id
+                LEFT JOIN share_encrypted_keys sek ON sek.share_id = s.id
                 WHERE s.document_id = $1 AND d.workspace_id = $2
                 ORDER BY s.created_at DESC"#,
             )
@@ -169,6 +172,8 @@
                     document_type: Self::parse_document_type(&document_type_raw)?,
                     document_title: Title::new(r.get::<String, _>("document_title")),
                     created_at: r.get("created_at"),
+                    creator_encrypted_share_key: r.try_get("creator_encrypted_share_key").ok().flatten(),
+                    creator_share_key_nonce: r.try_get("creator_share_key_nonce").ok().flatten(),
                 });
             }
             Ok(out)
@@ -387,9 +392,11 @@
         let out: anyhow::Result> = async {
             let rows = sqlx::query(
                 r#"SELECT s.id, s.token, s.permission, s.expires_at, s.created_at, s.parent_share_id,
-                    d.id as document_id, d.title as document_title, d.type as document_type
+                    d.id as document_id, d.title as document_title, d.type as document_type,
+                    sek.creator_encrypted_share_key, sek.creator_share_key_nonce
                 FROM shares s
                 JOIN documents d ON d.id = s.document_id
+                LEFT JOIN share_encrypted_keys sek ON sek.share_id = s.id
                 WHERE d.workspace_id = $1 AND (s.expires_at IS NULL OR s.expires_at > now())
                 ORDER BY s.created_at DESC"#,
             )
@@ -410,6 +417,8 @@
                     document_type: Self::parse_document_type(&document_type_raw)?,
                     document_title: Title::new(r.get::<String, _>("document_title")),
                     created_at: r.get("created_at"),
+                    creator_encrypted_share_key: r.try_get("creator_encrypted_share_key").ok().flatten(),
+                    creator_share_key_nonce: r.try_get("creator_share_key_nonce").ok().flatten(),
                 });
             }
             Ok(out)
@@ -487,6 +496,32 @@
         out.map_err(Into::into)
     }
 
+    async fn list_child_share_info(&self, parent_share_id: Uuid) -> PortResult<Vec<application::documents::ports::sharing::shares_repository::ChildShareInfo>> {
+        let out: anyhow::Result<Vec<application::documents::ports::sharing::shares_repository::ChildShareInfo>> = async {
+            let rows = sqlx::query(
+                r#"SELECT s.id as share_id, s.document_id, s.token, sek.encrypted_dek
+                FROM shares s
+                LEFT JOIN share_encrypted_keys sek ON sek.share_id = s.id
+                WHERE s.parent_share_id = $1 AND (s.expires_at IS NULL OR s.expires_at > now())"#
+            )
+            .bind(parent_share_id)
+            .fetch_all(&self.pool)
+            .await?;
+            let mut out = Vec::with_capacity(rows.len());
+            for r in rows.into_iter() {
+                out.push(application::documents::ports::sharing::shares_repository::ChildShareInfo {
+                    share_id: r.get("share_id"),
+                    document_id: r.get("document_id"),
+                    token: r.get("token"),
+                    encrypted_dek: r.try_get::<Option<Vec<u8>>, _>("encrypted_dek").ok().flatten(),
+                });
+            }
+            Ok(out)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
     async fn materialize_folder_share(
         &self,
         workspace_id: Uuid,
diff --git a/api/crates/infrastructure/src/documents/db/repositories/tag_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/tag_repository_sqlx/mod.rs
deleted file mode 100644
index 5cf7d889..00000000
--- a/api/crates/infrastructure/src/documents/db/repositories/tag_repository_sqlx/mod.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-use async_trait::async_trait;
-use sqlx::Row;
-use uuid::Uuid;
-
-use crate::core::db::PgPool;
-use application::core::ports::errors::PortResult;
-use application::documents::ports::tagging::tag_repository::{TagRepository, TagSummary};
-
-pub struct SqlxTagRepository {
-    pub pool: PgPool,
-}
-
-impl SqlxTagRepository {
-    pub fn new(pool: PgPool) -> Self {
-        Self { pool }
-    }
-}
-
-#[async_trait]
-impl TagRepository for SqlxTagRepository {
-    async fn list_tags(
-        &self,
-        owner_id: Uuid,
-        filter: Option<String>,
-    ) -> PortResult<Vec<TagSummary>> {
-        let out: anyhow::Result<Vec<TagSummary>> = async {
-            let rows = if let Some(f) = filter.filter(|s| !s.trim().is_empty()) {
-                let like = format!("%{}%", f);
-                sqlx::query(
-                    r#"SELECT t.name, COUNT(*)::BIGINT AS count
-                    FROM document_tags dt
-                    JOIN tags t ON t.id = dt.tag_id
-                    JOIN documents d ON d.id = dt.document_id AND d.owner_id = $1
-                    WHERE t.name ILIKE $2
-                    GROUP BY t.name
-                    ORDER BY count DESC, t.name ASC"#,
-                )
-                .bind(owner_id)
-                .bind(like)
-                .fetch_all(&self.pool)
-                .await?
-            } else {
-                sqlx::query(
-                    r#"SELECT t.name, COUNT(*)::BIGINT AS count
-                    FROM document_tags dt
-                    JOIN tags t ON t.id = dt.tag_id
-                    JOIN documents d ON d.id = dt.document_id AND d.owner_id = $1
-                    GROUP BY t.name
-                    ORDER BY count DESC, t.name ASC"#,
-                )
-                .bind(owner_id)
-                .fetch_all(&self.pool)
-                .await?
-            };
-            Ok(rows
-                .into_iter()
-                .map(|r| TagSummary {
-                    name: r.get("name"),
-                    count: r.get("count"),
-                })
-                .collect())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-}
diff --git a/api/crates/infrastructure/src/documents/db/repositories/tagging_repository_sqlx/mod.rs b/api/crates/infrastructure/src/documents/db/repositories/tagging_repository_sqlx/mod.rs
deleted file mode 100644
index a5acbc53..00000000
--- a/api/crates/infrastructure/src/documents/db/repositories/tagging_repository_sqlx/mod.rs
+++ /dev/null
@@ -1,72 +0,0 @@
-use async_trait::async_trait;
-use sqlx::Row;
-use uuid::Uuid;
-
-use crate::core::db::PgPool;
-use application::core::ports::errors::PortResult;
-use application::documents::ports::tagging::tagging_repository::TaggingRepository;
-
-pub struct SqlxTaggingRepository {
-    pub pool: PgPool,
-}
-
-impl SqlxTaggingRepository {
-    pub fn new(pool: PgPool) -> Self {
-        Self { pool }
-    }
-}
-
-#[async_trait]
-impl TaggingRepository for SqlxTaggingRepository {
-    async fn clear_document_tags(&self, doc_id: Uuid) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query("DELETE FROM document_tags WHERE document_id = $1")
-                .bind(doc_id)
-                .execute(&self.pool)
-                .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn upsert_tag_return_id(&self, name: &str) -> PortResult<i64> {
-        let out: anyhow::Result<i64> = async {
-            let row = sqlx::query("INSERT INTO tags(name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name RETURNING id")
-                .bind(name)
-                .fetch_one(&self.pool)
-                .await?;
-            Ok(row.get("id"))
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn owner_doc_exists(&self, doc_id: Uuid, owner_id: Uuid) -> PortResult<bool> {
-        let out: anyhow::Result<bool> = async {
-            let n = sqlx::query_scalar::<_, i64>(
-                "SELECT COUNT(1) FROM documents WHERE id = $1 AND owner_id = $2",
-            )
-            .bind(doc_id)
-            .bind(owner_id)
-            .fetch_one(&self.pool)
-            .await?;
-            Ok(n > 0)
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn associate_document_tag(&self, doc_id: Uuid, tag_id: i64) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query("INSERT INTO document_tags(document_id, tag_id) VALUES ($1, $2)")
-                .bind(doc_id)
-                .bind(tag_id)
-                .execute(&self.pool)
-                .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-}
'Hiragino Kaku Gothic ProN', 'Yu Gothic', - 'PingFang SC', 'Microsoft YaHei', 'Microsoft JhengHei', 'Malgun Gothic', sans-serif; -} - -code, -pre { - font-family: 'Noto Sans Mono CJK JP', 'Noto Sans Mono', 'Source Code Pro', 'Roboto Mono', - 'Menlo', 'Consolas', 'monospace'; -} -"#; - -static PANDOC_WORKDIR_LOCK: Lazy> = Lazy::new(|| Mutex::new(())); - -#[derive(Default)] -pub struct DefaultDocumentExporter; - -impl DefaultDocumentExporter { - pub fn new() -> Self { - Self - } -} - -#[async_trait] -impl DocumentExporter for DefaultDocumentExporter { - async fn export( - &self, - assets: DocumentExportAssets, - format: DocumentDownloadFormat, - ) -> PortResult { - let out: anyhow::Result = async { - let bytes = match format { - DocumentDownloadFormat::Archive => build_archive(&assets)?, - DocumentDownloadFormat::Markdown => assets.markdown.clone(), - _ if needs_pandoc(&format) => render_with_pandoc(format, &assets).await?, - _ => unreachable!("unsupported format"), - }; - - Ok(DocumentDownload { - filename: format.file_name(&assets.safe_title), - content_type: format.content_type().to_string(), - bytes, - }) - } - .await; - out.map_err(Into::into) - } -} - -fn needs_pandoc(format: &DocumentDownloadFormat) -> bool { - !matches!( - format, - DocumentDownloadFormat::Archive | DocumentDownloadFormat::Markdown - ) -} - -fn build_archive(assets: &DocumentExportAssets) -> anyhow::Result> { - let markdown_entry = format!("{}/{}.md", assets.safe_title, assets.safe_title); - let mut cursor = std::io::Cursor::new(Vec::new()); - { - let mut zip = zip::ZipWriter::new(&mut cursor); - let options = FileOptions::default() - .compression_method(CompressionMethod::Deflated) - .unix_permissions(0o644); - zip.start_file(markdown_entry, options)?; - zip.write_all(&assets.markdown)?; - for attachment in &assets.attachments { - let entry = format!( - "{}/{}", - assets.safe_title, - attachment_trimmed_path(attachment) - ); - zip.start_file(entry, options)?; - zip.write_all(&attachment.bytes)?; - } - zip.finish()?; - } - Ok(cursor.into_inner()) -} - -async fn render_with_pandoc( - format: DocumentDownloadFormat, - assets: &DocumentExportAssets, -) -> anyhow::Result> { - let tmp_dir = tempdir().context("unable to create temporary directory for pandoc")?; - let markdown_source = markdown_string(assets)?; - let display_title = assets.display_title.clone(); - - for attachment in &assets.attachments { - materialize_attachment_under(attachment, tmp_dir.path()).await?; - } - - let resource_dir = tmp_dir.path().to_path_buf(); - let config = PandocCommandConfig::for_format(format) - .ok_or_else(|| anyhow::anyhow!("unsupported pandoc format {:?}", format))?; - let format_copy = format; - let output_bytes = task::spawn_blocking(move || -> anyhow::Result> { - let mut pandoc_cmd = pandoc::new(); - pandoc_cmd.set_input(InputKind::Pipe(markdown_source)); - pandoc_cmd.set_input_format(InputFormat::Markdown, Vec::new()); - pandoc_cmd.add_option(PandocOption::ResourcePath(vec![resource_dir.clone()])); - pandoc_cmd.add_option(PandocOption::Meta("title".to_string(), Some(String::new()))); - if let Some(title) = display_title.as_deref() { - if !title.is_empty() { - pandoc_cmd.add_option(PandocOption::Meta( - "pagetitle".to_string(), - Some(title.to_string()), - )); - } - } - - pandoc_cmd.set_output_format(config.output_format, Vec::new()); - match config.destination { - PandocOutputKind::Pipe => { - pandoc_cmd.set_output(OutputKind::Pipe); - } - PandocOutputKind::File(file_name) => { - let target = tmp_dir.path().join(file_name); - 
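An aside on the `PANDOC_WORKDIR_LOCK` / `WorkingDirGuard` pair this deletion removes: `std::env::set_current_dir` mutates process-global state, so each pandoc run had to serialize its chdir behind a mutex and restore the directory afterwards. A minimal sketch of that pattern, with illustrative names and the Drop-based restore of the removed code reduced to a plain call sequence:

```rust
use std::path::Path;
use std::sync::Mutex;

// Process-wide lock: the working directory is shared by all threads.
static CWD_LOCK: Mutex<()> = Mutex::new(());

fn with_working_dir<T>(dir: &Path, f: impl FnOnce() -> T) -> std::io::Result<T> {
    let _guard = CWD_LOCK.lock().unwrap(); // one chdir at a time
    let original = std::env::current_dir()?; // remember where we were
    std::env::set_current_dir(dir)?; // enter the pandoc resource dir
    let result = f(); // run the conversion
    std::env::set_current_dir(&original)?; // restore afterwards
    Ok(result)
}
```

The deleted `WorkingDirGuard` restored the directory in `Drop`, so even a panicking conversion could not strand the process in the temp dir; the sketch above skips that for brevity.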
pandoc_cmd.set_output(OutputKind::File(target)); - } - } - if config.standalone { - pandoc_cmd.add_option(PandocOption::Standalone); - } - if config.self_contained { - pandoc_cmd.add_option(PandocOption::SelfContained); - } - if config.include_default_css { - let css_path = resource_dir.join("refmd-defaults.css"); - std::fs::write(&css_path, DEFAULT_PDF_CSS).with_context(|| { - format!("failed to write temporary CSS file {}", css_path.display()) - })?; - pandoc_cmd.add_option(PandocOption::Css(css_path.to_string_lossy().to_string())); - } - let mut pdf_engine_opts: Vec = config - .pdf_engine_opts - .iter() - .map(|opt| opt.to_string()) - .collect(); - if config.pdf_engine.is_some() { - pdf_engine_opts.push("--allow".to_string()); - pdf_engine_opts.push(resource_dir.display().to_string()); - } - if let Some(engine) = config.pdf_engine { - pandoc_cmd.add_option(PandocOption::PdfEngine(PathBuf::from(engine))); - } - for opt in pdf_engine_opts { - pandoc_cmd.add_option(PandocOption::PdfEngineOpt(opt)); - } - - let _lock = PANDOC_WORKDIR_LOCK.lock().unwrap(); - let _cwd_guard = WorkingDirGuard::change_to(&resource_dir)?; - - let output = pandoc_cmd.execute().map_err(|err| match err { - pandoc::PandocError::PandocNotFound => anyhow::anyhow!( - "pandoc executable not found in PATH; install pandoc to enable {} export", - format_copy.extension() - ), - pandoc::PandocError::IoErr(io_err) => anyhow::Error::new(io_err), - pandoc::PandocError::Err(output) => { - let stderr = String::from_utf8_lossy(&output.stderr); - anyhow::anyhow!( - "pandoc failed (status {}): {}", - output.status, - stderr.trim() - ) - } - other => anyhow::Error::new(other), - })?; - let bytes = match output { - PandocOutput::ToBuffer(text) => text.into_bytes(), - PandocOutput::ToBufferRaw(raw) => raw, - PandocOutput::ToFile(path) => std::fs::read(&path).map_err(anyhow::Error::new)?, - }; - Ok(bytes) - }) - .await? 
- .with_context(|| format!("pandoc conversion failed for format {:?}", format))?; - - Ok(output_bytes) -} - -fn markdown_string(assets: &DocumentExportAssets) -> anyhow::Result { - String::from_utf8(assets.markdown.clone()) - .map_err(|_| anyhow::anyhow!("document markdown is not valid UTF-8")) -} - -async fn materialize_attachment_under( - attachment: &DocumentExportAttachment, - root: &Path, -) -> anyhow::Result<()> { - let clean_path = Path::new(&attachment.relative_path); - if clean_path.as_os_str().is_empty() { - return Ok(()); - } - let target = root.join(clean_path); - if let Some(parent) = target.parent() { - fs::create_dir_all(parent) - .await - .with_context(|| format!("failed to prepare {}", parent.display()))?; - } - fs::write(&target, &attachment.bytes) - .await - .with_context(|| format!("failed to write attachment {}", attachment.relative_path))?; - Ok(()) -} - -fn attachment_trimmed_path(attachment: &DocumentExportAttachment) -> &str { - attachment.relative_path.trim_start_matches('/') -} - -struct WorkingDirGuard { - original: Option, -} - -impl WorkingDirGuard { - fn change_to(target: &Path) -> anyhow::Result { - let original = - std::env::current_dir().context("unable to read current working directory")?; - std::env::set_current_dir(target).with_context(|| { - format!("failed to change working directory to {}", target.display()) - })?; - Ok(Self { - original: Some(original), - }) - } -} - -impl Drop for WorkingDirGuard { - fn drop(&mut self) { - if let Some(original) = self.original.take() { - if let Err(error) = std::env::set_current_dir(&original) { - tracing::error!( - "failed to restore working directory to {}: {}", - original.display(), - error - ); - } - } - } -} - -#[derive(Clone)] -struct PandocCommandConfig { - output_format: OutputFormat, - destination: PandocOutputKind, - standalone: bool, - self_contained: bool, - pdf_engine: Option<&'static str>, - pdf_engine_opts: &'static [&'static str], - include_default_css: bool, -} - -impl PandocCommandConfig { - fn for_format(format: DocumentDownloadFormat) -> Option { - use DocumentDownloadFormat::*; - let config = match format { - Archive | Markdown => return None, - Html => Self { - output_format: OutputFormat::Html, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Html5 => Self { - output_format: OutputFormat::Html5, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Pdf => Self { - output_format: OutputFormat::Pdf, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: true, - pdf_engine: Some("wkhtmltopdf"), - pdf_engine_opts: &["--enable-local-file-access"], - }, - Docx => Self { - output_format: OutputFormat::Docx, - destination: PandocOutputKind::File("document.docx"), - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Latex => Self { - output_format: OutputFormat::Latex, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Beamer => Self { - output_format: OutputFormat::Beamer, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Context => 
Self { - output_format: OutputFormat::Context, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Man => Self { - output_format: OutputFormat::Man, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - MediaWiki => Self { - output_format: OutputFormat::MediaWiki, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Dokuwiki => Self { - output_format: OutputFormat::Dokuwiki, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Textile => Self { - output_format: OutputFormat::Textile, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Org => Self { - output_format: OutputFormat::Org, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Texinfo => Self { - output_format: OutputFormat::Texinfo, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Opml => Self { - output_format: OutputFormat::Opml, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Docbook => Self { - output_format: OutputFormat::Docbook, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - OpenDocument => Self { - output_format: OutputFormat::OpenDocument, - destination: PandocOutputKind::File("document.odt"), - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Odt => Self { - output_format: OutputFormat::Odt, - destination: PandocOutputKind::File("document.odt"), - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Rtf => Self { - output_format: OutputFormat::Rtf, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Epub => Self { - output_format: OutputFormat::Epub, - destination: PandocOutputKind::File("document.epub"), - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Epub3 => Self { - output_format: OutputFormat::Epub3, - destination: PandocOutputKind::File("document.epub"), - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Fb2 => Self { - output_format: OutputFormat::Fb2, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Asciidoc => Self { - output_format: OutputFormat::Asciidoc, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: 
None, - pdf_engine_opts: &[], - }, - Icml => Self { - output_format: OutputFormat::Icml, - destination: PandocOutputKind::File("document.icml"), - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Slidy => Self { - output_format: OutputFormat::Slidy, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Slideous => Self { - output_format: OutputFormat::Slideous, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Dzslides => Self { - output_format: OutputFormat::Dzslides, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Revealjs => Self { - output_format: OutputFormat::Revealjs, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - S5 => Self { - output_format: OutputFormat::S5, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: true, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Json => Self { - output_format: OutputFormat::Json, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Plain => Self { - output_format: OutputFormat::Plain, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Commonmark => Self { - output_format: OutputFormat::Commonmark, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - CommonmarkX => Self { - output_format: OutputFormat::CommonmarkX, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - MarkdownStrict => Self { - output_format: OutputFormat::MarkdownStrict, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - MarkdownPhpextra => Self { - output_format: OutputFormat::MarkdownPhpextra, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - MarkdownGithub => Self { - output_format: OutputFormat::MarkdownGithub, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Rst => Self { - output_format: OutputFormat::Rst, - destination: PandocOutputKind::Pipe, - standalone: true, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Native => Self { - output_format: OutputFormat::Native, - destination: PandocOutputKind::Pipe, - standalone: false, - self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - Haddock => Self { - output_format: OutputFormat::Haddock, - destination: PandocOutputKind::Pipe, - standalone: false, - 
self_contained: false, - include_default_css: false, - pdf_engine: None, - pdf_engine_opts: &[], - }, - }; - Some(config) - } -} - -#[derive(Clone, Copy)] -enum PandocOutputKind { - Pipe, - File(&'static str), -} diff --git a/api/crates/infrastructure/src/documents/mod.rs b/api/crates/infrastructure/src/documents/mod.rs index 1d3f4177..aae8141f 100644 --- a/api/crates/infrastructure/src/documents/mod.rs +++ b/api/crates/infrastructure/src/documents/mod.rs @@ -1,7 +1,6 @@ pub mod db; pub mod doc_event_log; pub mod event_poller; -pub mod exporter; pub mod git_dirty_subscriber; pub mod realtime; pub mod tx_runner_sqlx; diff --git a/api/crates/infrastructure/src/documents/realtime/doc_persistence.rs b/api/crates/infrastructure/src/documents/realtime/doc_persistence.rs index 1e3826ce..d3ecb9ef 100644 --- a/api/crates/infrastructure/src/documents/realtime/doc_persistence.rs +++ b/api/crates/infrastructure/src/documents/realtime/doc_persistence.rs @@ -5,7 +5,8 @@ use uuid::Uuid; use crate::core::db::PgPool; use application::core::ports::errors::PortResult; use application::documents::ports::realtime::realtime_persistence_port::{ - DocPersistencePort, DocumentMissingError, SnapshotEntry, + ContentEncryptionMeta, DocPersistencePort, DocumentMissingError, EncryptedUpdateData, + EncryptedUpdateEntry, SnapshotEntry, }; #[derive(Clone)] @@ -42,6 +43,31 @@ impl DocPersistencePort for SqlxDocPersistenceAdapter { out.map_err(Into::into) } + async fn append_encrypted_update_with_seq( + &self, + doc_id: &Uuid, + seq: i64, + update: &EncryptedUpdateData, + ) -> PortResult<()> { + let out: anyhow::Result<()> = async { + sqlx::query( + r#"INSERT INTO document_updates (document_id, seq, update, nonce, signature, public_key) + VALUES ($1, $2, $3, $4, $5, $6)"#, + ) + .bind(doc_id) + .bind(seq) + .bind(&update.data) + .bind(update.nonce.as_deref()) + .bind(update.signature.as_deref()) + .bind(update.public_key.as_deref()) + .execute(&self.pool) + .await?; + Ok(()) + } + .await; + out.map_err(Into::into) + } + async fn latest_update_seq(&self, doc_id: &Uuid) -> PortResult> { let out: anyhow::Result> = async { let row = sqlx::query( @@ -61,15 +87,22 @@ impl DocPersistencePort for SqlxDocPersistenceAdapter { doc_id: &Uuid, version: i64, snapshot: &[u8], + encryption_meta: Option<&ContentEncryptionMeta>, ) -> PortResult<()> { let out: anyhow::Result<()> = async { + let (nonce, signature, seq_at_snapshot) = encryption_meta + .map(|m| (m.nonce.as_deref(), m.signature.as_deref(), m.seq_at_snapshot)) + .unwrap_or((None, None, None)); let result = sqlx::query( - "INSERT INTO document_snapshots (document_id, version, snapshot) VALUES ($1, $2, $3) - ON CONFLICT (document_id, version) DO UPDATE SET snapshot = EXCLUDED.snapshot", + "INSERT INTO document_snapshots (document_id, version, snapshot, nonce, signature, seq_at_snapshot) VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT (document_id, version) DO UPDATE SET snapshot = EXCLUDED.snapshot, nonce = EXCLUDED.nonce, signature = EXCLUDED.signature, seq_at_snapshot = EXCLUDED.seq_at_snapshot", ) .bind(doc_id) .bind(version as i32) .bind(snapshot) + .bind(nonce) + .bind(signature) + .bind(seq_at_snapshot) .execute(&self.pool) .await; @@ -96,7 +129,7 @@ impl DocPersistencePort for SqlxDocPersistenceAdapter { async fn latest_snapshot_entry(&self, doc_id: &Uuid) -> PortResult> { let out: anyhow::Result> = async { let row = sqlx::query( - "SELECT version, snapshot FROM document_snapshots WHERE document_id = $1 + "SELECT version, snapshot, nonce, signature, seq_at_snapshot FROM 
document_snapshots WHERE document_id = $1 ORDER BY version DESC LIMIT 1", ) .bind(doc_id) @@ -105,6 +138,9 @@ impl DocPersistencePort for SqlxDocPersistenceAdapter { Ok(row.map(|row| SnapshotEntry { version: row.get::("version") as i64, bytes: row.get("snapshot"), + nonce: row.try_get("nonce").ok(), + signature: row.try_get("signature").ok(), + seq_at_snapshot: row.try_get("seq_at_snapshot").ok().flatten(), })) } .await; @@ -163,4 +199,36 @@ impl DocPersistencePort for SqlxDocPersistenceAdapter { .await; out.map_err(Into::into) } + + async fn get_updates_since( + &self, + doc_id: &Uuid, + since_seq: i64, + ) -> PortResult> { + let out: anyhow::Result> = async { + let rows = sqlx::query( + r#"SELECT seq, update, nonce, signature, public_key + FROM document_updates + WHERE document_id = $1 AND seq > $2 + ORDER BY seq ASC"#, + ) + .bind(doc_id) + .bind(since_seq) + .fetch_all(&self.pool) + .await?; + + Ok(rows + .into_iter() + .map(|row| EncryptedUpdateEntry { + seq: row.get("seq"), + data: row.get("update"), + nonce: row.try_get("nonce").ok().flatten(), + signature: row.try_get("signature").ok().flatten(), + public_key: row.try_get("public_key").ok().flatten(), + }) + .collect()) + } + .await; + out.map_err(Into::into) + } } diff --git a/api/crates/infrastructure/src/documents/realtime/hub.rs b/api/crates/infrastructure/src/documents/realtime/hub.rs index 68723993..0e0d7e34 100644 --- a/api/crates/infrastructure/src/documents/realtime/hub.rs +++ b/api/crates/infrastructure/src/documents/realtime/hub.rs @@ -1,333 +1,148 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::sync::Arc; -use std::sync::Mutex as StdMutex; use std::sync::atomic::{AtomicBool, Ordering}; -use anyhow::Context; -use chrono::Utc; -use futures_util::SinkExt; -use tokio::sync::mpsc; +use base64::Engine; +use futures_util::{SinkExt, StreamExt}; use tokio::sync::{Mutex, RwLock}; -use tokio::time::{Duration, Instant, sleep}; +use tokio::time::Duration; use uuid::Uuid; -use yrs::GetString; -use yrs::block::ClientID; -use yrs::encoding::write::Write as YWrite; -use yrs::sync::awareness::AwarenessUpdate; -use yrs::sync::protocol::{MSG_SYNC, MSG_SYNC_UPDATE}; -use yrs::sync::{DefaultProtocol, Error as SyncError, Protocol}; -use yrs::updates::decoder::Decode; -use yrs::updates::encoder::{Encoder, EncoderV1}; -use yrs::{Doc, ReadTxn, StateVector, Text, Transact, Update}; -use yrs_warp::AwarenessRef; -use yrs_warp::broadcast::BroadcastGroup; - -use crate::documents::realtime::utils::wrap_stream_with_edit_guard; + +use crate::core::crypto::Ed25519Verifier; use crate::documents::realtime::{DynRealtimeSink, DynRealtimeStream}; use application::documents::ports::realtime::realtime_persistence_port::{ - DocPersistencePort, DocumentMissingError, -}; -use application::documents::services::realtime::doc_hydration::{ - DocHydrationService, HydrationOptions, + ContentEncryptionMeta, DocPersistencePort, EncryptedUpdateData, }; -use application::documents::services::realtime::snapshot::{ - SnapshotArchiveKind, SnapshotArchiveOptions, SnapshotPersistOptions, SnapshotService, +use application::documents::ports::realtime::realtime_types::{ + MessageType, RealtimeMessage, }; +use application::documents::services::realtime::snapshot::SnapshotService; type SharedRealtimeSink = Arc>; +/// E2EE Document Room - simple relay without CRDT merge +/// Server only relays encrypted messages and verifies signatures #[derive(Clone)] -pub struct DocumentRoom { - pub doc: Doc, - pub awareness: AwarenessRef, - pub broadcast: Arc, 
- #[allow(dead_code)] - persist_sub: yrs::Subscription, - pub seq: Arc>, // latest persisted seq +pub struct E2EEDocumentRoom { + /// Connected clients for broadcasting + clients: Arc>>, + /// Latest persisted sequence number + pub seq: Arc>, + /// Flag to skip filesystem persistence (e.g., after ingest) pub skip_fs_persist: Arc, } +impl E2EEDocumentRoom { + pub fn new(start_seq: i64) -> Self { + Self { + clients: Arc::new(RwLock::new(Vec::new())), + seq: Arc::new(Mutex::new(start_seq)), + skip_fs_persist: Arc::new(AtomicBool::new(false)), + } + } + + /// Add a client to the room + pub async fn add_client(&self, sink: SharedRealtimeSink) { + self.clients.write().await.push(sink); + } + + /// Remove a client from the room + pub async fn remove_client(&self, sink: &SharedRealtimeSink) { + let mut clients = self.clients.write().await; + clients.retain(|c| !Arc::ptr_eq(c, sink)); + } + + /// Broadcast message to all clients except the sender + pub async fn broadcast_except(&self, message: &[u8], sender: &SharedRealtimeSink) { + let clients = self.clients.read().await; + for client in clients.iter() { + if Arc::ptr_eq(client, sender) { + continue; + } + let mut guard = client.lock().await; + if let Err(e) = guard.send(message.to_vec()).await { + tracing::debug!(error = %e, "e2ee_broadcast_send_failed"); + } + } + } + + /// Get current client count + pub async fn client_count(&self) -> usize { + self.clients.read().await.len() + } +} + +/// E2EE Hub - manages document rooms with encrypted relay #[derive(Clone)] pub struct Hub { - inner: Arc>>>, - hydration_service: Arc, + /// Document rooms by document ID + inner: Arc>>>, + /// Snapshot service for persistence snapshot_service: Arc, + /// Document persistence port persistence: Arc, - save_flags: Arc>>, - auto_archive_interval: Duration, - last_auto_archive: Arc>>, + /// Edit flags per document edit_flags: Arc>>>, + /// Auto archive interval (0 = disabled) + #[allow(dead_code)] + auto_archive_interval: Duration, } impl Hub { + /// Create a new Hub + /// + /// Note: hydration_service parameter is kept for API compatibility but not used + /// (clients handle hydration with their own keys) pub fn new( - hydration_service: Arc, + _hydration_service: Arc, snapshot_service: Arc, persistence: Arc, auto_archive_interval: Duration, ) -> Self { Self { inner: Arc::new(RwLock::new(HashMap::new())), - hydration_service, snapshot_service, persistence, - save_flags: Arc::new(Mutex::new(HashMap::new())), - auto_archive_interval, - last_auto_archive: Arc::new(Mutex::new(HashMap::new())), edit_flags: Arc::new(RwLock::new(HashMap::new())), + auto_archive_interval, } } - pub async fn get_or_create(&self, doc_id: &str) -> anyhow::Result> { + + /// Get or create a document room + /// + /// The room is a simple relay structure without Yjs Doc. + /// The server doesn't process document content, only relays encrypted messages. 
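Worth noting about the relay structure above: fan-out is a plain loop over sink handles, and the sender is excluded by `Arc` pointer identity rather than by a connection id. A standalone sketch (not part of the diff) of why `Arc::ptr_eq` is the right test:

```rust
use std::sync::Arc;

fn main() {
    // Clones of one connection handle share a single allocation...
    let conn_a = Arc::new(String::from("sink"));
    let conn_a_clone = Arc::clone(&conn_a);
    // ...while another connection gets its own, even with identical contents.
    let conn_b = Arc::new(String::from("sink"));

    assert!(Arc::ptr_eq(&conn_a, &conn_a_clone)); // same sender: skipped on relay
    assert!(!Arc::ptr_eq(&conn_a, &conn_b)); // different client: receives the frame
}
```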
+ pub async fn get_or_create(&self, doc_id: &str) -> anyhow::Result> { + // Return existing room if available if let Some(r) = self.inner.read().await.get(doc_id).cloned() { return Ok(r); } - // Create Doc; hydration will run asynchronously after room is registered to avoid blocking WS - let doc = Doc::new(); let doc_uuid = Uuid::parse_str(doc_id)?; - let awareness: AwarenessRef = Arc::new(yrs::sync::Awareness::new(doc.clone())); - let bcast = Arc::new(BroadcastGroup::new(awareness.clone(), 64).await); - - let save_flags = self.save_flags.clone(); - let skip_fs_persist_flag = Arc::new(AtomicBool::new(false)); + // Get the latest sequence number from persistence let start_seq = self .persistence .latest_update_seq(&doc_uuid) .await? .unwrap_or(0); - let seq = Arc::new(Mutex::new(start_seq)); - // Persist updates through a channel. We'll await send in a spawned task to avoid dropping updates. - let (tx, mut rx) = mpsc::channel::>(512); - let persistence = self.persistence.clone(); - let snapshot_service = self.snapshot_service.clone(); - let last_auto_archive = self.last_auto_archive.clone(); - let auto_archive_interval = self.auto_archive_interval; - let persist_doc = doc_uuid; - let persist_seq = seq.clone(); - let doc_for_snap = doc.clone(); - tokio::spawn(async move { - while let Some(bytes) = rx.recv().await { - let mut guard = persist_seq.lock().await; - *guard += 1; - let s = *guard; - if let Err(e) = persistence - .append_update_with_seq(&persist_doc, s, &bytes) - .await - { - tracing::error!( - document_id = %persist_doc, - seq = s, - error = ?e, - "persist_document_update_failed" - ); - } - if s % 100 == 0 && !auto_archive_interval.is_zero() { - let should_archive = { - let mut guard = last_auto_archive.lock().await; - let now = Instant::now(); - match guard.get(&persist_doc.to_string()) { - Some(last) if now.duration_since(*last) < auto_archive_interval => { - false - } - _ => { - guard.insert(persist_doc.to_string(), now); - true - } - } - }; - - if should_archive { - match snapshot_service - .persist_snapshot( - &persist_doc, - &doc_for_snap, - SnapshotPersistOptions { - clear_updates: false, - skip_if_unchanged: true, - ..Default::default() - }, - ) - .await - { - Ok(result) => { - if result.persisted { - let label = format!( - "Snapshot {}", - Utc::now().format("%Y-%m-%d %H:%M:%S UTC") - ); - if let Err(e) = snapshot_service - .archive_snapshot( - &persist_doc, - &result.snapshot_bytes, - result.version, - SnapshotArchiveOptions { - label: label.as_str(), - notes: None, - kind: SnapshotArchiveKind::Automatic, - created_by: None, - }, - ) - .await - { - tracing::debug!( - document_id = %persist_doc, - version = result.version, - error = ?e, - "persist_document_snapshot_archive_failed" - ); - } - } else { - tracing::debug!( - document_id = %persist_doc, - version = result.version, - "persist_document_snapshot_skipped_no_changes" - ); - } - } - Err(err) if err.downcast_ref::().is_some() => { - tracing::debug!( - document_id = %persist_doc, - "persist_document_snapshot_missing_document" - ); - } - Err(e) => { - tracing::error!( - document_id = %persist_doc, - version = s, - error = ?e, - "persist_document_snapshot_failed" - ); - } - } - } - } - } - }); - let tx_obs = tx.clone(); - let hub_for_save = self.clone(); - let doc_id_str = doc_uuid.to_string(); - let doc_for_markdown = doc.clone(); - let skip_flag_for_updates = skip_fs_persist_flag.clone(); - let persist_sub = doc - .observe_update_v1(move |_txn, u| { - // Send to the channel asynchronously to avoid blocking and prevent 
drops under load - let tx_clone = tx_obs.clone(); - let bytes = u.update.clone(); - tokio::spawn(async move { - let _ = tx_clone.send(bytes).await; - }); - // schedule fs save (debounced) - let save_flags = save_flags.clone(); - let doc_id_s = doc_id_str.clone(); - let hub_clone = hub_for_save.clone(); - let doc_for_markdown = doc_for_markdown.clone(); - let skip_flag = skip_flag_for_updates.clone(); - tokio::spawn(async move { - // simple debounce: set flag and sleep; if still set after sleep, run - { - let mut m = save_flags.lock().await; - m.insert(doc_id_s.clone(), true); - } - sleep(Duration::from_millis(600)).await; - let should_run = { - let mut m = save_flags.lock().await; - m.remove(&doc_id_s).is_some() - }; - if should_run { - if let Ok(doc_uuid) = Uuid::parse_str(&doc_id_s) { - if skip_flag.swap(false, Ordering::SeqCst) { - tracing::debug!( - document_id = %doc_id_s, - "debounced_save_skipped_after_ingest" - ); - return; - } - if let Err(e) = hub_clone - .snapshot_service - .write_markdown(&doc_uuid, &doc_for_markdown) - .await - { - tracing::error!( - document_id = %doc_id_s, - error = ?e, - "debounced_save_failed" - ); - } - } - } - }); - }) - .unwrap(); - - let room = Arc::new(DocumentRoom { - doc: doc.clone(), - awareness: awareness.clone(), - broadcast: bcast.clone(), - persist_sub, - seq: seq.clone(), - skip_fs_persist: skip_fs_persist_flag.clone(), - }); + // Create a simple E2EE room (no Yjs Doc needed) + let room = Arc::new(E2EEDocumentRoom::new(start_seq)); + + // Register the room self.inner .write() .await .insert(doc_id.to_string(), room.clone()); let _ = self.ensure_edit_flag(doc_id).await; - // Hydrate in background (snapshot + updates). Non-blocking for WS subscription - let bcast_h = bcast.clone(); - let hydration = self.hydration_service.clone(); - let seq_for_hydrate = seq.clone(); - let skip_flag_for_hydrate = skip_fs_persist_flag.clone(); - tokio::spawn(async move { - tracing::debug!(%doc_uuid, "hydrate:start"); - match hydration - .hydrate(&doc_uuid, HydrationOptions::default()) - .await - { - Ok(hydrated_state) => { - let update_bin = { - let txn = hydrated_state.doc.transact(); - txn.encode_state_as_update_v1(&StateVector::default()) - }; - if let Ok(update) = Update::decode_v1(&update_bin) { - let mut txn = doc.transact_mut(); - if let Err(e) = txn.apply_update(update) { - tracing::debug!(document_id = %doc_uuid, error = ?e, "hydrate_apply_failed"); - } else { - skip_flag_for_hydrate.store(true, Ordering::SeqCst); - } - } - { - let mut guard = seq_for_hydrate.lock().await; - if hydrated_state.last_seq > *guard { - *guard = hydrated_state.last_seq; - } - } + tracing::debug!( + document_id = %doc_id, + start_seq = start_seq, + "e2ee_room_created" + ); - let txn = doc.transact(); - let bin = txn.encode_state_as_update_v1(&StateVector::default()); - drop(txn); - let mut enc = EncoderV1::new(); - enc.write_var(MSG_SYNC); - enc.write_var(MSG_SYNC_UPDATE); - enc.write_buf(&bin); - let msg = enc.to_vec(); - if let Err(e) = bcast_h.broadcast(msg) { - tracing::debug!( - document_id = %doc_uuid, - error = %e, - "hydrate:broadcast_failed" - ); - } - tracing::debug!(document_id = %doc_uuid, "hydrate:complete"); - } - Err(e) => { - tracing::error!(document_id = %doc_uuid, error = ?e, "hydrate_failed"); - } - } - }); Ok(room) } @@ -335,74 +150,184 @@ impl Hub { self.snapshot_service.clone() } - pub async fn apply_snapshot(&self, doc_id: &str, snapshot: &Doc) -> anyhow::Result<()> { - let room = self.get_or_create(doc_id).await?; - let new_markdown = { - let txt_new = 
snapshot.get_or_insert_text("content"); - let txn = snapshot.transact(); - txt_new.get_string(&txn) - }; + /// Get encrypted snapshot with metadata (nonce, signature, seq_at_snapshot) + /// + /// Returns the encrypted snapshot directly from persistence. + /// The server cannot decode the content. + pub async fn get_snapshot( + &self, + doc_id: &str, + ) -> anyhow::Result> + { + use application::documents::ports::realtime::realtime_port::SnapshotData; - let update_bytes = { - let txt = room.doc.get_or_insert_text("content"); - let mut txn = room.doc.transact_mut(); - let len = txt.len(&txn); - if len > 0 { - txt.remove_range(&mut txn, 0, len); - } - if !new_markdown.is_empty() { - txt.insert(&mut txn, 0, &new_markdown); - } - txn.encode_update_v1() + let uuid = match Uuid::parse_str(doc_id) { + Ok(id) => id, + Err(_) => return Ok(None), }; - if update_bytes.is_empty() { - return Ok(()); + // Get encrypted snapshot from persistence + if let Ok(Some(entry)) = self.persistence.latest_snapshot_entry(&uuid).await { + return Ok(Some(SnapshotData { + data: entry.bytes, + nonce: entry.nonce, + signature: entry.signature, + seq_at_snapshot: entry.seq_at_snapshot, + })); } - room.skip_fs_persist.store(true, Ordering::SeqCst); + Ok(None) + } + + /// Get encrypted updates since a given sequence number + /// + /// Used by REST API to retrieve pending updates for content reconstruction. + pub async fn get_updates_since( + &self, + doc_id: &str, + since_seq: i64, + ) -> anyhow::Result> + { + use application::documents::ports::realtime::realtime_port::EncryptedUpdateEntry; + + let uuid = match Uuid::parse_str(doc_id) { + Ok(id) => id, + Err(_) => return Ok(Vec::new()), + }; + + let updates = self.persistence.get_updates_since(&uuid, since_seq).await?; + + Ok(updates + .into_iter() + .map(|u| EncryptedUpdateEntry { + seq: u.seq, + data: u.data, + nonce: u.nonce, + signature: u.signature, + public_key: u.public_key, + }) + .collect()) + } + + /// Get plaintext content is not available + /// + /// Returns None as the server cannot decrypt content. + pub async fn get_content(&self, _doc_id: &str) -> anyhow::Result> { + // Server cannot access plaintext content + Ok(None) + } + + /// Apply plaintext snapshot is not available + /// + /// Use apply_encrypted_snapshot instead. + pub async fn apply_snapshot( + &self, + _doc_id: &str, + _snapshot: &yrs::Doc, + ) -> anyhow::Result<()> { + anyhow::bail!("apply_snapshot not available, use apply_encrypted_snapshot") + } + + /// Apply encrypted snapshot + pub async fn apply_encrypted_snapshot( + &self, + doc_id: &str, + data: &[u8], + nonce: Option<&[u8]>, + signature: Option<&[u8]>, + ) -> anyhow::Result<()> { + let doc_uuid = Uuid::parse_str(doc_id)?; + + // Get the next version number + let version = self + .persistence + .latest_snapshot_version(&doc_uuid) + .await? 
+ .unwrap_or(0) + + 1; + + // Get current seq to record in snapshot (for E2EE sync) + let room = self.get_or_create(doc_id).await?; + let current_seq = { + let guard = room.seq.lock().await; + *guard + }; + + // Store the encrypted snapshot with metadata including seq_at_snapshot + let encryption_meta = Some(ContentEncryptionMeta { + nonce: nonce.map(|n| n.to_vec()), + signature: signature.map(|s| s.to_vec()), + seq_at_snapshot: Some(current_seq), + }); + + self.persistence + .persist_snapshot(&doc_uuid, version, data, encryption_meta.as_ref()) + .await + .map_err(|e| anyhow::anyhow!("failed to persist encrypted snapshot: {:?}", e))?; - let mut encoder = EncoderV1::new(); - encoder.write_var(MSG_SYNC); - encoder.write_var(MSG_SYNC_UPDATE); - encoder.write_buf(&update_bytes); - let frame = encoder.to_vec(); - room.broadcast - .broadcast(frame) - .map_err(|err| anyhow::anyhow!(err)) - .context("broadcast_snapshot_update")?; + tracing::debug!( + document_id = %doc_id, + version = version, + seq_at_snapshot = current_seq, + "e2ee_snapshot_persisted" + ); Ok(()) } - pub async fn get_content(&self, doc_id: &str) -> anyhow::Result> { - if let Some(room) = self.inner.read().await.get(doc_id).cloned() { - let txt = room.doc.get_or_insert_text("content"); - let txn = room.doc.transact(); - return Ok(Some(txt.get_string(&txn))); - } + /// Apply encrypted update + pub async fn apply_encrypted_update( + &self, + doc_id: &str, + data: &[u8], + nonce: Option<&[u8]>, + signature: Option<&[u8]>, + public_key: Option<&[u8]>, + ) -> anyhow::Result<()> { + let doc_uuid = Uuid::parse_str(doc_id)?; - let uuid = match Uuid::parse_str(doc_id) { - Ok(id) => id, - Err(_) => return Ok(None), + // Get the current seq number (create room if needed to track seq) + let room = self.get_or_create(doc_id).await?; + let seq = { + let mut guard = room.seq.lock().await; + *guard += 1; + *guard + }; + + // Store the encrypted update with metadata + let update_data = EncryptedUpdateData { + data: data.to_vec(), + nonce: nonce.map(|n| n.to_vec()), + signature: signature.map(|s| s.to_vec()), + public_key: public_key.map(|p| p.to_vec()), }; - let hydrated = self - .hydration_service - .hydrate(&uuid, HydrationOptions::default()) - .await?; - let txt = hydrated.doc.get_or_insert_text("content"); - let txn = hydrated.doc.transact(); - Ok(Some(txt.get_string(&txn))) + + self.persistence + .append_encrypted_update_with_seq(&doc_uuid, seq, &update_data) + .await + .map_err(|e| anyhow::anyhow!("failed to persist encrypted update: {:?}", e))?; + + tracing::debug!( + document_id = %doc_id, + seq = seq, + "e2ee_update_persisted" + ); + + Ok(()) } } impl Hub { + /// Prune old updates for all documents + /// + /// Snapshot creation is client-driven. This method only + /// prunes old encrypted updates after the window. 
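With snapshot creation moved client-side, the only arithmetic left in `snapshot_all` below is the retention window. An illustrative restatement of the cutoff rule (`(current_seq - updates_keep_window).max(0)` in the hunk):

```rust
/// Updates with seq below the cutoff are pruned; everything from the cutoff
/// onward stays available for E2EE catch-up on top of the latest snapshot.
fn prune_cutoff(current_seq: i64, updates_keep_window: i64) -> i64 {
    (current_seq - updates_keep_window).max(0)
}

fn main() {
    assert_eq!(prune_cutoff(1_000, 300), 700); // keeps roughly the last 300 updates
    assert_eq!(prune_cutoff(50, 300), 0); // young documents prune nothing
}
```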
pub async fn snapshot_all( &self, - keep_versions: i64, + _keep_versions: i64, updates_keep_window: i64, ) -> anyhow::Result<()> { - let rooms: Vec<(String, Arc)> = { + let rooms: Vec<(String, Arc)> = { let map = self.inner.read().await; map.iter().map(|(k, v)| (k.clone(), v.clone())).collect() }; @@ -416,40 +341,40 @@ impl Hub { *guard }; let cutoff = (current_seq - updates_keep_window).max(0); - self.snapshot_service - .persist_snapshot( - &doc_uuid, - &room.doc, - SnapshotPersistOptions { - clear_updates: false, - skip_if_unchanged: true, - prune_snapshots: Some(keep_versions), - prune_updates_before: Some(cutoff), - }, - ) - .await?; + + // Prune old updates (encrypted updates before cutoff) + if let Err(e) = self.persistence.prune_updates_before(&doc_uuid, cutoff).await { + tracing::warn!( + document_id = %doc_id, + error = %e, + "e2ee_prune_updates_failed" + ); + } } Ok(()) } + /// Force save to filesystem is not available + /// + /// The server cannot decrypt content to write markdown. pub async fn force_save_to_fs(&self, doc_id: &str) -> anyhow::Result<()> { - let uuid = Uuid::parse_str(doc_id)?; - if let Some(room) = self.inner.read().await.get(doc_id).cloned() { - self.snapshot_service - .write_markdown(&uuid, &room.doc) - .await?; - } else { - let hydrated = self - .hydration_service - .hydrate(&uuid, HydrationOptions::default()) - .await?; - self.snapshot_service - .write_markdown(&uuid, &hydrated.doc) - .await?; - } + tracing::warn!( + document_id = %doc_id, + "force_save_to_fs called - server cannot decrypt content" + ); + // We cannot write plaintext markdown + // This is a no-op but we don't fail to maintain API compatibility Ok(()) } + /// Subscribe to a document room for realtime collaboration + /// + /// This method: + /// 1. Sends initial encrypted snapshot to the client + /// 2. Processes incoming E2EE messages (JSON format) + /// 3. Verifies Ed25519 signatures + /// 4. Relays valid messages to other clients + /// 5. Persists encrypted updates to the database pub async fn subscribe( &self, doc_id: &str, @@ -461,212 +386,282 @@ impl Hub { let sink: SharedRealtimeSink = Arc::new(Mutex::new(sink)); let edit_flag = self.ensure_edit_flag(doc_id).await; let effective_can_edit = can_edit && edit_flag.load(Ordering::Relaxed); - let guarded_stream = - wrap_stream_with_edit_guard(stream, doc_id.to_string(), edit_flag.clone()); - let tracked_clients: Arc>> = - Arc::new(StdMutex::new(HashMap::new())); - let awareness = room.awareness.clone(); - let result = if effective_can_edit { - let subscription = room.broadcast.subscribe_with( - sink.clone(), - guarded_stream, - TrackingProtocol::new(DefaultProtocol, tracked_clients.clone()), - ); - Self::send_protocol_start(sink.clone(), awareness.clone(), DefaultProtocol).await?; - subscription.completed().await - } else { - let subscription = room.broadcast.subscribe_with( - sink.clone(), - guarded_stream, - TrackingProtocol::new(ReadOnlyProtocol, tracked_clients.clone()), - ); - Self::send_protocol_start(sink.clone(), awareness.clone(), ReadOnlyProtocol).await?; - subscription.completed().await - }; - - Self::cleanup_tracked_clients(awareness, tracked_clients); - result.map_err(|e| anyhow::anyhow!(e)) - } - fn cleanup_tracked_clients( - awareness: AwarenessRef, - tracked: Arc>>, - ) { - // Remove awareness states owned by a connection once it disconnects to avoid ghost cursors. 
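The subscribe rewrite in the hunk below replaces the binary yrs sync protocol with JSON frames. As an illustration of what a connecting client receives, here is a hypothetical decoder for the `init` frame assembled in the new code; the field names come from the diff, while the serde types and the function itself are assumptions made for the sketch:

```rust
use base64::Engine;
use serde::Deserialize;

#[derive(Deserialize)]
struct InitSnapshot {
    data: String,                 // base64 ciphertext, opaque to the server
    nonce: Option<String>,        // base64 AEAD nonce
    signature: Option<String>,    // base64 Ed25519 signature
    seq_at_snapshot: Option<i64>, // updates with seq > this arrive as sync_update frames
}

#[derive(Deserialize)]
struct InitFrame {
    #[serde(rename = "type")]
    kind: String, // always "init" for this frame
    snapshot: InitSnapshot,
}

fn decode_init(raw: &[u8]) -> anyhow::Result<(InitFrame, Vec<u8>)> {
    let frame: InitFrame = serde_json::from_slice(raw)?;
    let ciphertext = base64::engine::general_purpose::STANDARD.decode(&frame.snapshot.data)?;
    Ok((frame, ciphertext))
}
```

The client decrypts `ciphertext` with its own key and then requests nothing older than `seq_at_snapshot`.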
- let tracked_clients: Vec<(ClientID, u32)> = { - let mut guard = tracked.lock().expect("tracked clients mutex poisoned"); - guard.drain().collect() + // Add client to room for broadcast + room.add_client(sink.clone()).await; + + // Send initial encrypted snapshot if available + let snapshot_seq = if let Ok(Some(snapshot)) = self.get_snapshot(doc_id).await { + let init_msg = serde_json::json!({ + "type": "init", + "snapshot": { + "data": base64::engine::general_purpose::STANDARD.encode(&snapshot.data), + "nonce": snapshot.nonce.map(|n| base64::engine::general_purpose::STANDARD.encode(&n)), + "signature": snapshot.signature.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)), + "seq_at_snapshot": snapshot.seq_at_snapshot, + } + }); + let msg_bytes = serde_json::to_vec(&init_msg)?; + let mut guard = sink.lock().await; + if let Err(e) = guard.send(msg_bytes).await { + tracing::debug!(error = %e, "e2ee_init_send_failed"); + } + drop(guard); + // Use seq_at_snapshot to determine which updates to send + snapshot.seq_at_snapshot.unwrap_or(0) + } else { + 0 }; - if tracked_clients.is_empty() { - return; - } - let active_clients: HashSet = awareness - .iter() - .filter(|(_, state)| state.data.is_some()) - .map(|(id, _)| id) - .collect(); - - for (client_id, recorded_clock) in tracked_clients { - if !active_clients.contains(&client_id) { - continue; - } - if let Some((current_clock, _)) = awareness.meta(client_id) { - if current_clock == recorded_clock { - awareness.remove_state(client_id); + // Send pending encrypted updates since last snapshot (only updates after snapshot) + let doc_uuid = Uuid::parse_str(doc_id)?; + if let Ok(updates) = self.persistence.get_updates_since(&doc_uuid, snapshot_seq).await { + for update in updates { + let update_msg = serde_json::json!({ + "type": "sync_update", + "update": { + "data": base64::engine::general_purpose::STANDARD.encode(&update.data), + "nonce": update.nonce.map(|n| base64::engine::general_purpose::STANDARD.encode(&n)), + "signature": update.signature.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)), + "public_key": update.public_key.map(|p| base64::engine::general_purpose::STANDARD.encode(&p)), + "seq": update.seq, + } + }); + let msg_bytes = serde_json::to_vec(&update_msg)?; + let mut guard = sink.lock().await; + if let Err(e) = guard.send(msg_bytes).await { + tracing::debug!(error = %e, "e2ee_sync_update_send_failed"); + break; } + drop(guard); } } - } - async fn ensure_edit_flag(&self, doc_id: &str) -> Arc { - let mut guard = self.edit_flags.write().await; - guard - .entry(doc_id.to_string()) - .or_insert_with(|| Arc::new(AtomicBool::new(true))) - .clone() - } + // Process incoming messages + let mut stream = stream; + while let Some(result) = stream.next().await { + let data = match result { + Ok(d) => d, + Err(e) => { + tracing::debug!(error = %e, "e2ee_stream_error"); + break; + } + }; - pub async fn set_document_editable(&self, doc_id: &str, editable: bool) -> anyhow::Result<()> { - let flag = self.ensure_edit_flag(doc_id).await; - flag.store(editable, Ordering::SeqCst); - Ok(()) - } -} + // Parse E2EE message (secsync-compatible format) + let msg: RealtimeMessage = match serde_json::from_slice(&data) { + Ok(m) => m, + Err(e) => { + tracing::debug!(error = %e, "e2ee_parse_error"); + continue; + } + }; -#[derive(Debug, Clone, Copy)] -struct ReadOnlyProtocol; + // Extract public key from publicData based on message type + let (pub_key_b64, msg_doc_id) = match msg.msg_type { + MessageType::Update => { + match 
msg.parse_update_public_data() { + Ok(pd) => (pd.pub_key, pd.doc_id), + Err(e) => { + tracing::debug!(error = %e, "e2ee_parse_update_public_data_error"); + continue; + } + } + } + MessageType::Snapshot => { + match msg.parse_snapshot_public_data() { + Ok(pd) => (pd.pub_key, pd.doc_id), + Err(e) => { + tracing::debug!(error = %e, "e2ee_parse_snapshot_public_data_error"); + continue; + } + } + } + MessageType::Awareness => { + match msg.parse_ephemeral_public_data() { + Ok(pd) => (pd.pub_key, pd.doc_id), + Err(e) => { + tracing::debug!(error = %e, "e2ee_parse_ephemeral_public_data_error"); + continue; + } + } + } + }; -impl yrs::sync::Protocol for ReadOnlyProtocol { - fn handle_sync_step2( - &self, - _awareness: &yrs::sync::Awareness, - _update: yrs::Update, - ) -> Result, yrs::sync::Error> { - Ok(None) - } + // Verify document ID matches + if msg_doc_id != doc_id { + tracing::warn!( + expected = %doc_id, + actual = %msg_doc_id, + "e2ee_doc_id_mismatch" + ); + continue; + } - fn handle_update( - &self, - _awareness: &yrs::sync::Awareness, - _update: yrs::Update, - ) -> Result, yrs::sync::Error> { - Ok(None) - } -} + // Check edit permission for updates/snapshots + if !effective_can_edit + && matches!(msg.msg_type, MessageType::Update | MessageType::Snapshot) + { + tracing::debug!("e2ee_write_rejected_readonly"); + continue; + } -struct TrackingProtocol
<P>
{ - inner: P, - tracked: Arc<StdMutex<HashMap<ClientID, u32>>>, -}

impl
<P>
TrackingProtocol
<P>
{ - fn new(inner: P, tracked: Arc<StdMutex<HashMap<ClientID, u32>>>) -> Self { - Self { inner, tracked } - } -}

impl
<P>
Protocol for TrackingProtocol
<P>
-where - P: Protocol + Send + Sync, -{ - fn start(&self, awareness: &yrs::sync::Awareness, encoder: &mut E) -> Result<(), SyncError> - where - E: Encoder, - { - Protocol::start(&self.inner, awareness, encoder) - } + // Decode ciphertext and nonce for persistence + let ciphertext = + match base64::engine::general_purpose::STANDARD.decode(&msg.ciphertext) { + Ok(c) => c, + Err(e) => { + tracing::debug!(error = %e, "e2ee_ciphertext_decode_error"); + continue; + } + }; + let nonce = match base64::engine::general_purpose::STANDARD.decode(&msg.nonce) { + Ok(n) => n, + Err(e) => { + tracing::debug!(error = %e, "e2ee_nonce_decode_error"); + continue; + } + }; - fn handle_sync_step1( - &self, - awareness: &yrs::sync::Awareness, - sv: StateVector, - ) -> Result, SyncError> { - Protocol::handle_sync_step1(&self.inner, awareness, sv) - } + // Process message by type + let persist_error: Option = match msg.msg_type { + MessageType::Update => { + // Persist encrypted update + match self + .apply_encrypted_update( + doc_id, + &ciphertext, + Some(&nonce), + Some(&signature), + Some(&public_key), + ) + .await + { + Ok(_) => None, + Err(e) => { + tracing::error!( + document_id = %doc_id, + error = %e, + error_debug = ?e, + "e2ee_persist_update_failed" + ); + Some(format!("Failed to persist update: {}", e)) + } + } + } + MessageType::Snapshot => { + // Persist encrypted snapshot + match self + .apply_encrypted_snapshot(doc_id, &ciphertext, Some(&nonce), Some(&signature)) + .await + { + Ok(_) => None, + Err(e) => { + tracing::error!( + document_id = %doc_id, + error = %e, + error_debug = ?e, + "e2ee_persist_snapshot_failed" + ); + Some(format!("Failed to persist snapshot: {}", e)) + } + } + } + MessageType::Awareness => { + // Awareness messages are ephemeral, no persistence + None + } + }; - fn handle_sync_step2( - &self, - awareness: &yrs::sync::Awareness, - update: Update, - ) -> Result, SyncError> { - Protocol::handle_sync_step2(&self.inner, awareness, update) - } + // Send error response to client if persistence failed + if let Some(error_msg) = persist_error { + let error_response = serde_json::json!({ + "type": "error", + "error": error_msg, + "document_id": doc_id, + }); + if let Ok(error_bytes) = serde_json::to_vec(&error_response) { + let mut guard = sink.lock().await; + if let Err(e) = guard.send(error_bytes).await { + tracing::debug!(error = %e, "e2ee_error_response_send_failed"); + } + } + } - fn handle_update( - &self, - awareness: &yrs::sync::Awareness, - update: Update, - ) -> Result, SyncError> { - Protocol::handle_update(&self.inner, awareness, update) - } + // Relay to other clients + room.broadcast_except(&data, &sink).await; + } - fn handle_auth( - &self, - awareness: &yrs::sync::Awareness, - deny_reason: Option, - ) -> Result, SyncError> { - Protocol::handle_auth(&self.inner, awareness, deny_reason) - } + // Remove client from room + room.remove_client(&sink).await; - fn handle_awareness_query( - &self, - awareness: &yrs::sync::Awareness, - ) -> Result, SyncError> { - Protocol::handle_awareness_query(&self.inner, awareness) - } + let remaining = room.client_count().await; + tracing::debug!( + document_id = %doc_id, + remaining_clients = remaining, + "e2ee_client_disconnected" + ); - fn handle_awareness_update( - &self, - awareness: &yrs::sync::Awareness, - update: AwarenessUpdate, - ) -> Result, SyncError> { - { - let mut guard = self.tracked.lock().expect("tracked clients mutex poisoned"); - for (&client_id, entry) in update.clients.iter() { - if entry.json.as_ref() == "null" { - 
guard.remove(&client_id); - } else { - guard.insert(client_id, entry.clock); - } - } - } - awareness.apply_update(update)?; - Ok(None) + Ok(()) } - fn missing_handle( - &self, - awareness: &yrs::sync::Awareness, - tag: u8, - data: Vec, - ) -> Result, SyncError> { - Protocol::missing_handle(&self.inner, awareness, tag, data) + async fn ensure_edit_flag(&self, doc_id: &str) -> Arc { + let mut guard = self.edit_flags.write().await; + guard + .entry(doc_id.to_string()) + .or_insert_with(|| Arc::new(AtomicBool::new(true))) + .clone() } -} -impl Hub { - async fn send_protocol_start
<P>
( - sink: SharedRealtimeSink, - awareness: AwarenessRef, - protocol: P, - ) -> anyhow::Result<()> - where - P: Protocol, - { - let mut encoder = EncoderV1::new(); - protocol - .start::(awareness.as_ref(), &mut encoder) - .map_err(|err| anyhow::anyhow!(err))?; - let frame = encoder.to_vec(); - if frame.is_empty() { - return Ok(()); - } - let mut guard = sink.lock().await; - guard - .send(frame) - .await - .map_err(|err| anyhow::anyhow!(err))?; + pub async fn set_document_editable(&self, doc_id: &str, editable: bool) -> anyhow::Result<()> { + let flag = self.ensure_edit_flag(doc_id).await; + flag.store(editable, Ordering::SeqCst); Ok(()) } } diff --git a/api/crates/infrastructure/src/documents/realtime/local_engine.rs b/api/crates/infrastructure/src/documents/realtime/local_engine.rs index 79304df2..1487277a 100644 --- a/api/crates/infrastructure/src/documents/realtime/local_engine.rs +++ b/api/crates/infrastructure/src/documents/realtime/local_engine.rs @@ -1,5 +1,7 @@ use application::core::ports::errors::PortResult; -use application::documents::ports::realtime::realtime_port::RealtimeEngine; +use application::documents::ports::realtime::realtime_port::{ + EncryptedUpdate, EncryptedUpdateEntry, RealtimeEngine, SnapshotData, +}; use application::documents::ports::realtime::realtime_types::{DynRealtimeSink, DynRealtimeStream}; use application::documents::services::realtime::snapshot::doc_from_snapshot_bytes; @@ -26,6 +28,10 @@ impl RealtimeEngine for LocalRealtimeEngine { self.hub.get_content(doc_id).await.map_err(Into::into) } + async fn get_snapshot(&self, doc_id: &str) -> PortResult> { + self.hub.get_snapshot(doc_id).await.map_err(Into::into) + } + async fn force_persist(&self, doc_id: &str) -> PortResult<()> { self.hub.force_save_to_fs(doc_id).await.map_err(Into::into) } @@ -45,4 +51,50 @@ impl RealtimeEngine for LocalRealtimeEngine { .await .map_err(Into::into) } + + async fn apply_encrypted_updates( + &self, + doc_id: &str, + updates: &[EncryptedUpdate], + ) -> PortResult<()> { + // For E2EE documents, we apply updates as encrypted data + // The hub will store the data without decrypting + for update in updates { + self.hub + .apply_encrypted_update( + doc_id, + &update.data, + update.nonce.as_deref(), + update.signature.as_deref(), + update.public_key.as_deref(), + ) + .await + .map_err(|e| application::core::ports::errors::PortError::from(e))?; + } + Ok(()) + } + + async fn apply_encrypted_snapshot( + &self, + doc_id: &str, + snapshot: &[u8], + nonce: Option<&[u8]>, + signature: Option<&[u8]>, + ) -> PortResult<()> { + self.hub + .apply_encrypted_snapshot(doc_id, snapshot, nonce, signature) + .await + .map_err(Into::into) + } + + async fn get_updates_since( + &self, + doc_id: &str, + since_seq: i64, + ) -> PortResult> { + self.hub + .get_updates_since(doc_id, since_seq) + .await + .map_err(Into::into) + } } diff --git a/api/crates/infrastructure/src/documents/realtime/redis/engine.rs b/api/crates/infrastructure/src/documents/realtime/redis/engine.rs index 022552aa..144a9136 100644 --- a/api/crates/infrastructure/src/documents/realtime/redis/engine.rs +++ b/api/crates/infrastructure/src/documents/realtime/redis/engine.rs @@ -3,50 +3,39 @@ use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::{Duration, SystemTime, UNIX_EPOCH}; -use anyhow::{Context, anyhow}; -use chrono::Utc; +use anyhow::anyhow; +use base64::Engine; use futures_util::{SinkExt, StreamExt}; use tokio::sync::{Mutex, RwLock}; use tokio::task::JoinHandle; -use tokio::time::{Instant, sleep}; 
diff --git a/api/crates/infrastructure/src/documents/realtime/redis/engine.rs b/api/crates/infrastructure/src/documents/realtime/redis/engine.rs
index 022552aa..144a9136 100644
--- a/api/crates/infrastructure/src/documents/realtime/redis/engine.rs
+++ b/api/crates/infrastructure/src/documents/realtime/redis/engine.rs
@@ -3,50 +3,39 @@ use std::sync::Arc;
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::time::{Duration, SystemTime, UNIX_EPOCH};
 
-use anyhow::{Context, anyhow};
-use chrono::Utc;
+use anyhow::anyhow;
+use base64::Engine;
 use futures_util::{SinkExt, StreamExt};
 use tokio::sync::{Mutex, RwLock};
 use tokio::task::JoinHandle;
-use tokio::time::{Instant, sleep};
+use tokio::time::sleep;
 use tokio_stream::wrappers::UnboundedReceiverStream;
 use uuid::Uuid;
-use yrs::encoding::write::Write as YWrite;
-use yrs::sync::awareness::Awareness;
-use yrs::sync::protocol::{MSG_SYNC, MSG_SYNC_UPDATE};
-use yrs::sync::{DefaultProtocol, Protocol};
-use yrs::updates::encoder::{Encoder, EncoderV1};
-use yrs::{Doc, GetString, ReadTxn, StateVector, Text, Transact};
 
+use crate::core::crypto::Ed25519Verifier;
 use crate::core::db::PgPool;
 use crate::documents::db::repositories::document_snapshot_archive_repository_sqlx::SqlxDocumentSnapshotArchiveRepository;
 use crate::documents::db::repositories::linkgraph_repository_sqlx::SqlxLinkGraphRepository;
-use crate::documents::db::repositories::tagging_repository_sqlx::SqlxTaggingRepository;
-use crate::documents::realtime::awareness::{AwarenessService, encode_awareness_state};
-use crate::documents::realtime::utils::{analyse_frame, wrap_stream_with_edit_guard};
 use crate::documents::realtime::{SqlxDocPersistenceAdapter, SqlxDocStateReader};
 use application::core::ports::errors::PortResult;
 use application::core::ports::storage::storage_port::StorageResolverPort;
 use application::core::ports::storage::storage_projection_queue::StorageProjectionQueue;
 use application::documents::ports::document_snapshot_archive_repository::DocumentSnapshotArchiveRepository;
 use application::documents::ports::linkgraph_repository::LinkGraphRepository;
-use application::documents::ports::realtime::awareness_port::AwarenessPublisher;
 use application::documents::ports::realtime::realtime_hydration_port::{
     DocStateReader, RealtimeBacklogReader,
 };
 use application::documents::ports::realtime::realtime_persistence_port::{
-    DocPersistencePort, DocumentMissingError,
+    ContentEncryptionMeta, DocPersistencePort, EncryptedUpdateData,
 };
-use application::documents::ports::realtime::realtime_port::RealtimeEngine as RealtimeEngineTrait;
-use application::documents::ports::realtime::realtime_types::{DynRealtimeSink, DynRealtimeStream};
-use application::documents::ports::tagging::tagging_repository::TaggingRepository;
-use application::documents::services::realtime::doc_hydration::{
-    DocHydrationService, HydrationOptions,
+use application::documents::ports::realtime::realtime_port::{
+    EncryptedUpdate, RealtimeEngine as RealtimeEngineTrait, SnapshotData,
 };
-use application::documents::services::realtime::snapshot::{
-    SnapshotArchiveKind, SnapshotArchiveOptions, SnapshotPersistOptions, SnapshotService,
-    doc_from_snapshot_bytes,
+use application::documents::ports::realtime::realtime_types::{
+    DynRealtimeSink, DynRealtimeStream, MessageType, RealtimeMessage,
 };
+use application::documents::services::realtime::doc_hydration::DocHydrationService;
+use application::documents::services::realtime::snapshot::SnapshotService;
 
 use super::cluster_bus::{RedisClusterBus, StreamItem};
 
@@ -54,10 +43,11 @@ type SharedRealtimeSink = Arc<Mutex<DynRealtimeSink>>;
 
 pub struct RedisRealtimeEngine {
     bus: Arc<RedisClusterBus>,
-    hydration_service: Arc<DocHydrationService>,
+    _hydration_service: Arc<DocHydrationService>,
     snapshot_service: Arc<SnapshotService>,
+    persistence: Arc<dyn DocPersistencePort>,
     task_debounce: Duration,
-    awareness_ttl: Duration,
+    _awareness_ttl: Duration,
     _worker: Option<JoinHandle<()>>,
     edit_flags: Arc<RwLock<HashMap<String, Arc<AtomicBool>>>>,
 }
@@ -101,44 +91,35 @@ impl RedisRealtimeEngine {
             Arc::new(SqlxDocPersistenceAdapter::new(pool.clone()));
         let linkgraph_repo: Arc<dyn LinkGraphRepository> =
             Arc::new(SqlxLinkGraphRepository::new(pool.clone()));
-        let tagging_repo: Arc<dyn TaggingRepository> =
-            Arc::new(SqlxTaggingRepository::new(pool.clone()));
         let archive_repo: Arc<dyn DocumentSnapshotArchiveRepository> =
             Arc::new(SqlxDocumentSnapshotArchiveRepository::new(pool.clone()));
 
         let snapshot_service = Arc::new(SnapshotService::new(
             doc_state_reader,
-            doc_persistence,
+            doc_persistence.clone(),
             linkgraph_repo,
-            tagging_repo,
             archive_repo,
             storage_jobs,
         ));
 
-        let auto_archive_interval = Duration::from_secs(cfg.snapshot_archive_interval_secs);
-        let last_auto_archive: Arc<Mutex<HashMap<String, Instant>>> =
-            Arc::new(Mutex::new(HashMap::new()));
-
         let trim_lifetime = if cfg.min_message_lifetime_ms > 0 {
             Some(Duration::from_millis(cfg.min_message_lifetime_ms))
         } else {
             None
         };
 
+        // E2EE mode: persistence worker only trims Redis streams
         let worker = spawn_persistence_worker(
             cfg.spawn_persistence_worker,
             bus.clone(),
-            hydration_service.clone(),
-            snapshot_service.clone(),
             trim_lifetime,
-            auto_archive_interval,
-            last_auto_archive.clone(),
         );
 
         Ok(Self {
             bus,
-            hydration_service,
+            _hydration_service: hydration_service,
             snapshot_service,
+            persistence: doc_persistence,
             task_debounce: Duration::from_millis(cfg.task_debounce_ms),
-            awareness_ttl: Duration::from_millis(cfg.awareness_ttl_ms),
+            _awareness_ttl: Duration::from_millis(cfg.awareness_ttl_ms),
             _worker: worker,
             edit_flags: Arc::new(RwLock::new(HashMap::new())),
         })
     }
@@ -148,76 +129,35 @@ impl RedisRealtimeEngine {
         self.snapshot_service.clone()
     }
 
-    async fn send_initial_sync(&self, doc: &Doc, sink: &SharedRealtimeSink) -> anyhow::Result<()> {
-        let bin = {
-            let txn = doc.transact();
-            txn.encode_state_as_update_v1(&StateVector::default())
-        };
-        let mut enc = EncoderV1::new();
-        enc.write_var(MSG_SYNC);
-        enc.write_var(MSG_SYNC_UPDATE);
-        enc.write_buf(&bin);
-        let frame = enc.to_vec();
-
-        let mut guard = sink.lock().await;
-        guard
-            .send(frame)
-            .await
-            .map_err(|e| anyhow!("initial_sync_send_failed: {e}"))?;
-        Ok(())
-    }
-
-    async fn flush_awareness_backlog(
-        &self,
-        sink: &SharedRealtimeSink,
-        frames: &[Vec<u8>],
-        doc_id: &str,
-        awareness_manager: &AwarenessService,
-    ) -> anyhow::Result<()> {
-        for payload in frames {
-            awareness_manager.apply_remote_frame(payload).await?;
-            let mut guard = sink.lock().await;
-            if let Err(e) = guard.send(payload.clone()).await {
-                return Err(anyhow!("initial_awareness_send_failed: {e}"));
-            }
-        }
-        tracing::debug!(
-            document_id = doc_id,
-            count = frames.len(),
-            "redis_cluster_awareness_prefill"
-        );
-        Ok(())
-    }
-
+    /// Spawn a task to forward messages from Redis to the client
     fn spawn_forward_task(
         mut stream: UnboundedReceiverStream<StreamItem>,
         sink: SharedRealtimeSink,
         doc_id: String,
         channel: &'static str,
-        awareness_manager: Option<AwarenessService>,
     ) -> JoinHandle<()> {
         tokio::spawn(async move {
             while let Some(item) = stream.next().await {
                 match item {
                     Ok((_id, frame)) => {
-                        if let Some(manager) = &awareness_manager {
-                            if let Err(e) = manager.apply_remote_frame(&frame).await {
-                                tracing::debug!(
-                                    document_id = %doc_id,
-                                    channel,
-                                    error = ?e,
-                                    "redis_cluster_awareness_apply_failed"
-                                );
-                            }
-                        }
                         let mut guard = sink.lock().await;
                         if let Err(e) = guard.send(frame).await {
-                            tracing::debug!(document_id = %doc_id, channel, error = %e, "redis_cluster_forward_sink_closed");
+                            tracing::debug!(
+                                document_id = %doc_id,
+                                channel,
+                                error = %e,
+                                "redis_forward_sink_closed"
+                            );
                             break;
                         }
                     }
                     Err(e) => {
-                        tracing::warn!(document_id = %doc_id, channel, error = ?e, "redis_cluster_forward_stream_error");
+                        tracing::warn!(
+                            document_id = %doc_id,
+                            channel,
+                            error = ?e,
+                            "redis_forward_stream_error"
+                        );
                    }
                 }
             }
        })
    }
@@ -231,10 +171,98 @@ impl RedisRealtimeEngine {
             .or_insert_with(|| Arc::new(AtomicBool::new(true)))
             .clone()
     }
+
+    /// Get the current seq for a document
+    async fn get_current_seq(&self, doc_id: &Uuid) -> i64 {
+        self.persistence
+            .latest_update_seq(doc_id)
+            .await
+            .ok()
+            .flatten()
+            .unwrap_or(0)
+    }
+
+    /// Apply encrypted snapshot with seq tracking
+    async fn apply_encrypted_snapshot(
+        &self,
+        doc_id: &Uuid,
+        data: &[u8],
+        nonce: Option<&[u8]>,
+        signature: Option<&[u8]>,
+    ) -> anyhow::Result<()> {
+        let version = self
+            .persistence
+            .latest_snapshot_version(doc_id)
+            .await?
+            .unwrap_or(0)
+            + 1;
+
+        let current_seq = self.get_current_seq(doc_id).await;
+
+        let encryption_meta = Some(ContentEncryptionMeta {
+            nonce: nonce.map(|n| n.to_vec()),
+            signature: signature.map(|s| s.to_vec()),
+            seq_at_snapshot: Some(current_seq),
+        });
+
+        self.persistence
+            .persist_snapshot(doc_id, version, data, encryption_meta.as_ref())
+            .await
+            .map_err(|e| anyhow!("failed to persist encrypted snapshot: {:?}", e))?;
+
+        tracing::debug!(
+            document_id = %doc_id,
+            version = version,
+            seq_at_snapshot = current_seq,
+            "redis_e2ee_snapshot_persisted"
+        );
+
+        Ok(())
+    }
+
+    /// Apply encrypted update with seq tracking
+    async fn apply_encrypted_update(
+        &self,
+        doc_id: &Uuid,
+        data: &[u8],
+        nonce: Option<&[u8]>,
+        signature: Option<&[u8]>,
+        public_key: Option<&[u8]>,
+    ) -> anyhow::Result<()> {
+        let seq = self.get_current_seq(doc_id).await + 1;
+
+        let update_data = EncryptedUpdateData {
+            data: data.to_vec(),
+            nonce: nonce.map(|n| n.to_vec()),
+            signature: signature.map(|s| s.to_vec()),
+            public_key: public_key.map(|p| p.to_vec()),
+        };
+
+        self.persistence
+            .append_encrypted_update_with_seq(doc_id, seq, &update_data)
+            .await
+            .map_err(|e| anyhow!("failed to persist encrypted update: {:?}", e))?;
+
+        tracing::debug!(
+            document_id = %doc_id,
+            seq = seq,
+            "redis_e2ee_update_persisted"
+        );
+
+        Ok(())
+    }
 }
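// Illustration only (not part of this change): a worked example of the seq
// bookkeeping above, on the assumption (from how ContentEncryptionMeta and
// EncryptedUpdateData are used) that updates receive a monotonically
// increasing `seq` and snapshots record the seq they were taken at:
//
//     updates persisted:       seq = 1, 2, 3, 4, 5
//     snapshot persisted here: seq_at_snapshot = 5
//     later updates:           seq = 6, 7
//
// A client that joins afterwards is hydrated with the snapshot plus
// get_updates_since(doc_id, 5), i.e. only the encrypted updates with
// seq > 5 (6 and 7). Note that `get_current_seq(...) + 1` is computed
// without a transaction, so two concurrent writers could in principle race
// for the same seq; whether append_encrypted_update_with_seq guards against
// that is not visible in this diff.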
 
 #[async_trait::async_trait]
 impl RealtimeEngineTrait for RedisRealtimeEngine {
+    /// Subscribe to a document for E2EE realtime collaboration via Redis
+    ///
+    /// This method:
+    /// 1. Sends initial encrypted snapshot to the client
+    /// 2. Processes incoming E2EE messages (JSON format)
+    /// 3. Verifies Ed25519 signatures
+    /// 4. Relays valid messages to other clients via Redis
+    /// 5. Persists encrypted updates to the database
     async fn subscribe(
         &self,
         doc_id: &str,
@@ -244,127 +272,357 @@ impl RealtimeEngineTrait for RedisRealtimeEngine {
     ) -> PortResult<()> {
         let sink: SharedRealtimeSink = Arc::new(Mutex::new(sink));
         let doc_uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
-        let hydrated = self
-            .hydration_service
-            .hydrate(&doc_uuid, HydrationOptions::default())
-            .await?;
-        let awareness_publisher: Arc<dyn AwarenessPublisher> = self.bus.clone();
-        let awareness_service = AwarenessService::new(
-            hydrated.doc.clone(),
-            self.awareness_ttl,
-            awareness_publisher,
-            doc_id.to_string(),
-        );
-        let ttl_handle = awareness_service.spawn_ttl_task();
+        let edit_flag = self.ensure_edit_flag(doc_id).await;
+        let effective_can_edit = can_edit && edit_flag.load(Ordering::Relaxed);
+
         let mut updates_handle: Option<JoinHandle<()>> = None;
         let mut awareness_handle: Option<JoinHandle<()>> = None;
 
         let result: anyhow::Result<()> = async {
-            let edit_flag = self.ensure_edit_flag(doc_id).await;
-            let session_can_edit = can_edit && edit_flag.load(Ordering::Relaxed);
-            let mut guarded_stream =
-                wrap_stream_with_edit_guard(stream, doc_id.to_string(), edit_flag.clone());
-
-            self.send_initial_sync(&hydrated.doc, &sink).await?;
-            self.flush_awareness_backlog(
-                &sink,
-                &hydrated.awareness_frames,
-                doc_id,
-                &awareness_service,
-            )
-            .await?;
-            if let Ok(Some(frame)) = encode_awareness_state(&awareness_service.awareness()) {
+            // Send initial encrypted snapshot if available
+            let snapshot_seq = if let Ok(Some(entry)) =
+                self.persistence.latest_snapshot_entry(&doc_uuid).await
+            {
+                let init_msg = serde_json::json!({
+                    "type": "init",
+                    "snapshot": {
+                        "data": base64::engine::general_purpose::STANDARD.encode(&entry.bytes),
+                        "nonce": entry.nonce.map(|n| base64::engine::general_purpose::STANDARD.encode(&n)),
+                        "signature": entry.signature.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)),
+                        "seq_at_snapshot": entry.seq_at_snapshot,
+                    }
+                });
+                let msg_bytes = serde_json::to_vec(&init_msg)?;
                 let mut guard = sink.lock().await;
-                let _ = guard.send(frame).await;
+                if let Err(e) = guard.send(msg_bytes).await {
+                    tracing::debug!(error = %e, "redis_e2ee_init_send_failed");
+                }
+                drop(guard);
+                entry.seq_at_snapshot.unwrap_or(0)
+            } else {
+                0
+            };
+
+            // Send pending encrypted updates since last snapshot
+            tracing::info!(
+                document_id = %doc_uuid,
+                snapshot_seq = snapshot_seq,
+                "redis_e2ee_loading_updates_since"
+            );
+            if let Ok(updates) = self
+                .persistence
+                .get_updates_since(&doc_uuid, snapshot_seq)
+                .await
+            {
+                tracing::info!(
+                    document_id = %doc_uuid,
+                    update_count = updates.len(),
+                    "redis_e2ee_sending_sync_updates"
+                );
+                for update in updates {
+                    let update_msg = serde_json::json!({
+                        "type": "sync_update",
+                        "update": {
+                            "data": base64::engine::general_purpose::STANDARD.encode(&update.data),
+                            "nonce": update.nonce.map(|n| base64::engine::general_purpose::STANDARD.encode(&n)),
+                            "signature": update.signature.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)),
+                            "public_key": update.public_key.map(|p| base64::engine::general_purpose::STANDARD.encode(&p)),
+                            "seq": update.seq,
+                        }
+                    });
+                    let msg_bytes = serde_json::to_vec(&update_msg)?;
+                    let mut guard = sink.lock().await;
+                    if let Err(e) = guard.send(msg_bytes).await {
+                        tracing::debug!(error = %e, "redis_e2ee_sync_update_send_failed");
+                        break;
+                    }
+                    tracing::debug!(
+                        document_id = %doc_uuid,
+                        seq = update.seq,
+                        "redis_e2ee_sync_update_sent"
+                    );
+                    drop(guard);
+                }
             }
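// Illustration only (not part of this change): the two server-to-client
// frames built above are plain JSON envelopes around base64 payloads.
// Approximate shapes -- field values are placeholders and the exact client
// contract is not shown in this diff:
//
//     {"type": "init",
//      "snapshot": {"data": "<b64>", "nonce": "<b64>", "signature": "<b64>",
//                   "seq_at_snapshot": 5}}
//
//     {"type": "sync_update",
//      "update": {"data": "<b64>", "nonce": "<b64>", "signature": "<b64>",
//                 "public_key": "<b64>", "seq": 6}}
//
// The server never inspects `data`; it only orders frames by `seq`.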
-            Self::send_protocol_start(
-                sink.clone(),
-                awareness_service.awareness(),
-                session_can_edit,
-            )
-            .await
-            .context("redis_cluster_send_protocol_start")?;
-            let updates_stream = self
-                .bus
-                .subscribe_updates(doc_id, hydrated.last_update_stream_id.clone())
-                .await?;
-            let awareness_stream = self
-                .bus
-                .subscribe_awareness(doc_id, hydrated.last_awareness_stream_id.clone())
-                .await?;
+            // Subscribe to Redis streams for updates from other clients
+            let updates_stream = self.bus.subscribe_updates(doc_id, None).await?;
+            let awareness_stream = self.bus.subscribe_awareness(doc_id, None).await?;
 
             updates_handle = Some(Self::spawn_forward_task(
                 updates_stream,
                 sink.clone(),
                 doc_id.to_string(),
                 "updates",
-                None,
             ));
             awareness_handle = Some(Self::spawn_forward_task(
                 awareness_stream,
                 sink.clone(),
                 doc_id.to_string(),
                 "awareness",
-                Some(awareness_service.clone()),
             ));
 
-            while let Some(frame) = guarded_stream.next().await {
-                match frame {
-                    Ok(bytes) => match analyse_frame(&bytes) {
-                        Ok(summary) => {
-                            if summary.has_update {
-                                let allow_edit = can_edit && edit_flag.load(Ordering::Relaxed);
-                                if !allow_edit {
-                                    tracing::warn!(
-                                        document_id = %doc_id,
-                                        "ignored_update_from_readonly_client"
-                                    );
-                                } else if let Err(e) =
-                                    self.bus.publish_update(doc_id, bytes.clone()).await
-                                {
-                                    tracing::warn!(
-                                        document_id = %doc_id,
-                                        error = ?e,
-                                        "redis_cluster_publish_update_failed"
-                                    );
-                                    sleep(self.task_debounce).await;
-                                }
-                            }
-                            if summary.has_awareness {
-                                awareness_service.record_local_frame(&bytes).await.ok();
-                                if let Err(e) =
-                                    self.bus.publish_awareness(doc_id, bytes.clone()).await
-                                {
-                                    tracing::debug!(
-                                        document_id = %doc_id,
-                                        error = ?e,
-                                        "redis_cluster_publish_awareness_failed"
-                                    );
-                                }
-                            }
-                            if !summary.has_update && !summary.has_awareness {
-                                tracing::debug!(
-                                    document_id = %doc_id,
-                                    "redis_cluster_dropped_unknown_frame"
-                                );
-                            }
-                        }
-                        Err(e) => {
-                            tracing::warn!(
-                                document_id = %doc_id,
-                                error = ?e,
-                                "redis_cluster_frame_decode_failed"
-                            );
-                        }
-                    },
-                    Err(e) => {
-                        tracing::debug!(
-                            document_id = %doc_id,
-                            error = %e,
-                            "redis_cluster_inbound_closed"
-                        );
-                        break;
-                    }
-                }
-            }
+            // Process incoming E2EE messages
+            let mut stream = stream;
+            while let Some(result) = stream.next().await {
+                let data = match result {
+                    Ok(d) => d,
+                    Err(e) => {
+                        tracing::debug!(error = %e, "redis_e2ee_stream_error");
+                        break;
+                    }
+                };
+
+                // Parse E2EE message (secsync-compatible format)
+                tracing::info!(
+                    document_id = %doc_id,
+                    data_len = data.len(),
+                    "redis_e2ee_received_message"
+                );
+                let msg: RealtimeMessage = match serde_json::from_slice(&data) {
+                    Ok(m) => m,
+                    Err(e) => {
+                        tracing::warn!(error = %e, data_preview = %String::from_utf8_lossy(&data[..data.len().min(200)]), "redis_e2ee_parse_error");
+                        continue;
+                    }
+                };
+
+                // Extract public key from publicData based on message type
+                let (pub_key_b64, msg_doc_id) = match msg.msg_type {
+                    MessageType::Update => match msg.parse_update_public_data() {
+                        Ok(pd) => (pd.pub_key, pd.doc_id),
+                        Err(e) => {
+                            tracing::debug!(error = %e, "redis_e2ee_parse_update_public_data_error");
+                            continue;
+                        }
+                    },
+                    MessageType::Snapshot => match msg.parse_snapshot_public_data() {
+                        Ok(pd) => (pd.pub_key, pd.doc_id),
+                        Err(e) => {
+                            tracing::debug!(error = %e, "redis_e2ee_parse_snapshot_public_data_error");
+                            continue;
+                        }
+                    },
+                    MessageType::Awareness => match msg.parse_ephemeral_public_data() {
+                        Ok(pd) => (pd.pub_key, pd.doc_id),
+                        Err(e) => {
+                            tracing::debug!(error = %e, "redis_e2ee_parse_ephemeral_public_data_error");
+                            continue;
+                        }
+                    },
+                };
+
+                // Verify document ID matches
+                if msg_doc_id != doc_id {
+                    tracing::warn!(
+                        expected = %doc_id,
+                        actual = %msg_doc_id,
+                        "redis_e2ee_doc_id_mismatch"
+                    );
+                    continue;
+                }
+
+                // Check edit permission for updates/snapshots
+                if !effective_can_edit
+                    && matches!(msg.msg_type, MessageType::Update | MessageType::Snapshot)
+                {
+                    tracing::debug!("redis_e2ee_write_rejected_readonly");
+                    continue;
+                }
+
+                // Decode signature components
+                let public_key =
+                    match base64::engine::general_purpose::STANDARD.decode(&pub_key_b64) {
+                        Ok(k) => k,
+                        Err(e) => {
+                            tracing::debug!(error = %e, "redis_e2ee_public_key_decode_error");
+                            continue;
+                        }
+                    };
+                let signature =
+                    match base64::engine::general_purpose::STANDARD.decode(&msg.signature) {
+                        Ok(s) => s,
+                        Err(e) => {
+                            tracing::debug!(error = %e, "redis_e2ee_signature_decode_error");
+                            continue;
+                        }
+                    };
+
+                // Verify Ed25519 signature
+                let signing_message = Ed25519Verifier::build_signing_message(
+                    msg.signature_domain(),
+                    &msg.nonce,
+                    &msg.ciphertext,
+                    &msg.public_data,
+                );
+
+                match Ed25519Verifier::verify(&public_key, &signing_message, &signature) {
+                    Ok(true) => {
+                        // Signature valid
+                    }
+                    Ok(false) => {
+                        tracing::warn!(
+                            document_id = %doc_id,
+                            "redis_e2ee_signature_invalid"
+                        );
+                        // Send error response to client
+                        let error_response = serde_json::json!({
+                            "type": "error",
+                            "error": "signature_invalid",
+                            "error_code": "E2EE_SIGNATURE_INVALID",
+                            "document_id": doc_id,
+                        });
+                        if let Ok(error_bytes) = serde_json::to_vec(&error_response) {
+                            let mut guard = sink.lock().await;
+                            if let Err(e) = guard.send(error_bytes).await {
+                                tracing::debug!(error = %e, "redis_e2ee_signature_error_send_failed");
+                            }
+                            drop(guard);
+                        }
+                        continue;
+                    }
+                    Err(e) => {
+                        tracing::warn!(
+                            document_id = %doc_id,
+                            error = %e,
+                            "redis_e2ee_signature_verify_error"
+                        );
+                        // Send error response to client
+                        let error_response = serde_json::json!({
+                            "type": "error",
+                            "error": "signature_verification_failed",
+                            "error_code": "E2EE_SIGNATURE_VERIFY_ERROR",
+                            "document_id": doc_id,
+                        });
+                        if let Ok(error_bytes) = serde_json::to_vec(&error_response) {
+                            let mut guard = sink.lock().await;
+                            if let Err(e) = guard.send(error_bytes).await {
+                                tracing::debug!(error = %e, "redis_e2ee_signature_error_send_failed");
+                            }
+                            drop(guard);
+                        }
+                        continue;
+                    }
+                }
+
+                // Decode ciphertext and nonce for persistence
+                let ciphertext =
+                    match base64::engine::general_purpose::STANDARD.decode(&msg.ciphertext) {
+                        Ok(c) => c,
+                        Err(e) => {
+                            tracing::debug!(error = %e, "redis_e2ee_ciphertext_decode_error");
+                            continue;
+                        }
+                    };
+                let nonce = match base64::engine::general_purpose::STANDARD.decode(&msg.nonce) {
+                    Ok(n) => n,
+                    Err(e) => {
+                        tracing::debug!(error = %e, "redis_e2ee_nonce_decode_error");
+                        continue;
+                    }
+                };
+
+                // Process message by type
+                let persist_error: Option<String> = match msg.msg_type {
+                    MessageType::Update => {
+                        // Persist encrypted update
+                        tracing::info!(
+                            document_id = %doc_id,
+                            ciphertext_len = ciphertext.len(),
+                            nonce_len = nonce.len(),
+                            signature_len = signature.len(),
+                            public_key_len = public_key.len(),
+                            "redis_e2ee_persisting_update"
+                        );
+                        match self
+                            .apply_encrypted_update(
+                                &doc_uuid,
+                                &ciphertext,
+                                Some(&nonce),
+                                Some(&signature),
+                                Some(&public_key),
+                            )
+                            .await
+                        {
+                            Ok(_) => {
+                                tracing::info!(document_id = %doc_id, "redis_e2ee_update_persisted_ok");
+                                None
+                            }
+                            Err(e) => {
+                                tracing::error!(
+                                    document_id = %doc_id,
+                                    error = %e,
+                                    error_debug = ?e,
+                                    "redis_e2ee_persist_update_failed"
+                                );
+                                Some(format!("Failed to persist update: {}", e))
+                            }
+                        }
+                    }
+                    MessageType::Snapshot => {
+                        // Persist encrypted snapshot
+                        match self
+                            .apply_encrypted_snapshot(
+                                &doc_uuid,
+                                &ciphertext,
+                                Some(&nonce),
+                                Some(&signature),
+                            )
+                            .await
+                        {
+                            Ok(_) => None,
+                            Err(e) => {
+                                tracing::error!(
+                                    document_id = %doc_id,
+                                    error = %e,
+                                    error_debug = ?e,
+                                    "redis_e2ee_persist_snapshot_failed"
+                                );
+                                Some(format!("Failed to persist snapshot: {}", e))
+                            }
+                        }
+                    }
+                    MessageType::Awareness => {
+                        // Awareness messages are ephemeral, no persistence
+                        None
+                    }
+                };
+
+                // Send error response to client if persistence failed
+                if let Some(error_msg) = persist_error {
+                    let error_response = serde_json::json!({
+                        "type": "error",
+                        "error": error_msg,
+                        "document_id": doc_id,
+                    });
+                    if let Ok(error_bytes) = serde_json::to_vec(&error_response) {
+                        let mut guard = sink.lock().await;
+                        if let Err(e) = guard.send(error_bytes).await {
+                            tracing::debug!(error = %e, "redis_e2ee_error_response_send_failed");
+                        }
+                    }
+                }
+
+                // Relay to other clients via Redis
+                match msg.msg_type {
+                    MessageType::Update | MessageType::Snapshot => {
+                        if let Err(e) = self.bus.publish_update(doc_id, data.clone()).await {
+                            tracing::warn!(
+                                document_id = %doc_id,
+                                error = ?e,
+                                "redis_e2ee_publish_update_failed"
+                            );
+                            sleep(self.task_debounce).await;
+                        }
+                    }
+                    MessageType::Awareness => {
+                        if let Err(e) = self.bus.publish_awareness(doc_id, data.clone()).await {
+                            tracing::debug!(
+                                document_id = %doc_id,
+                                error = ?e,
+                                "redis_e2ee_publish_awareness_failed"
+                            );
+                        }
+                    }
+                }
+            }
@@ -379,81 +637,76 @@
         if let Some(handle) = awareness_handle {
             handle.abort();
         }
-        if let Err(err) = awareness_service.clear_local_clients().await {
-            tracing::debug!(document_id = %doc_id, error = ?err, "redis_cluster_awareness_clear_failed");
-        }
-        ttl_handle.abort();
+
+        tracing::debug!(
+            document_id = %doc_id,
+            "redis_e2ee_client_disconnected"
+        );
 
         result.map_err(Into::into)
     }
 
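// Illustration only (not part of this change): the check above verifies an
// Ed25519 signature over whatever Ed25519Verifier::build_signing_message
// derives from (signature_domain, nonce, ciphertext, publicData). Assuming
// that helper simply concatenates its inputs (its implementation is not in
// this diff), a client-side signing counterpart with `ed25519-dalek` (added
// to Cargo.lock above) would look roughly like:
//
//     use ed25519_dalek::{Signer, SigningKey};
//
//     fn sign_frame(
//         key: &SigningKey,
//         domain: &[u8],
//         nonce_b64: &str,
//         ciphertext_b64: &str,
//         public_data: &str,
//     ) -> ed25519_dalek::Signature {
//         let mut msg = Vec::new();
//         msg.extend_from_slice(domain);
//         msg.extend_from_slice(nonce_b64.as_bytes());
//         msg.extend_from_slice(ciphertext_b64.as_bytes());
//         msg.extend_from_slice(public_data.as_bytes());
//         key.sign(&msg)
//     }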
-    async fn get_content(&self, doc_id: &str) -> PortResult<Option<String>> {
+    async fn get_content(&self, _doc_id: &str) -> PortResult<Option<String>> {
+        // In E2EE mode, server cannot decrypt content
+        Ok(None)
+    }
+
+    async fn get_snapshot(&self, doc_id: &str) -> PortResult<Option<SnapshotData>> {
         let uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
-        let hydrated = self
-            .hydration_service
-            .hydrate(&uuid, HydrationOptions::default())
-            .await?;
-        let txt = hydrated.doc.get_or_insert_text("content");
-        let txn = hydrated.doc.transact();
-        Ok(Some(txt.get_string(&txn)))
+
+        // Get encrypted snapshot from persistence (E2EE mode)
+        if let Ok(Some(entry)) = self.persistence.latest_snapshot_entry(&uuid).await {
+            return Ok(Some(SnapshotData {
+                data: entry.bytes,
+                nonce: entry.nonce,
+                signature: entry.signature,
+                seq_at_snapshot: entry.seq_at_snapshot,
+            }));
+        }
+
+        Ok(None)
     }
 
     async fn force_persist(&self, doc_id: &str) -> PortResult<()> {
-        let uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
-        let hydrated = self
-            .hydration_service
-            .hydrate(&uuid, HydrationOptions::default())
-            .await?;
-        self.snapshot_service
-            .write_markdown(&uuid, &hydrated.doc)
-            .await?;
-        self.snapshot_service
-            .persist_snapshot(
-                &uuid,
-                &hydrated.doc,
-                SnapshotPersistOptions {
-                    clear_updates: true,
-                    ..Default::default()
-                },
-            )
-            .await?;
+        // In E2EE mode, server cannot write plaintext markdown;
+        // snapshot persistence is handled by clients via WebSocket
+        tracing::warn!(
+            document_id = %doc_id,
+            "force_persist called in E2EE mode - server cannot decrypt content"
+        );
         Ok(())
     }
 
-    async fn apply_snapshot(&self, doc_id: &str, snapshot: &[u8]) -> PortResult<()> {
-        let doc = doc_from_snapshot_bytes(snapshot)?;
-        let uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
-        let hydrated = self
-            .hydration_service
-            .hydrate(&uuid, HydrationOptions::default())
-            .await?;
-        let update_bytes = {
-            let txt_new = doc.get_or_insert_text("content");
-            let txn_new = doc.transact();
-            let new_markdown = txt_new.get_string(&txn_new);
-            drop(txn_new);
-
-            let txt = hydrated.doc.get_or_insert_text("content");
-            let mut txn = hydrated.doc.transact_mut();
-            let len = txt.len(&txn);
-            if len > 0 {
-                txt.remove_range(&mut txn, 0, len);
-            }
-            if !new_markdown.is_empty() {
-                txt.insert(&mut txn, 0, &new_markdown);
-            }
-            txn.encode_update_v1()
-        };
-        if update_bytes.is_empty() {
-            return Ok(());
-        }
-        let mut encoder = EncoderV1::new();
-        encoder.write_var(MSG_SYNC);
-        encoder.write_var(MSG_SYNC_UPDATE);
-        encoder.write_buf(&update_bytes);
-        let frame = encoder.to_vec();
-        self.bus.publish_update(doc_id, frame).await?;
-        Ok(())
+    async fn apply_snapshot(&self, doc_id: &str, _snapshot: &[u8]) -> PortResult<()> {
+        // In E2EE mode, plaintext snapshot application is not supported;
+        // use apply_encrypted_updates or WebSocket snapshot messages instead
+        tracing::warn!(
+            document_id = %doc_id,
+            "apply_snapshot called in E2EE mode - not supported"
+        );
+        Err(application::core::ports::errors::PortError::from(anyhow!(
+            "apply_snapshot not available in E2EE mode"
+        )))
+    }
+
+    async fn apply_encrypted_snapshot(
+        &self,
+        doc_id: &str,
+        snapshot: &[u8],
+        nonce: Option<&[u8]>,
+        signature: Option<&[u8]>,
+    ) -> PortResult<()> {
+        let doc_uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
+
+        // Use the helper method that handles persistence
+        RedisRealtimeEngine::apply_encrypted_snapshot(self, &doc_uuid, snapshot, nonce, signature)
+            .await
+            .map_err(|e| {
+                application::core::ports::errors::PortError::from(anyhow::anyhow!(
+                    "failed to apply encrypted snapshot: {:?}",
+                    e
+                ))
+            })
     }
 
     async fn set_document_editable(&self, doc_id: &str, editable: bool) -> PortResult<()> {
@@ -461,16 +714,86 @@ impl RealtimeEngineTrait for RedisRealtimeEngine {
         flag.store(editable, Ordering::SeqCst);
         Ok(())
     }
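// Illustration only (not part of this change): get_snapshot above hands back
// ciphertext and nonce as-is; decryption is the client's job. A minimal
// sketch with the `chacha20poly1305` crate (added in Cargo.toml above),
// assuming XChaCha20-Poly1305 with a 24-byte nonce -- the actual cipher and
// nonce size used by clients are not visible in this diff:
//
//     use chacha20poly1305::aead::{Aead, KeyInit};
//     use chacha20poly1305::{Key, XChaCha20Poly1305, XNonce};
//
//     fn decrypt_snapshot(
//         key_bytes: &[u8; 32],
//         nonce_bytes: &[u8; 24],
//         ciphertext: &[u8],
//     ) -> Result<Vec<u8>, chacha20poly1305::Error> {
//         let cipher = XChaCha20Poly1305::new(Key::from_slice(key_bytes));
//         // The Poly1305 tag is checked here; tampered ciphertext fails
//         cipher.decrypt(XNonce::from_slice(nonce_bytes), ciphertext)
//     }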
+
+    async fn apply_encrypted_updates(
+        &self,
+        doc_id: &str,
+        updates: &[EncryptedUpdate],
+    ) -> PortResult<()> {
+        use application::documents::ports::realtime::realtime_persistence_port::EncryptedUpdateData;
+
+        let doc_uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
+
+        // Get current seq from persistence
+        let mut seq = self
+            .persistence
+            .latest_update_seq(&doc_uuid)
+            .await?
+            .unwrap_or(0);
+
+        // Store each encrypted update
+        for update in updates {
+            seq += 1;
+            let update_data = EncryptedUpdateData {
+                data: update.data.clone(),
+                nonce: update.nonce.clone(),
+                signature: update.signature.clone(),
+                public_key: update.public_key.clone(),
+            };
+
+            self.persistence
+                .append_encrypted_update_with_seq(&doc_uuid, seq, &update_data)
+                .await
+                .map_err(|e| {
+                    application::core::ports::errors::PortError::from(anyhow::anyhow!(
+                        "failed to persist encrypted update: {:?}",
+                        e
+                    ))
+                })?;
+        }
+
+        Ok(())
+    }
+
+    async fn get_updates_since(
+        &self,
+        doc_id: &str,
+        since_seq: i64,
+    ) -> PortResult<Vec<EncryptedUpdateEntry>> {
+        use application::documents::ports::realtime::realtime_port::EncryptedUpdateEntry;
+
+        let uuid = Uuid::parse_str(doc_id).map_err(anyhow::Error::from)?;
+
+        let updates = self.persistence.get_updates_since(&uuid, since_seq).await?;
+
+        Ok(updates
+            .into_iter()
+            .map(|u| EncryptedUpdateEntry {
+                seq: u.seq,
+                data: u.data,
+                nonce: u.nonce,
+                signature: u.signature,
+                public_key: u.public_key,
+            })
+            .collect())
+    }
 }
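// Illustration only (not part of this change): the worker below trims Redis
// streams with a MINID cutoff computed from wall-clock time. Redis stream
// IDs are "<ms>-<seq>", so trimming at "<now_ms - lifetime_ms>-0" drops
// every entry older than the configured lifetime. A minimal sketch of the
// same call with the `redis` crate -- the stream key name here is
// illustrative, not taken from this diff:
//
//     async fn trim_updates(
//         conn: &mut redis::aio::MultiplexedConnection,
//         doc_id: &str,
//         min_id: &str,
//     ) -> redis::RedisResult<()> {
//         // XTRIM <key> MINID <ms>-0  -- keep only entries with id >= min_id
//         redis::cmd("XTRIM")
//             .arg(format!("doc:updates:{doc_id}"))
//             .arg("MINID")
//             .arg(min_id)
//             .query_async(conn)
//             .await
//     }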
 
+/// E2EE persistence worker - only trims Redis streams
+///
+/// In E2EE mode, the server is a relay only:
+/// - Snapshots are created by clients (shouldSendSnapshot)
+/// - Updates are stored encrypted via WebSocket handler
+/// - Markdown rendering is done client-side
+///
+/// This worker only:
+/// 1. Acknowledges tasks from Redis
+/// 2. Trims old messages from Redis streams
 fn spawn_persistence_worker(
     enabled: bool,
     bus: Arc<RedisClusterBus>,
-    hydration_service: Arc<DocHydrationService>,
-    snapshot_service: Arc<SnapshotService>,
     trim_lifetime: Option<Duration>,
-    auto_archive_interval: Duration,
-    last_auto_archive: Arc<Mutex<HashMap<String, Instant>>>,
 ) -> Option<JoinHandle<()>> {
     if !enabled {
         return None;
     }
@@ -488,158 +811,23 @@ fn spawn_persistence_worker(
 
         while let Some(task) = tasks.next().await {
             match task {
-                Ok((entry_id, doc_id_str)) => match Uuid::parse_str(&doc_id_str) {
-                    Ok(doc_uuid) => match hydration_service
-                        .hydrate(&doc_uuid, HydrationOptions::default())
-                        .await
-                    {
-                        Ok(hydrated) => {
-                            let doc_id_owned = doc_uuid.to_string();
-                            if let Err(e) = snapshot_service
-                                .write_markdown(&doc_uuid, &hydrated.doc)
-                                .await
-                            {
-                                tracing::error!(
-                                    document_id = %doc_uuid,
-                                    error = ?e,
-                                    "redis_worker_markdown_failed"
-                                );
-                            }
-                            match snapshot_service
-                                .persist_snapshot(
-                                    &doc_uuid,
-                                    &hydrated.doc,
-                                    SnapshotPersistOptions {
-                                        clear_updates: true,
-                                        skip_if_unchanged: true,
-                                        ..Default::default()
-                                    },
-                                )
-                                .await
-                            {
-                                Ok(result) => {
-                                    if !auto_archive_interval.is_zero() {
-                                        let should_archive = {
-                                            let mut guard = last_auto_archive.lock().await;
-                                            let now = Instant::now();
-                                            match guard.get(&doc_id_owned) {
-                                                Some(last)
-                                                    if now.duration_since(*last)
-                                                        < auto_archive_interval =>
-                                                {
-                                                    false
-                                                }
-                                                _ => {
-                                                    guard.insert(doc_id_owned.clone(), now);
-                                                    true
-                                                }
-                                            }
-                                        };
-                                        if should_archive && result.persisted {
-                                            let label = format!(
-                                                "Snapshot {}",
-                                                Utc::now().format("%Y-%m-%d %H:%M:%S UTC")
-                                            );
-                                            if let Err(e) = snapshot_service
-                                                .archive_snapshot(
-                                                    &doc_uuid,
-                                                    &result.snapshot_bytes,
-                                                    result.version,
-                                                    SnapshotArchiveOptions {
-                                                        label: label.as_str(),
-                                                        notes: None,
-                                                        kind: SnapshotArchiveKind::Automatic,
-                                                        created_by: None,
-                                                    },
-                                                )
-                                                .await
-                                            {
-                                                tracing::debug!(
-                                                    document_id = %doc_uuid,
-                                                    version = result.version,
-                                                    error = ?e,
-                                                    "redis_worker_snapshot_archive_failed"
-                                                );
-                                            }
-                                        } else if should_archive {
-                                            tracing::debug!(
-                                                document_id = %doc_uuid,
-                                                version = result.version,
-                                                "redis_worker_snapshot_skipped_no_changes"
-                                            );
-                                        }
-                                    }
-                                }
-                                Err(err)
-                                    if err.downcast_ref::<DocumentMissingError>().is_some() =>
-                                {
-                                    tracing::debug!(
-                                        document_id = %doc_uuid,
-                                        "redis_worker_snapshot_missing_document"
-                                    );
-                                }
-                                Err(e) => {
-                                    tracing::error!(
-                                        document_id = %doc_uuid,
-                                        error = ?e,
-                                        "redis_worker_snapshot_failed"
-                                    );
-                                }
-                            }
-                            if let Err(e) = bus.ack_task(&entry_id).await {
-                                tracing::debug!(
-                                    document_id = %doc_uuid,
-                                    error = ?e,
-                                    "redis_worker_ack_failed"
-                                );
-                            }
-                            if let Some(lifetime) = trim_lifetime {
-                                let cutoff = SystemTime::now()
-                                    .duration_since(UNIX_EPOCH)
-                                    .unwrap_or_default()
-                                    .as_millis()
-                                    as i64
-                                    - lifetime.as_millis() as i64;
-                                if cutoff > 0 {
-                                    let min_id = format!("{}-0", cutoff);
-                                    if let Err(e) =
-                                        bus.trim_updates_minid(&doc_id_owned, &min_id).await
-                                    {
-                                        tracing::debug!(
-                                            document_id = %doc_uuid,
-                                            error = ?e,
-                                            "redis_worker_trim_updates_failed"
-                                        );
-                                    }
-                                    if let Err(e) =
-                                        bus.trim_awareness_minid(&doc_id_owned, &min_id).await
-                                    {
-                                        tracing::debug!(
-                                            document_id = %doc_uuid,
-                                            error = ?e,
-                                            "redis_worker_trim_awareness_failed"
-                                        );
-                                    }
-                                }
-                            }
-                        }
-                        Err(e) => {
-                            tracing::error!(
-                                document_id = %doc_uuid,
-                                error = ?e,
-                                "redis_worker_hydrate_failed"
-                            );
+                Ok((entry_id, doc_id_str)) => {
+                    // E2EE mode: just ack the task and trim Redis streams
+                    let _ = bus.ack_task(&entry_id).await;
+
+                    if let Some(lifetime) = trim_lifetime {
+                        let cutoff = SystemTime::now()
+                            .duration_since(UNIX_EPOCH)
+                            .unwrap_or_default()
+                            .as_millis() as i64
+                            - lifetime.as_millis() as i64;
+                        if cutoff > 0 {
+                            let min_id = format!("{}-0", cutoff);
+                            let _ = bus.trim_updates_minid(&doc_id_str, &min_id).await;
+                            let _ = bus.trim_awareness_minid(&doc_id_str, &min_id).await;
                         }
-                    },
-                    Err(e) => {
-                        tracing::warn!(
-                            document_id = %doc_id_str,
-                            error = %e,
-                            "redis_worker_invalid_doc_id"
-                        );
-                        let _ = bus.ack_task(&entry_id).await;
                     }
-                },
+                }
                 Err(e) => {
                     tracing::warn!(error = ?e, "redis_worker_stream_error");
                     sleep(Duration::from_secs(1)).await;
@@ -650,50 +838,3 @@
                 }
             }
         }
 
         tracing::info!("redis_persistence_worker_stopped");
     }))
 }
-
-impl RedisRealtimeEngine {
-    async fn send_protocol_start(
-        sink: SharedRealtimeSink,
-        awareness: Arc<Awareness>,
-        writable: bool,
-    ) -> anyhow::Result<()> {
-        let mut encoder = EncoderV1::new();
-        if writable {
-            DefaultProtocol
-                .start::<EncoderV1>(awareness.as_ref(), &mut encoder)
-                .map_err(|err| anyhow!(err))?;
-        } else {
-            ReadOnlyProtocol
-                .start::<EncoderV1>(awareness.as_ref(), &mut encoder)
-                .map_err(|err| anyhow!(err))?;
-        }
-        let frame = encoder.to_vec();
-        if frame.is_empty() {
-            return Ok(());
-        }
-        let mut guard = sink.lock().await;
-        guard.send(frame).await.map_err(|err| anyhow!(err))?;
-        Ok(())
-    }
-}
-
-#[derive(Debug, Clone, Copy)]
-struct ReadOnlyProtocol;
-
-impl yrs::sync::Protocol for ReadOnlyProtocol {
-    fn handle_sync_step2(
-        &self,
-        _awareness: &yrs::sync::Awareness,
-        _update: yrs::Update,
-    ) -> Result<Option<Vec<u8>>, yrs::sync::Error> {
-        Ok(None)
-    }
-
-    fn handle_update(
-        &self,
-        _awareness: &yrs::sync::Awareness,
-        _update: yrs::Update,
-    ) -> Result<Option<Vec<u8>>, yrs::sync::Error> {
-        Ok(None)
-    }
-}
diff --git a/api/crates/infrastructure/src/documents/realtime/utils.rs b/api/crates/infrastructure/src/documents/realtime/utils.rs
index bd26e697..58e81dff 100644
--- a/api/crates/infrastructure/src/documents/realtime/utils.rs
+++ b/api/crates/infrastructure/src/documents/realtime/utils.rs
@@ -14,6 +14,7 @@ use yrs::updates::decoder::DecoderV1;
 use application::documents::ports::realtime::realtime_port::RealtimeError;
 use application::documents::ports::realtime::realtime_types::DynRealtimeStream;
 
+#[allow(dead_code)]
 pub fn analyse_frame(frame: &[u8]) -> Result<FrameSummary> {
     let mut decoder = DecoderV1::new(Cursor::new(frame));
     let reader = MessageReader::new(&mut decoder);
@@ -32,12 +33,14 @@ pub fn analyse_frame(frame: &[u8]) -> Result<FrameSummary> {
     Ok(summary)
 }
 
+#[allow(dead_code)]
 #[derive(Default, Clone, Copy, Debug)]
 pub struct FrameSummary {
     pub has_update: bool,
     pub has_awareness: bool,
 }
 
+#[allow(dead_code)]
 pub fn wrap_stream_with_edit_guard(
     stream: DynRealtimeStream,
     doc_id: String,
@@ -50,6 +53,7 @@ pub fn wrap_stream_with_edit_guard(
     })
 }
 
+#[allow(dead_code)]
 struct GuardedStream {
     inner: DynRealtimeStream,
     doc_id: String,
diff --git a/api/crates/infrastructure/src/git/db/repositories/git_pull_session_repository_sqlx/mod.rs b/api/crates/infrastructure/src/git/db/repositories/git_pull_session_repository_sqlx/mod.rs
deleted file mode 100644
index fb26a877..00000000
--- a/api/crates/infrastructure/src/git/db/repositories/git_pull_session_repository_sqlx/mod.rs
+++ /dev/null
@@ -1,100 +0,0 @@
-use async_trait::async_trait;
-use sqlx::types::Json;
-use sqlx::{PgPool, Row};
-use uuid::Uuid;
-
-use application::core::ports::errors::PortResult;
-use application::git::dtos::{GitPullConflictItemDto, GitPullResolutionDto, GitPullSessionDto};
-use application::git::ports::git_pull_session_repository::GitPullSessionRepository;
-use domain::git::pull_session::GitPullSessionStatus;
-
-pub struct GitPullSessionRepositorySqlx {
-    pool: PgPool,
-}
-
-impl GitPullSessionRepositorySqlx {
-    pub fn new(pool: PgPool) -> Self {
-        Self { pool }
-    }
-}
-
-#[async_trait]
-impl GitPullSessionRepository for GitPullSessionRepositorySqlx {
-    async fn upsert(&self, session: GitPullSessionDto) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let GitPullSessionDto {
-                id,
-                workspace_id,
-                status,
-                conflicts,
-                resolutions,
-                message,
-                base_commit,
-                remote_commit,
-            } = session;
-            sqlx::query(
-                r#"INSERT INTO git_pull_sessions (id, workspace_id, status, conflicts, resolutions, created_at, updated_at, message, base_commit, remote_commit)
-                   VALUES ($1, $2, $3, $4, $5, now(), now(), $6, $7, $8)
-                   ON CONFLICT (id) DO UPDATE SET
-                     status = EXCLUDED.status,
-                     conflicts = EXCLUDED.conflicts,
-                     resolutions = EXCLUDED.resolutions,
-                     message = EXCLUDED.message,
-                     base_commit = EXCLUDED.base_commit,
-                     remote_commit = EXCLUDED.remote_commit,
-                     updated_at = now()"#,
-            )
-            .bind(id)
-            .bind(workspace_id)
-            .bind(status.as_str())
-            .bind(Json(conflicts))
-            .bind(Json(resolutions))
-            .bind(message.clone())
-            .bind(base_commit.clone())
-            .bind(remote_commit.clone())
-            .execute(&self.pool)
-            .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn get(&self, workspace_id: Uuid, id: Uuid) -> PortResult<Option<GitPullSessionDto>> {
-        let out: anyhow::Result<Option<GitPullSessionDto>> = async {
-            let row = sqlx::query(
-                r#"SELECT id, workspace_id, status, conflicts, resolutions, message, base_commit, remote_commit FROM git_pull_sessions
-                   WHERE id = $1 AND workspace_id = $2"#,
-            )
-            .bind(id)
-            .bind(workspace_id)
-            .fetch_optional(&self.pool)
-            .await?;
-
-            let Some(row) = row else {
-                return Ok(None);
-            };
-            let conflicts: Vec<GitPullConflictItemDto> = row
-                .get::<Json<Vec<GitPullConflictItemDto>>, _>("conflicts")
-                .0;
-            let resolutions: Vec<GitPullResolutionDto> = row
-                .get::<Json<Vec<GitPullResolutionDto>>, _>("resolutions")
-                .0;
-            let status_raw: String = row.get::<String, _>("status");
-            let status = GitPullSessionStatus::parse(&status_raw)
-                .ok_or_else(|| anyhow::anyhow!("invalid_git_pull_session_status"))?;
-            Ok(Some(GitPullSessionDto {
-                id,
-                workspace_id,
-                status,
-                conflicts,
-                resolutions,
-                message: row.try_get::<Option<String>, _>("message").unwrap_or(None),
-                base_commit: row.get::<Option<String>, _>("base_commit"),
-                remote_commit: row.get::<Option<String>, _>("remote_commit"),
-            }))
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-}
diff --git a/api/crates/infrastructure/src/git/db/repositories/git_repository_sqlx/mod.rs b/api/crates/infrastructure/src/git/db/repositories/git_repository_sqlx/mod.rs
index 22a15cb1..4061d9ff 100644
--- a/api/crates/infrastructure/src/git/db/repositories/git_repository_sqlx/mod.rs
+++ b/api/crates/infrastructure/src/git/db/repositories/git_repository_sqlx/mod.rs
@@ -8,11 +8,8 @@ use uuid::Uuid;
 use crate::core::crypto;
 use crate::core::db::PgPool;
 use application::core::ports::errors::PortResult;
-use application::git::ports::git_repository::{
-    GitConfigRecord, GitLastSyncLog, GitRepository, UserGitCfg,
-};
+use application::git::ports::git_repository::{GitConfigRecord, GitRepository};
 use domain::git::auth::GitAuthType;
-use domain::git::sync_log::{GitSyncOperation, GitSyncStatus};
 
 pub struct SqlxGitRepository {
     pub pool: PgPool,
@@ -123,7 +120,7 @@ impl SqlxGitRepository {
 impl GitRepository for SqlxGitRepository {
     async fn get_config(&self, workspace_id: Uuid) -> PortResult<Option<GitConfigRecord>> {
         let out: anyhow::Result<Option<GitConfigRecord>> = async {
-            let row = sqlx::query("SELECT id, repository_url, branch_name, auth_type, auto_sync, created_at, updated_at FROM git_configs WHERE workspace_id = $1 LIMIT 1")
+            let row = sqlx::query("SELECT id, repository_url, branch_name, auth_type, auth_data, auto_sync, created_at, updated_at FROM git_configs WHERE workspace_id = $1 LIMIT 1")
                 .bind(workspace_id)
                 .fetch_optional(&self.pool)
                 .await?;
@@ -131,6 +128,7 @@ impl GitRepository for SqlxGitRepository {
                 let auth_type_raw: String = r.get("auth_type");
                 let auth_type = GitAuthType::parse(&auth_type_raw)
                     .ok_or_else(|| anyhow::anyhow!("invalid_git_auth_type"))?;
+                let auth_data: Option<String> = r.try_get("auth_data").ok();
                 Ok(GitConfigRecord {
                     id: r.get("id"),
                     repository_url: r.get("repository_url"),
@@ -139,6 +137,7 @@ impl GitRepository for SqlxGitRepository {
                     auto_sync: r.get("auto_sync"),
                     created_at: r.get("created_at"),
                     updated_at: r.get("updated_at"),
+                    auth_data,
                 })
             })
             .transpose()
@@ -193,6 +192,7 @@ impl GitRepository for SqlxGitRepository {
                         auto_sync: row.get("auto_sync"),
                         created_at: row.get("created_at"),
                         updated_at: row.get("updated_at"),
+                        auth_data: Some(enc_auth.clone()),
                     });
                 }
                 Err(sqlx::Error::Database(db_err)) => {
@@ -228,132 +228,6 @@ impl GitRepository for SqlxGitRepository {
         .await;
         out.map_err(Into::into)
     }
-
-    async fn load_user_git_cfg(&self, workspace_id: Uuid) -> PortResult<Option<UserGitCfg>> {
-        let out: anyhow::Result<Option<UserGitCfg>> = async {
-            let row = sqlx::query("SELECT repository_url, branch_name, auth_type, auth_data, auto_sync FROM git_configs WHERE workspace_id = $1 LIMIT 1")
-                .bind(workspace_id)
-                .fetch_optional(&self.pool)
-                .await?;
-            row.map(|r| {
-                let repository_url: String = r.get("repository_url");
-                let branch_name: String = r.get("branch_name");
-                let auth_type_raw: Option<String> = r.try_get("auth_type").ok();
-                let auth_type = match auth_type_raw.as_deref() {
-                    None => None,
-                    Some(value) => Some(
-                        GitAuthType::parse(value)
-                            .ok_or_else(|| anyhow::anyhow!("invalid_git_auth_type"))?,
-                    ),
-                };
-                let raw_auth: Option<String> = r.try_get("auth_data").ok();
-                let auth_data =
-                    raw_auth.map(|v| crypto::decrypt_auth_data(&self.encryption_key, &v));
-                let auto_sync: bool = r.try_get("auto_sync").unwrap_or(true);
-                Ok(UserGitCfg {
-                    repository_url,
-                    branch_name,
-                    auth_type,
-                    auth_data,
-                    auto_sync,
-                })
-            })
-            .transpose()
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn get_last_sync_log(&self, workspace_id: Uuid) -> PortResult<Option<GitLastSyncLog>> {
-        let out: anyhow::Result<Option<GitLastSyncLog>> = async {
-            let row = sqlx::query("SELECT status, message, commit_hash, created_at FROM git_sync_logs WHERE workspace_id = $1 ORDER BY created_at DESC LIMIT 1")
-                .bind(workspace_id)
-                .fetch_optional(&self.pool)
-                .await?;
-            row.map(|r| {
-                let status_raw: Option<String> = r.try_get("status").ok();
-                let status = match status_raw.as_deref() {
-                    None => None,
-                    Some(value) => Some(
-                        GitSyncStatus::parse(value)
-                            .ok_or_else(|| anyhow::anyhow!("invalid_git_sync_status"))?,
-                    ),
-                };
-                Ok(GitLastSyncLog {
-                    created_at: r.try_get("created_at").ok(),
-                    status,
-                    message: r.try_get("message").ok(),
-                    commit_hash: r.try_get("commit_hash").ok(),
-                })
-            })
-            .transpose()
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn log_sync_operation(
-        &self,
-        workspace_id: Uuid,
-        operation: GitSyncOperation,
-        status: GitSyncStatus,
-        message: Option<&str>,
-        commit_hash: Option<&str>,
-    ) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let _ = sqlx::query("INSERT INTO git_sync_logs (workspace_id, operation, status, message, commit_hash) VALUES ($1, $2, $3, $4, $5)")
-                .bind(workspace_id)
-                .bind(operation.as_str())
-                .bind(status.as_str())
-                .bind(message)
-                .bind(commit_hash)
-                .execute(&self.pool)
-                .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn delete_sync_logs(&self, workspace_id: Uuid) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query("DELETE FROM git_sync_logs WHERE workspace_id = $1")
-                .bind(workspace_id)
-                .execute(&self.pool)
-                .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn delete_repository_state(&self, workspace_id: Uuid) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query("DELETE FROM git_repository_state WHERE workspace_id = $1")
-                .bind(workspace_id)
-                .execute(&self.pool)
-                .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn list_auto_sync_workspaces(&self) -> PortResult<Vec<Uuid>> {
-        let out: anyhow::Result<Vec<Uuid>> = async {
-            let rows = sqlx::query(
-                "SELECT workspace_id FROM git_configs WHERE auto_sync IS DISTINCT FROM false",
-            )
-            .fetch_all(&self.pool)
-            .await?;
-            Ok(rows
-                .into_iter()
-                .filter_map(|r| r.try_get("workspace_id").ok())
-                .collect())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
 }
 
 fn is_missing_workspace_unique_error(err: &dyn DatabaseError) -> bool {
diff --git a/api/crates/infrastructure/src/git/db/repositories/mod.rs b/api/crates/infrastructure/src/git/db/repositories/mod.rs
index 116e9830..d86ca5b1 100644
--- a/api/crates/infrastructure/src/git/db/repositories/mod.rs
+++ b/api/crates/infrastructure/src/git/db/repositories/mod.rs
@@ -1,2 +1 @@
-pub mod git_pull_session_repository_sqlx;
 pub mod git_repository_sqlx;
diff --git a/api/crates/infrastructure/src/git/mod.rs b/api/crates/infrastructure/src/git/mod.rs
index 9a7f1929..dec10232 100644
--- a/api/crates/infrastructure/src/git/mod.rs
+++ b/api/crates/infrastructure/src/git/mod.rs
@@ -1,6 +1 @@
 pub mod db;
-pub mod rebuild_queue;
-pub mod storage;
-pub mod workspace;
-
-pub use rebuild_queue::PgGitRebuildJobQueue;
diff --git a/api/crates/infrastructure/src/git/rebuild_queue.rs b/api/crates/infrastructure/src/git/rebuild_queue.rs
deleted file mode 100644
index 13b5c634..00000000
--- a/api/crates/infrastructure/src/git/rebuild_queue.rs
+++ /dev/null
@@ -1,163 +0,0 @@
-use async_trait::async_trait;
-use sqlx::Row;
-use tracing::debug;
-use uuid::Uuid;
-
-use crate::core::db::PgPool;
-use application::core::ports::errors::PortResult;
-use application::git::ports::git_rebuild_job_queue::{GitRebuildJob, GitRebuildJobQueue};
-
-pub struct PgGitRebuildJobQueue {
-    pool: PgPool,
-}
-
-impl PgGitRebuildJobQueue {
-    pub fn new(pool: PgPool) -> Self {
-        Self { pool }
-    }
-}
-
-#[async_trait]
-impl GitRebuildJobQueue for PgGitRebuildJobQueue {
-    async fn enqueue(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Option<Uuid>,
-        permission_snapshot: &[String],
-    ) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query(
-                r#"
-                INSERT INTO git_rebuild_jobs (workspace_id, actor_id, permission_snapshot, attempts, locked_at, last_error)
-                VALUES ($1, $2, $3, 0, NULL, NULL)
-                ON CONFLICT (workspace_id)
-                DO UPDATE SET attempts = CASE
-                        WHEN git_rebuild_jobs.locked_at IS NULL THEN 0
-                        ELSE git_rebuild_jobs.attempts
-                    END,
-                    locked_at = CASE
-                        WHEN git_rebuild_jobs.locked_at IS NULL THEN NULL
-                        ELSE git_rebuild_jobs.locked_at
-                    END,
-                    last_error = NULL,
-                    actor_id = EXCLUDED.actor_id,
-                    permission_snapshot = EXCLUDED.permission_snapshot,
-                    pending_retry = CASE
-                        WHEN git_rebuild_jobs.locked_at IS NULL THEN false
-                        ELSE true
-                    END,
-                    updated_at = now()
-                "#,
-            )
-            .bind(workspace_id)
-            .bind(actor_id)
-            .bind(serde_json::json!(permission_snapshot))
-            .execute(&self.pool)
-            .await?;
-            debug!(workspace_id = %workspace_id, "git_rebuild_job_queued");
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn fetch_next(&self, lock_timeout_secs: i64) -> PortResult<Option<GitRebuildJob>> {
-        let out: anyhow::Result<Option<GitRebuildJob>> = async {
-            let row = sqlx::query(
-                r#"
-                WITH next_job AS (
-                    SELECT id
-                    FROM git_rebuild_jobs
-                    WHERE locked_at IS NULL
-                       OR locked_at < now() - ($1 * interval '1 second')
-                    ORDER BY updated_at
-                    LIMIT 1
-                    FOR UPDATE SKIP LOCKED
-                )
-                UPDATE git_rebuild_jobs j
-                SET locked_at = now(),
-                    attempts = attempts + 1,
-                    updated_at = now()
-                WHERE j.id IN (SELECT id FROM next_job)
-                RETURNING j.id, j.workspace_id, j.actor_id, j.permission_snapshot, j.attempts
-                "#,
-            )
-            .bind(lock_timeout_secs.max(1))
-            .fetch_optional(&self.pool)
-            .await?;
-
-            Ok(row.map(|r| GitRebuildJob {
-                id: r.get("id"),
-                workspace_id: r.get("workspace_id"),
-                actor_id: r.try_get("actor_id").ok(),
-                attempts: r.get("attempts"),
-                permission_snapshot: parse_permission_snapshot(
-                    r.try_get("permission_snapshot").ok(),
-                ),
-            }))
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn complete(&self, job_id: i64) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let res = sqlx::query(
-                r#"
-                UPDATE git_rebuild_jobs
-                SET locked_at = NULL,
-                    attempts = 0,
-                    pending_retry = false,
-                    last_error = NULL,
-                    updated_at = now()
-                WHERE id = $1 AND pending_retry = true
-                "#,
-            )
-            .bind(job_id)
-            .execute(&self.pool)
-            .await?;
-
-            if res.rows_affected() == 0 {
-                sqlx::query("DELETE FROM git_rebuild_jobs WHERE id = $1")
-                    .bind(job_id)
-                    .execute(&self.pool)
-                    .await?;
-            }
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn fail(&self, job_id: i64, error: &str) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query(
-                r#"
-                UPDATE git_rebuild_jobs
-                SET last_error = $2,
-                    locked_at = NULL,
-                    pending_retry = false,
-                    updated_at = now()
-                WHERE id = $1
-                "#,
-            )
-            .bind(job_id)
-            .bind(error)
-            .execute(&self.pool)
-            .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-}
-
-fn parse_permission_snapshot(raw: Option<serde_json::Value>) -> Vec<String> {
-    match raw {
-        Some(serde_json::Value::Array(items)) => items
-            .into_iter()
-            .filter_map(|v| v.as_str().map(|s| s.to_string()))
-            .collect(),
-        _ => Vec::new(),
-    }
-}
GitRebuildJobQueue}; - -pub struct PgGitRebuildJobQueue { - pool: PgPool, -} - -impl PgGitRebuildJobQueue { - pub fn new(pool: PgPool) -> Self { - Self { pool } - } -} - -#[async_trait] -impl GitRebuildJobQueue for PgGitRebuildJobQueue { - async fn enqueue( - &self, - workspace_id: Uuid, - actor_id: Option, - permission_snapshot: &[String], - ) -> PortResult<()> { - let out: anyhow::Result<()> = async { - sqlx::query( - r#" - INSERT INTO git_rebuild_jobs (workspace_id, actor_id, permission_snapshot, attempts, locked_at, last_error) - VALUES ($1, $2, $3, 0, NULL, NULL) - ON CONFLICT (workspace_id) - DO UPDATE SET attempts = CASE - WHEN git_rebuild_jobs.locked_at IS NULL THEN 0 - ELSE git_rebuild_jobs.attempts - END, - locked_at = CASE - WHEN git_rebuild_jobs.locked_at IS NULL THEN NULL - ELSE git_rebuild_jobs.locked_at - END, - last_error = NULL, - actor_id = EXCLUDED.actor_id, - permission_snapshot = EXCLUDED.permission_snapshot, - pending_retry = CASE - WHEN git_rebuild_jobs.locked_at IS NULL THEN false - ELSE true - END, - updated_at = now() - "#, - ) - .bind(workspace_id) - .bind(actor_id) - .bind(serde_json::json!(permission_snapshot)) - .execute(&self.pool) - .await?; - debug!(workspace_id = %workspace_id, "git_rebuild_job_queued"); - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn fetch_next(&self, lock_timeout_secs: i64) -> PortResult> { - let out: anyhow::Result> = async { - let row = sqlx::query( - r#" - WITH next_job AS ( - SELECT id - FROM git_rebuild_jobs - WHERE locked_at IS NULL - OR locked_at < now() - ($1 * interval '1 second') - ORDER BY updated_at - LIMIT 1 - FOR UPDATE SKIP LOCKED - ) - UPDATE git_rebuild_jobs j - SET locked_at = now(), - attempts = attempts + 1, - updated_at = now() - WHERE j.id IN (SELECT id FROM next_job) - RETURNING j.id, j.workspace_id, j.actor_id, j.permission_snapshot, j.attempts - "#, - ) - .bind(lock_timeout_secs.max(1)) - .fetch_optional(&self.pool) - .await?; - - Ok(row.map(|r| GitRebuildJob { - id: r.get("id"), - workspace_id: r.get("workspace_id"), - actor_id: r.try_get("actor_id").ok(), - attempts: r.get("attempts"), - permission_snapshot: parse_permission_snapshot( - r.try_get("permission_snapshot").ok(), - ), - })) - } - .await; - out.map_err(Into::into) - } - - async fn complete(&self, job_id: i64) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let res = sqlx::query( - r#" - UPDATE git_rebuild_jobs - SET locked_at = NULL, - attempts = 0, - pending_retry = false, - last_error = NULL, - updated_at = now() - WHERE id = $1 AND pending_retry = true - "#, - ) - .bind(job_id) - .execute(&self.pool) - .await?; - - if res.rows_affected() == 0 { - sqlx::query("DELETE FROM git_rebuild_jobs WHERE id = $1") - .bind(job_id) - .execute(&self.pool) - .await?; - } - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn fail(&self, job_id: i64, error: &str) -> PortResult<()> { - let out: anyhow::Result<()> = async { - sqlx::query( - r#" - UPDATE git_rebuild_jobs - SET last_error = $2, - locked_at = NULL, - pending_retry = false, - updated_at = now() - WHERE id = $1 - "#, - ) - .bind(job_id) - .bind(error) - .execute(&self.pool) - .await?; - Ok(()) - } - .await; - out.map_err(Into::into) - } -} - -fn parse_permission_snapshot(raw: Option) -> Vec { - match raw { - Some(serde_json::Value::Array(items)) => items - .into_iter() - .filter_map(|v| v.as_str().map(|s| s.to_string())) - .collect(), - _ => Vec::new(), - } -} diff --git a/api/crates/infrastructure/src/git/storage.rs 
b/api/crates/infrastructure/src/git/storage.rs deleted file mode 100644 index fc6e6540..00000000 --- a/api/crates/infrastructure/src/git/storage.rs +++ /dev/null @@ -1,777 +0,0 @@ -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use anyhow::{Context, anyhow}; -use async_trait::async_trait; -use futures_util::{StreamExt, stream}; -use tokio::fs; -use tokio::io::AsyncReadExt; -use tokio::sync::Mutex; -use uuid::Uuid; - -use application::core::ports::errors::PortResult; -use application::git::ports::git_storage::{ - BlobKey, CommitMeta, GitStorage, PackBlob, PackStream, encode_commit_id, -}; - -#[derive(Clone, Debug)] -pub enum GitStorageDriverConfig { - Filesystem { root: PathBuf }, - S3(S3GitStorageConfig), -} - -#[derive(Clone, Debug)] -pub struct S3GitStorageConfig { - pub storage_root_prefix: String, - pub bucket: String, - pub region: Option, - pub endpoint: Option, - pub access_key: Option, - pub secret_key: Option, - pub use_path_style: bool, -} - -pub async fn build_git_storage(cfg: GitStorageDriverConfig) -> anyhow::Result> { - match cfg { - GitStorageDriverConfig::Filesystem { root } => { - Ok(Arc::new(FilesystemGitStorage::new(root)) as Arc) - } - GitStorageDriverConfig::S3(settings) => { - let storage = S3GitStorage::new(&settings).await?; - Ok(Arc::new(storage) as Arc) - } - } -} - -#[derive(Clone)] -pub struct FilesystemGitStorage { - root: PathBuf, -} - -impl FilesystemGitStorage { - pub fn new(root: impl Into) -> Self { - Self { root: root.into() } - } - - fn user_dir(&self, user_id: Uuid) -> PathBuf { - self.root - .join("git") - .join("packs") - .join(user_id.to_string()) - } - - fn blobs_root(&self) -> PathBuf { - self.root.join("git").join("blobs") - } - - fn meta_path(&self, user_id: Uuid, commit_hex: &str) -> PathBuf { - self.user_dir(user_id).join(format!("{}.json", commit_hex)) - } - - fn pack_path(&self, user_id: Uuid, commit_hex: &str) -> PathBuf { - self.user_dir(user_id).join(format!("{}.pack", commit_hex)) - } - - fn latest_path(&self, user_id: Uuid) -> PathBuf { - self.user_dir(user_id).join("latest.json") - } - - async fn load_meta_or_err( - &self, - user_id: Uuid, - commit_id: &[u8], - ) -> anyhow::Result { - let commit_hex = encode_commit_id(commit_id); - let meta_path = self.meta_path(user_id, &commit_hex); - match self.read_meta(meta_path.as_path()).await? { - Some(meta) => Ok(meta), - None => Err(anyhow!("metadata not found for commit {}", commit_hex)), - } - } - - async fn collect_meta_chain( - &self, - user_id: Uuid, - until: Option<&[u8]>, - ) -> anyhow::Result> { - let mut metas = Vec::new(); - match until { - Some(target) => { - let mut current = self.load_meta_or_err(user_id, target).await?; - metas.push(current.clone()); - while let Some(parent) = current.parent_commit_id.clone() { - current = self.load_meta_or_err(user_id, parent.as_slice()).await?; - metas.push(current.clone()); - } - } - None => { - let Some(mut current) = self.latest_commit(user_id).await? 
else { - return Ok(metas); - }; - metas.push(current.clone()); - while let Some(parent) = current.parent_commit_id.clone() { - current = self.load_meta_or_err(user_id, parent.as_slice()).await?; - metas.push(current.clone()); - } - } - } - metas.reverse(); - Ok(metas) - } - - async fn read_meta(&self, path: &Path) -> anyhow::Result> { - if !fs::try_exists(path).await.unwrap_or(false) { - return Ok(None); - } - let mut file = fs::File::open(path).await?; - let mut buf = Vec::new(); - file.read_to_end(&mut buf).await?; - let stored: StoredCommitMeta = serde_json::from_slice(&buf)?; - Ok(Some(stored.into_meta()?)) - } - - async fn write_meta(&self, path: &Path, meta: &CommitMeta) -> anyhow::Result<()> { - if let Some(parent) = path.parent() { - fs::create_dir_all(parent).await?; - } - let stored = StoredCommitMeta::from_meta(meta); - let data = serde_json::to_vec_pretty(&stored)?; - fs::write(path, data).await?; - Ok(()) - } -} - -#[async_trait] -impl GitStorage for FilesystemGitStorage { - async fn latest_commit(&self, user_id: Uuid) -> PortResult> { - let out: anyhow::Result> = async { - let path = self.latest_path(user_id); - self.read_meta(path.as_path()).await - } - .await; - out.map_err(Into::into) - } - - async fn store_pack(&self, user_id: Uuid, pack: &[u8], meta: &CommitMeta) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let commit_hex = encode_commit_id(&meta.commit_id); - let pack_path = self.pack_path(user_id, &commit_hex); - if let Some(parent) = pack_path.parent() { - fs::create_dir_all(parent).await?; - } - fs::write(&pack_path, pack).await?; - let meta_path = self.meta_path(user_id, &commit_hex); - self.write_meta(meta_path.as_path(), meta).await?; - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn load_pack_chain(&self, user_id: Uuid, until: Option<&[u8]>) -> PortResult { - let out: anyhow::Result = async { - let metas = self.collect_meta_chain(user_id, until).await?; - if metas.is_empty() { - return Ok(Box::pin(stream::empty::>()) as PackStream); - } - let storage = self.clone(); - let storage_for_stream = storage.clone(); - let stream = stream::iter(metas) - .then(move |meta| { - let storage = storage_for_stream.clone(); - async move { - let commit_hex = encode_commit_id(&meta.commit_id); - let pack_path = storage.pack_path(user_id, &commit_hex); - if !fs::try_exists(&pack_path).await.unwrap_or(false) { - anyhow::bail!("pack not found for commit {}", commit_hex); - } - let bytes = fs::read(&pack_path).await?; - Ok(PackBlob { - commit_id: meta.commit_id.clone(), - bytes, - pack_key: meta.pack_key.clone(), - }) - } - }) - .map(|r: anyhow::Result| r.map_err(Into::into)); - Ok(Box::pin(stream) as PackStream) - } - .await; - out.map_err(Into::into) - } - - async fn put_blob(&self, key: &BlobKey, data: &[u8]) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let root = self.blobs_root(); - let path = sanitize_blob_path(root.as_path(), &key.path)?; - if let Some(parent) = path.parent() { - fs::create_dir_all(parent).await?; - } - fs::write(path, data).await?; - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn fetch_blob(&self, key: &BlobKey) -> PortResult> { - let out: anyhow::Result> = async { - let root = self.blobs_root(); - let path = sanitize_blob_path(root.as_path(), &key.path)?; - let bytes = fs::read(path).await?; - Ok(bytes) - } - .await; - out.map_err(Into::into) - } - - async fn commit_meta(&self, user_id: Uuid, commit_id: &[u8]) -> PortResult> { - let out: anyhow::Result> = async { - let commit_hex = 
encode_commit_id(commit_id); - let meta_path = self.meta_path(user_id, &commit_hex); - self.read_meta(meta_path.as_path()).await - } - .await; - out.map_err(Into::into) - } - - async fn restore_commit_meta(&self, user_id: Uuid, meta: &CommitMeta) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let commit_hex = encode_commit_id(&meta.commit_id); - let meta_path = self.meta_path(user_id, &commit_hex); - self.write_meta(meta_path.as_path(), meta).await - } - .await; - out.map_err(Into::into) - } - - async fn fetch_pack_for_commit( - &self, - user_id: Uuid, - commit_id: &[u8], - ) -> PortResult>> { - let out: anyhow::Result>> = async { - let commit_hex = encode_commit_id(commit_id); - let pack_path = self.pack_path(user_id, &commit_hex); - if !fs::try_exists(&pack_path).await.unwrap_or(false) { - return Ok(None); - } - let bytes = fs::read(&pack_path).await?; - Ok(Some(bytes)) - } - .await; - out.map_err(Into::into) - } - - async fn delete_blob(&self, key: &BlobKey) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let root = self.blobs_root(); - let path = sanitize_blob_path(root.as_path(), &key.path)?; - if fs::try_exists(&path).await.unwrap_or(false) { - fs::remove_file(path).await?; - } - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn delete_pack(&self, user_id: Uuid, commit_id: &[u8]) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let commit_hex = encode_commit_id(commit_id); - let pack_path = self.pack_path(user_id, &commit_hex); - if fs::try_exists(&pack_path).await.unwrap_or(false) { - fs::remove_file(&pack_path).await?; - } - let meta_path = self.meta_path(user_id, &commit_hex); - if fs::try_exists(&meta_path).await.unwrap_or(false) { - fs::remove_file(&meta_path).await?; - } - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn set_latest_commit(&self, user_id: Uuid, meta: Option<&CommitMeta>) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let latest_path = self.latest_path(user_id); - if let Some(meta) = meta { - self.write_meta(latest_path.as_path(), meta).await? 
- } else if fs::try_exists(&latest_path).await.unwrap_or(false) { - fs::remove_file(&latest_path).await?; - } - Ok(()) - } - .await; - out.map_err(Into::into) - } - - async fn delete_all(&self, user_id: Uuid) -> PortResult<()> { - let out: anyhow::Result<()> = async { - let dir = self.user_dir(user_id); - if fs::try_exists(&dir).await.unwrap_or(false) { - fs::remove_dir_all(&dir).await?; - } - let blobs_root = self.blobs_root().join(user_id.to_string()); - if fs::try_exists(&blobs_root).await.unwrap_or(false) { - fs::remove_dir_all(&blobs_root).await?; - } - let latest_path = self.latest_path(user_id); - if fs::try_exists(&latest_path).await.unwrap_or(false) { - fs::remove_file(&latest_path).await?; - } - Ok(()) - } - .await; - out.map_err(Into::into) - } -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -struct StoredCommitMeta { - commit_id: String, - parent_commit_id: Option, - message: Option, - author_name: Option, - author_email: Option, - committed_at: chrono::DateTime, - pack_key: String, - file_hash_index: std::collections::HashMap, -} - -impl StoredCommitMeta { - fn from_meta(meta: &CommitMeta) -> Self { - Self { - commit_id: encode_commit_id(&meta.commit_id), - parent_commit_id: meta - .parent_commit_id - .as_ref() - .map(|id| encode_commit_id(id)), - message: meta.message.clone(), - author_name: meta.author_name.clone(), - author_email: meta.author_email.clone(), - committed_at: meta.committed_at, - pack_key: meta.pack_key.clone(), - file_hash_index: meta.file_hash_index.clone(), - } - } - - fn into_meta(self) -> anyhow::Result { - Ok(CommitMeta { - commit_id: application::git::ports::git_storage::decode_commit_id(&self.commit_id)?, - parent_commit_id: match self.parent_commit_id { - Some(hex) => Some(application::git::ports::git_storage::decode_commit_id( - &hex, - )?), - None => None, - }, - message: self.message, - author_name: self.author_name, - author_email: self.author_email, - committed_at: self.committed_at, - pack_key: self.pack_key, - file_hash_index: self.file_hash_index, - }) - } -} - -fn sanitize_blob_path(root: &Path, key: &str) -> anyhow::Result { - use std::path::Component; - let mut path = root.to_path_buf(); - for component in Path::new(key).components() { - match component { - Component::Normal(part) => { - path.push(part); - } - _ => anyhow::bail!("invalid blob key"), - } - } - if !path.starts_with(root) { - anyhow::bail!("invalid blob path"); - } - Ok(path) -} - -#[derive(Clone)] -pub struct S3GitStorage { - client: aws_sdk_s3::Client, - bucket: String, - root_prefix: String, - // Mutex to serialize latest pointer updates to avoid race when multiple tasks update latest.json concurrently. 
- latest_lock: Arc>, -} - -impl S3GitStorage { - pub async fn new(cfg: &S3GitStorageConfig) -> anyhow::Result { - let bucket = cfg.bucket.clone(); - let mut loader = aws_config::defaults(aws_config::BehaviorVersion::latest()); - if let Some(region) = &cfg.region { - loader = loader.region(aws_sdk_s3::config::Region::new(region.clone())); - } - let shared = loader.load().await; - let mut builder = aws_sdk_s3::config::Builder::from(&shared); - if let (Some(access), Some(secret)) = (&cfg.access_key, &cfg.secret_key) { - let creds = aws_sdk_s3::config::Credentials::new( - access, - secret, - None, - None, - "git-storage-static", - ); - builder = builder.credentials_provider(creds); - } - if let Some(endpoint) = &cfg.endpoint { - builder = builder.endpoint_url(endpoint.clone()); - } - if cfg.use_path_style { - builder = builder.force_path_style(true); - } - let client = aws_sdk_s3::Client::from_conf(builder.build()); - Ok(Self { - client, - bucket, - root_prefix: cfg.storage_root_prefix.clone(), - latest_lock: Arc::new(Mutex::new(())), - }) - } - - fn key_for_pack(&self, user_id: Uuid, commit_hex: &str) -> String { - format!( - "{}/git/packs/{}/{}.pack", - self.root_prefix, user_id, commit_hex - ) - } - - fn key_for_meta(&self, user_id: Uuid, commit_hex: &str) -> String { - format!( - "{}/git/packs/{}/{}.json", - self.root_prefix, user_id, commit_hex - ) - } - - fn key_for_latest(&self, user_id: Uuid) -> String { - format!("{}/git/packs/{}/latest.json", self.root_prefix, user_id) - } - - fn key_for_blob(&self, key: &str) -> String { - format!("{}/git/blobs/{}", self.root_prefix, key) - } - - async fn get_object(&self, key: &str) -> anyhow::Result>> { - match self - .client - .get_object() - .bucket(&self.bucket) - .key(key) - .send() - .await - { - Ok(resp) => { - let mut data = resp.body.into_async_read(); - let mut buf = Vec::new(); - data.read_to_end(&mut buf).await?; - Ok(Some(buf)) - } - Err(aws_sdk_s3::error::SdkError::ServiceError(service_err)) => { - if service_err.err().is_no_such_key() { - Ok(None) - } else { - Err(anyhow::anyhow!("failed to fetch {key}: {:?}", service_err)) - } - } - Err(err) => Err(anyhow::anyhow!("failed to fetch {key}: {err}")), - } - } - - async fn put_object(&self, key: &str, bytes: &[u8]) -> anyhow::Result<()> { - self.client - .put_object() - .bucket(&self.bucket) - .key(key) - .body(aws_sdk_s3::primitives::ByteStream::from(bytes.to_vec())) - .send() - .await - .with_context(|| format!("failed to upload {key}"))?; - Ok(()) - } - - async fn delete_prefix(&self, prefix: &str) -> anyhow::Result<()> { - let mut continuation: Option = None; - loop { - let mut req = self - .client - .list_objects_v2() - .bucket(&self.bucket) - .prefix(prefix); - if let Some(token) = continuation.as_ref() { - req = req.continuation_token(token.clone()); - } - let resp = req.send().await?; - for obj in resp.contents() { - if let Some(key) = obj.key() { - let _ = self - .client - .delete_object() - .bucket(&self.bucket) - .key(key) - .send() - .await; - } - } - if !resp.is_truncated().unwrap_or(false) { - break; - } - continuation = resp.next_continuation_token().map(|s| s.to_string()); - } - Ok(()) - } - - async fn fetch_meta(&self, key: &str) -> anyhow::Result> { - let bytes = match self.get_object(key).await? 
-
-    async fn load_meta_or_err(
-        &self,
-        user_id: Uuid,
-        commit_id: &[u8],
-    ) -> anyhow::Result<CommitMeta> {
-        let commit_hex = encode_commit_id(commit_id);
-        let meta_key = self.key_for_meta(user_id, &commit_hex);
-        match self.fetch_meta(&meta_key).await? {
-            Some(meta) => Ok(meta),
-            None => Err(anyhow!("metadata not found for commit {}", commit_hex)),
-        }
-    }
-
-    async fn collect_meta_chain(
-        &self,
-        user_id: Uuid,
-        until: Option<&[u8]>,
-    ) -> anyhow::Result<Vec<CommitMeta>> {
-        let mut metas = Vec::new();
-        match until {
-            Some(target) => {
-                let mut current = self.load_meta_or_err(user_id, target).await?;
-                metas.push(current.clone());
-                while let Some(parent) = current.parent_commit_id.clone() {
-                    current = self.load_meta_or_err(user_id, parent.as_slice()).await?;
-                    metas.push(current.clone());
-                }
-            }
-            None => {
-                let Some(mut current) = self.latest_commit(user_id).await? else {
-                    return Ok(metas);
-                };
-                metas.push(current.clone());
-                while let Some(parent) = current.parent_commit_id.clone() {
-                    current = self.load_meta_or_err(user_id, parent.as_slice()).await?;
-                    metas.push(current.clone());
-                }
-            }
-        }
-        metas.reverse();
-        Ok(metas)
-    }
-}
-
-#[async_trait]
-impl GitStorage for S3GitStorage {
-    async fn latest_commit(&self, user_id: Uuid) -> PortResult<Option<CommitMeta>> {
-        let out: anyhow::Result<Option<CommitMeta>> = async {
-            let key = self.key_for_latest(user_id);
-            self.fetch_meta(&key).await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn store_pack(&self, user_id: Uuid, pack: &[u8], meta: &CommitMeta) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let commit_hex = encode_commit_id(&meta.commit_id);
-            let pack_key = self.key_for_pack(user_id, &commit_hex);
-            self.put_object(&pack_key, pack).await?;
-            let meta_key = self.key_for_meta(user_id, &commit_hex);
-            let stored = StoredCommitMeta::from_meta(meta);
-            let data = serde_json::to_vec_pretty(&stored)?;
-            self.put_object(&meta_key, &data).await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn load_pack_chain(&self, user_id: Uuid, until: Option<&[u8]>) -> PortResult<PackStream> {
-        let out: anyhow::Result<PackStream> = async {
-            let metas = self.collect_meta_chain(user_id, until).await?;
-            if metas.is_empty() {
-                return Ok(Box::pin(stream::empty::<PortResult<PackBlob>>()) as PackStream);
-            }
-            let storage = self.clone();
-            let storage_for_stream = storage.clone();
-            let stream = stream::iter(metas)
-                .then(move |meta| {
-                    let storage = storage_for_stream.clone();
-                    async move {
-                        let commit_hex = encode_commit_id(&meta.commit_id);
-                        let pack_key = storage.key_for_pack(user_id, &commit_hex);
-                        let bytes = match storage.get_object(&pack_key).await? {
-                            Some(b) => b,
-                            None => anyhow::bail!("pack missing for commit {commit_hex}"),
-                        };
-                        Ok(PackBlob {
-                            commit_id: meta.commit_id.clone(),
-                            bytes,
-                            pack_key: meta.pack_key.clone(),
-                        })
-                    }
-                })
-                .map(|r: anyhow::Result<PackBlob>| r.map_err(Into::into));
-            Ok(Box::pin(stream) as PackStream)
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn put_blob(&self, key: &BlobKey, data: &[u8]) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let key = self.key_for_blob(&key.path);
-            self.put_object(&key, data).await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn fetch_blob(&self, key: &BlobKey) -> PortResult<Vec<u8>> {
-        let out: anyhow::Result<Vec<u8>> = async {
-            let key = self.key_for_blob(&key.path);
-            match self.get_object(&key).await? {
-                Some(bytes) => Ok(bytes),
-                None => anyhow::bail!("blob not found"),
-            }
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn commit_meta(&self, user_id: Uuid, commit_id: &[u8]) -> PortResult<Option<CommitMeta>> {
-        let out: anyhow::Result<Option<CommitMeta>> = async {
-            let commit_hex = encode_commit_id(commit_id);
-            let meta_key = self.key_for_meta(user_id, &commit_hex);
-            self.fetch_meta(&meta_key).await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn restore_commit_meta(&self, user_id: Uuid, meta: &CommitMeta) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let commit_hex = encode_commit_id(&meta.commit_id);
-            let meta_key = self.key_for_meta(user_id, &commit_hex);
-            let stored = StoredCommitMeta::from_meta(meta);
-            let data = serde_json::to_vec_pretty(&stored)?;
-            self.put_object(&meta_key, &data).await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn fetch_pack_for_commit(
-        &self,
-        user_id: Uuid,
-        commit_id: &[u8],
-    ) -> PortResult<Option<Vec<u8>>> {
-        let out: anyhow::Result<Option<Vec<u8>>> = async {
-            let commit_hex = encode_commit_id(commit_id);
-            let pack_key = self.key_for_pack(user_id, &commit_hex);
-            self.get_object(&pack_key).await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn delete_blob(&self, key: &BlobKey) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let key = self.key_for_blob(&key.path);
-            let _ = self
-                .client
-                .delete_object()
-                .bucket(&self.bucket)
-                .key(&key)
-                .send()
-                .await;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn delete_pack(&self, user_id: Uuid, commit_id: &[u8]) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let commit_hex = encode_commit_id(commit_id);
-            let pack_key = self.key_for_pack(user_id, &commit_hex);
-            let meta_key = self.key_for_meta(user_id, &commit_hex);
-            let _ = self
-                .client
-                .delete_object()
-                .bucket(&self.bucket)
-                .key(&pack_key)
-                .send()
-                .await;
-            let _ = self
-                .client
-                .delete_object()
-                .bucket(&self.bucket)
-                .key(&meta_key)
-                .send()
-                .await;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn set_latest_commit(&self, user_id: Uuid, meta: Option<&CommitMeta>) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let latest_key = self.key_for_latest(user_id);
-            match meta {
-                Some(meta) => {
-                    let stored = StoredCommitMeta::from_meta(meta);
-                    let data = serde_json::to_vec_pretty(&stored)?;
-                    let _guard = self.latest_lock.lock().await;
-                    self.put_object(&latest_key, &data).await
-                }
-                None => {
-                    let _guard = self.latest_lock.lock().await;
-                    let _ = self
-                        .client
-                        .delete_object()
-                        .bucket(&self.bucket)
-                        .key(&latest_key)
-                        .send()
-                        .await;
-                    Ok(())
-                }
-            }
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn delete_all(&self, user_id: Uuid) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let pack_prefix = format!("{}/git/packs/{}/", self.root_prefix, user_id);
-            self.delete_prefix(&pack_prefix).await?;
-            let blob_prefix = format!("{}/git/blobs/{}/", self.root_prefix, user_id);
-            self.delete_prefix(&blob_prefix).await?;
-            self.set_latest_commit(user_id, None).await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-}
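A minimal sketch of why the `latest_lock` mutex above matters (assumed semantics, not from the diff): plain S3 PUTs have no compare-and-swap, so concurrent writers to `latest.json` would race last-writer-wins; serializing them in-process makes each pointer update atomic relative to the others.

```rust
use std::sync::Arc;
use tokio::sync::Mutex;

// Hypothetical stand-in for the S3 client call; only the locking pattern is the point.
async fn put_latest(_bytes: &[u8]) { /* PUT latest.json */ }

async fn set_latest(lock: Arc<Mutex<()>>, bytes: Vec<u8>) {
    let _guard = lock.lock().await; // serialize concurrent pointer updates
    put_latest(&bytes).await;       // guard dropped only after the PUT completes
}

#[tokio::main]
async fn main() {
    let lock = Arc::new(Mutex::new(()));
    let a = tokio::spawn(set_latest(lock.clone(), b"v1".to_vec()));
    let b = tokio::spawn(set_latest(lock.clone(), b"v2".to_vec()));
    let _ = tokio::join!(a, b); // the writes happen one at a time, never interleaved
}
```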
diff --git a/api/crates/infrastructure/src/git/workspace/helpers.rs b/api/crates/infrastructure/src/git/workspace/helpers.rs
deleted file mode 100644
index c798eb80..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-mod conflicts;
-mod db;
-mod errors;
-mod front_matter;
-mod pack;
-mod remote;
-mod snapshots;
-mod tree;
-
-pub(super) use conflicts::*;
-pub(super) use db::*;
-pub(super) use errors::*;
-pub(super) use front_matter::*;
-pub(super) use pack::*;
-pub(super) use remote::*;
-pub(super) use snapshots::*;
-pub(super) use tree::*;
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/conflicts.rs b/api/crates/infrastructure/src/git/workspace/helpers/conflicts.rs
deleted file mode 100644
index 41438e63..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/conflicts.rs
+++ /dev/null
@@ -1,103 +0,0 @@
-use super::super::*;
-
-pub(in super::super) fn collect_conflicts(
-    repo: &Repository,
-    index: &git2::Index,
-) -> anyhow::Result<Vec<GitPullConflictItemDto>> {
-    let mut out = Vec::new();
-    let conflicts = index.conflicts()?;
-    for conflict in conflicts {
-        let conflict = conflict?;
-        let path = conflict
-            .our
-            .as_ref()
-            .or(conflict.their.as_ref())
-            .or(conflict.ancestor.as_ref())
-            .and_then(|e| std::str::from_utf8(&e.path).ok())
-            .unwrap_or("")
-            .to_string();
-
-        let to_bytes = |entry: Option<&git2::IndexEntry>| -> anyhow::Result<Option<Vec<u8>>> {
-            if let Some(e) = entry {
-                let blob = repo.find_blob(e.id)?;
-                Ok(Some(blob.content().to_vec()))
-            } else {
-                Ok(None)
-            }
-        };
-
-        let ours_bytes = to_bytes(conflict.our.as_ref())?;
-        let theirs_bytes = to_bytes(conflict.their.as_ref())?;
-        let base_bytes = to_bytes(conflict.ancestor.as_ref())?;
-
-        let (mut ours, ours_bin) = as_text_or_binary(path.as_str(), ours_bytes.as_ref());
-        let (mut theirs, theirs_bin) = as_text_or_binary(path.as_str(), theirs_bytes.as_ref());
-        let (mut base, base_bin) = as_text_or_binary(path.as_str(), base_bytes.as_ref());
-        let is_binary = ours_bin || theirs_bin || base_bin;
-        if !is_binary {
-            ours = super::strip_front_matter_body(path.as_str(), ours);
-            theirs = super::strip_front_matter_body(path.as_str(), theirs);
-            base = super::strip_front_matter_body(path.as_str(), base);
-        }
-
-        out.push(GitPullConflictItemDto {
-            path,
-            is_binary,
-            ours,
-            theirs,
-            base,
-            document_id: None,
-        });
-    }
-    Ok(out)
-}
-
-pub(in super::super) fn index_entry_path(entry: &git2::IndexEntry) -> anyhow::Result<String> {
-    let raw = &entry.path;
-    if raw.is_empty() {
-        anyhow::bail!("empty index entry path");
-    }
-    if let Ok(cstr) = std::ffi::CStr::from_bytes_with_nul(raw) {
-        Ok(cstr
-            .to_str()
-            .unwrap_or_default()
-            .trim_end_matches('\0')
-            .to_string())
-    } else {
-        Ok(String::from_utf8_lossy(raw)
-            .trim_end_matches('\0')
-            .to_string())
-    }
-}
-
-pub(in super::super) fn index_entry_stage(entry: &git2::IndexEntry) -> i32 {
-    ((entry.flags as u32 >> 12) & 0b11) as i32
-}
-
-pub(in super::super) fn as_text_or_binary(
-    path: &str,
-    data: Option<&Vec<u8>>,
-) -> (Option<String>, bool) {
-    let Some(bytes) = data else {
-        return (None, false);
-    };
-    match std::str::from_utf8(bytes) {
-        Ok(s) => (Some(s.to_string()), false),
-        Err(_) => {
-            let lower = path.to_ascii_lowercase();
-            let looks_text = lower.ends_with(".md")
-                || lower.ends_with(".markdown")
-                || lower.ends_with(".txt")
-                || lower.ends_with(".json")
-                || lower.ends_with(".yaml")
-                || lower.ends_with(".yml")
-                || lower.ends_with(".toml")
-                || lower.ends_with(".ini");
-            if looks_text {
-                let lossy = String::from_utf8_lossy(bytes).to_string();
-                return (Some(lossy), false);
-            }
-            (None, true)
-        }
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/db.rs b/api/crates/infrastructure/src/git/workspace/helpers/db.rs
deleted file mode 100644
index a78057de..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/db.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use super::super::*;
-
-pub(in super::super) fn row_to_commit_meta(
-    row: sqlx::postgres::PgRow,
-) -> anyhow::Result<CommitMeta> {
-    let commit_id: Vec<u8> = row.get("commit_id");
-    let parent_commit_id: Option<Vec<u8>> = row.try_get("parent_commit_id").ok();
-    let message: Option<String> = row.try_get("message").ok();
-    let author_name: Option<String> = row.try_get("author_name").ok();
-    let author_email: Option<String> = row.try_get("author_email").ok();
-    let committed_at: DateTime<Utc> = row.get("committed_at");
-    let pack_key: String = row.get("pack_key");
-    let file_hash_index: Json<HashMap<String, String>> = row.get("file_hash_index");
-
-    Ok(CommitMeta {
-        commit_id,
-        parent_commit_id,
-        message,
-        author_name,
-        author_email,
-        committed_at,
-        pack_key,
-        file_hash_index: file_hash_index.0,
-    })
-}
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/errors.rs b/api/crates/infrastructure/src/git/workspace/helpers/errors.rs
deleted file mode 100644
index 97222e47..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/errors.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-pub(in super::super) fn missing_metadata_commit(err: &anyhow::Error) -> Option<String> {
-    let needle = "metadata not found for commit ";
-    for cause in err.chain() {
-        let msg = cause.to_string();
-        if let Some(idx) = msg.find(needle) {
-            let start = idx + needle.len();
-            let rest = &msg[start..];
-            let commit: String = rest
-                .chars()
-                .take_while(|ch| ch.is_ascii_hexdigit())
-                .collect();
-            if !commit.is_empty() {
-                return Some(commit);
-            }
-        }
-    }
-    None
-}
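The helper above recovers a commit hash by scanning the textual `anyhow` error chain. A small usage sketch with hypothetical error values, showing that the inner message survives added context:

```rust
use anyhow::anyhow;

fn main() {
    let err = anyhow!("metadata not found for commit 0a1b2c")
        .context("failed to load pack chain");
    let needle = "metadata not found for commit ";
    // err.chain() walks from the outer context down to the root cause.
    let found = err.chain().find_map(|cause| {
        let msg = cause.to_string();
        let idx = msg.find(needle)?;
        let rest = &msg[idx + needle.len()..];
        let commit: String = rest.chars().take_while(|c| c.is_ascii_hexdigit()).collect();
        (!commit.is_empty()).then_some(commit)
    });
    assert_eq!(found.as_deref(), Some("0a1b2c"));
}
```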
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/front_matter.rs b/api/crates/infrastructure/src/git/workspace/helpers/front_matter.rs
deleted file mode 100644
index dd6fa94e..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/front_matter.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-pub(in super::super) fn find_front_matter_end(s: &str) -> Option<(usize, usize)> {
-    let bytes = s.as_bytes();
-    let mut idx = 0;
-    while idx < bytes.len() {
-        if bytes[idx] == b'\n' {
-            let after_newline = &s[idx + 1..];
-            if after_newline.starts_with("---") {
-                let mut body_start = idx + 1 + 3;
-                let mut remainder = &s[body_start..];
-                // Skip trailing newlines after the closing delimiter to mirror ingest.
-                while remainder.starts_with("\r\n") || remainder.starts_with('\n') {
-                    if remainder.starts_with("\r\n") {
-                        body_start += 2;
-                        remainder = &s[body_start..];
-                    } else {
-                        body_start += 1;
-                        remainder = &s[body_start..];
-                    }
-                }
-                return Some((idx, body_start));
-            }
-        }
-        idx += 1;
-    }
-    None
-}
-
-pub(in super::super) fn split_front_matter(input: &str) -> Option<(&str, &str)> {
-    let after_open = input
-        .strip_prefix("---\r\n")
-        .or_else(|| input.strip_prefix("---\n"))?;
-    if let Some((front_len, body_start)) = find_front_matter_end(after_open) {
-        let front = &after_open[..front_len];
-        let body = &after_open[body_start..];
-        return Some((front, body));
-    }
-    None
-}
-
-pub(in super::super) fn strip_front_matter_body(
-    path: &str,
-    text: Option<String>,
-) -> Option<String> {
-    let txt = text?;
-    let lower = path.to_ascii_lowercase();
-    let is_markdown = lower.ends_with(".md") || lower.ends_with(".markdown");
-    if !is_markdown {
-        return Some(txt);
-    }
-    if let Some((_, body)) = split_front_matter(txt.as_str()) {
-        return Some(body.to_string());
-    }
-    Some(txt)
-}
-
-pub(in super::super) fn extract_markdown_body(bytes: &[u8]) -> Option<String> {
-    let text = std::str::from_utf8(bytes).ok()?;
-    let trimmed = text.trim_start_matches('\u{feff}');
-    if let Some((_, body)) = split_front_matter(trimmed) {
-        return Some(body.to_string());
-    }
-    Some(trimmed.to_string())
-}
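To illustrate the splitting behaviour above, here is a deliberately simplified reimplementation (the deleted helper additionally handles CRLF delimiters and multiple trailing newlines):

```rust
// Minimal front-matter split for illustration only.
fn split(input: &str) -> Option<(&str, &str)> {
    let after_open = input.strip_prefix("---\n")?;
    let end = after_open.find("\n---")?;
    let front = &after_open[..end];
    // Skip the closing delimiter and the blank lines that follow it.
    let body = after_open[end + 4..].trim_start_matches('\n');
    Some((front, body))
}

fn main() {
    let doc = "---\ntitle: Note\n---\n\n# Body\n";
    let (front, body) = split(doc).unwrap();
    assert_eq!(front, "title: Note");
    assert_eq!(body, "# Body\n");
}
```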
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/pack.rs b/api/crates/infrastructure/src/git/workspace/helpers/pack.rs
deleted file mode 100644
index 1f54ee5d..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/pack.rs
+++ /dev/null
@@ -1,44 +0,0 @@
-use super::super::*;
-
-pub(in super::super) fn apply_pack_to_repo(repo: &Repository, pack: &[u8]) -> anyhow::Result<()> {
-    let objects_dir = repo.path().join("objects").join("pack");
-    fs::create_dir_all(&objects_dir)?;
-    let odb = repo.odb()?;
-    let mut indexer = Indexer::new(Some(&odb), objects_dir.as_path(), 0o644, true)?;
-    indexer.write_all(pack)?;
-    indexer.commit()?;
-    Ok(())
-}
-
-pub(in super::super) fn read_first_pack(repo_path: &Path) -> anyhow::Result<Option<Vec<u8>>> {
-    let pack_dir = repo_path.join("objects").join("pack");
-    if !pack_dir.exists() {
-        return Ok(None);
-    }
-    let mut entries: Vec<_> = std::fs::read_dir(&pack_dir)?
-        .filter_map(|e| e.ok())
-        .filter(|e| {
-            e.path()
-                .extension()
-                .map(|ext| ext == "pack")
-                .unwrap_or(false)
-        })
-        .collect();
-    entries.sort_by_key(|e| e.file_name());
-    if let Some(entry) = entries.first() {
-        let bytes = std::fs::read(entry.path())?;
-        return Ok(Some(bytes));
-    }
-    Ok(None)
-}
-
-pub(in super::super) fn apply_pack_files(
-    repo: &Repository,
-    pack_paths: &[PathBuf],
-) -> anyhow::Result<()> {
-    for path in pack_paths {
-        let bytes = fs::read(path)?;
-        apply_pack_to_repo(repo, &bytes)?;
-    }
-    Ok(())
-}
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/remote.rs b/api/crates/infrastructure/src/git/workspace/helpers/remote.rs
deleted file mode 100644
index cdf1114b..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/remote.rs
+++ /dev/null
@@ -1,165 +0,0 @@
-use super::super::*;
-
-pub(in super::super) fn extract_host(url: &str) -> Option<String> {
-    let s = url.trim();
-    let s = s
-        .strip_prefix("https://")
-        .or_else(|| s.strip_prefix("http://"))
-        .unwrap_or(s);
-    let mut parts = s.split('/');
-    let host_port = parts.next().unwrap_or("");
-    let host = host_port.split(':').next().unwrap_or("");
-    if host.is_empty() {
-        None
-    } else {
-        Some(host.to_string())
-    }
-}
-
-pub(in super::super) fn default_token_username_for(host: Option<&str>) -> &'static str {
-    match host {
-        Some(h) if h.contains("github") => "x-access-token",
-        Some(h) if h.contains("gitlab") => "oauth2",
-        Some(h) if h.contains("dev.azure.com") || h.contains("visualstudio.com") => "pat",
-        _ => "git",
-    }
-}
-
-pub(in super::super) fn build_remote_callbacks(cfg: &UserGitCfg) -> RemoteCallbacks<'static> {
-    let auth_type = cfg.auth_type;
-    let auth_data = cfg.auth_data.clone();
-    let host_hint = extract_host(&cfg.repository_url);
-    let mut callbacks = RemoteCallbacks::new();
-    callbacks.credentials(
-        move |_url, username_from_url, _allowed| match auth_type {
-            Some(domain::git::auth::GitAuthType::Token) => {
-                if let Some(token) = auth_data
-                    .as_ref()
-                    .and_then(|v| v.get("token"))
-                    .and_then(|v| v.as_str())
-                {
-                    let user = username_from_url
-                        .unwrap_or(default_token_username_for(host_hint.as_deref()));
-                    Cred::userpass_plaintext(user, token)
-                } else {
-                    Cred::default()
-                }
-            }
-            Some(domain::git::auth::GitAuthType::Ssh) => {
-                if let Some(key) = auth_data
-                    .as_ref()
-                    .and_then(|v| v.get("private_key"))
-                    .and_then(|v| v.as_str())
-                {
-                    let user = username_from_url.unwrap_or("git");
-                    let passphrase = auth_data
-                        .as_ref()
-                        .and_then(|v| v.get("passphrase"))
-                        .and_then(|v| v.as_str())
-                        .filter(|s| !s.is_empty());
-                    let trimmed = key.trim();
-                    if trimmed.starts_with("v1:") {
-                        return Err(GitError::from_str(
-                            "failed to decrypt stored SSH key; check ENCRYPTION_KEY and re-save credentials",
-                        ));
-                    }
-                    if trimmed.contains("BEGIN OPENSSH PRIVATE KEY") {
-                        return Err(GitError::from_str(
-                            "OpenSSH private key format is not supported; provide PEM (BEGIN RSA/EC PRIVATE KEY)",
-                        ));
-                    }
-                    let needs_passphrase = trimmed.contains("ENCRYPTED");
-                    if needs_passphrase && passphrase.is_none() {
-                        return Err(GitError::from_str(
-                            "SSH private key is encrypted; passphrase is required",
-                        ));
-                    }
-                    Cred::ssh_key_from_memory(user, None, trimmed, passphrase)
-                } else {
-                    Cred::default()
-                }
-            }
-            None => Cred::default(),
-        },
-    );
-    callbacks.certificate_check(|_, _| Ok(CertificateCheckStatus::CertificateOk));
-    callbacks
-}
-
-pub(in super::super) fn prepare_remote<'repo>(
-    repo: &'repo Repository,
-    cfg: &UserGitCfg,
-) -> anyhow::Result<git2::Remote<'repo>> {
-    let mut remote = match repo.find_remote("origin") {
-        Ok(remote) => remote,
-        Err(_) => repo.remote("origin", &cfg.repository_url)?,
-    };
-    if remote.url() != Some(cfg.repository_url.as_str()) {
-        repo.remote_set_url("origin", &cfg.repository_url)?;
-        remote = repo.find_remote("origin")?;
-    }
-    Ok(remote)
-}
-
-pub(in super::super) fn fetch_remote_head(
-    repo: &Repository,
-    cfg: &UserGitCfg,
-    branch: &str,
-) -> anyhow::Result<Option<git2::Oid>> {
-    let mut remote = prepare_remote(repo, cfg)?;
-    let callbacks = build_remote_callbacks(cfg);
-    let mut fetch_options = FetchOptions::new();
-    fetch_options.remote_callbacks(callbacks);
-    let refspec = format!("refs/heads/{branch}:refs/remotes/origin/{branch}");
-    remote
-        .fetch(&[&refspec], Some(&mut fetch_options), None)
-        .map_err(map_git_http_error)?;
-    let reference_name = format!("refs/remotes/origin/{branch}");
-    match repo.find_reference(&reference_name) {
-        Ok(reference) => Ok(reference.target()),
-        Err(err) if err.code() == git2::ErrorCode::NotFound => Ok(None),
-        Err(err) => Err(err.into()),
-    }
-}
-
-pub(in super::super) fn perform_push(
-    repo: &Repository,
-    cfg: &UserGitCfg,
-    branch: &str,
-    commit_oid: git2::Oid,
-    force: bool,
-) -> anyhow::Result<bool> {
-    let ref_name = format!("refs/heads/{}", branch);
-    repo.reference(&ref_name, commit_oid, true, "update branch for sync")?;
-
-    let mut remote = prepare_remote(repo, cfg)?;
-    let callbacks = build_remote_callbacks(cfg);
-    let mut push_options = PushOptions::new();
-    push_options.remote_callbacks(callbacks);
-    let refspec = if force {
-        format!("+refs/heads/{0}:refs/heads/{0}", branch)
-    } else {
-        format!("refs/heads/{0}:refs/heads/{0}", branch)
-    };
-    remote
-        .push(&[&refspec], Some(&mut push_options))
-        .map_err(map_git_http_error)?;
-    Ok(true)
-}
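The `force` flag above only changes the refspec prefix: a leading `+` tells the remote to accept a non-fast-forward update. A tiny self-contained sketch of that formatting:

```rust
fn push_refspec(branch: &str, force: bool) -> String {
    // '+' marks the refspec as a forced (non-fast-forward) update.
    if force {
        format!("+refs/heads/{0}:refs/heads/{0}", branch)
    } else {
        format!("refs/heads/{0}:refs/heads/{0}", branch)
    }
}

fn main() {
    assert_eq!(push_refspec("main", false), "refs/heads/main:refs/heads/main");
    assert_eq!(push_refspec("main", true), "+refs/heads/main:refs/heads/main");
}
```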
-
-pub(in super::super) fn map_git_http_error(err: git2::Error) -> anyhow::Error {
-    if err.class() == ErrorClass::Http {
-        let msg = err.to_string().to_lowercase();
-        if msg.contains("status code: 401")
-            || msg.contains("status code: 407")
-            || msg.contains("redirect")
-        {
-            // Avoid leaking raw libgit2 error strings to the user; normalize to a short tag.
-            return anyhow!("git_http_auth_redirect");
-        }
-        if msg.contains("status code: 403") || msg.contains("status code: 404") {
-            return anyhow!("git_http_not_found");
-        }
-    }
-    err.into()
-}
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/snapshots.rs b/api/crates/infrastructure/src/git/workspace/helpers/snapshots.rs
deleted file mode 100644
index ef731816..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/snapshots.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-use super::super::*;
-
-use super::tree::{DirEntry, DirNode, write_dir};
-
-#[allow(dead_code)]
-pub(in super::super) fn read_commit_files(
-    repo: &Repository,
-    commit_id: &[u8],
-) -> anyhow::Result<HashMap<String, Vec<u8>>> {
-    let oid = git2::Oid::from_bytes(commit_id)?;
-    let commit = repo.find_commit(oid)?;
-    let tree = commit.tree()?;
-    let mut files = HashMap::new();
-    tree.walk(TreeWalkMode::PreOrder, |root, entry| {
-        if entry.kind() == Some(ObjectType::Blob) {
-            if let Some(name) = entry.name() {
-                if let Ok(blob) = repo.find_blob(entry.id()) {
-                    let key = format!("{}{}", root, name);
-                    files.insert(key, blob.content().to_vec());
-                }
-            }
-        }
-        TreeWalkResult::Ok
-    })?;
-    Ok(files)
-}
-
-pub(in super::super) enum FileSnapshotData {
-    Inline(Vec<u8>),
-    StoragePath(String),
-}
-
-pub(in super::super) struct FileSnapshot {
-    pub(in super::super) hash: String,
-    pub(in super::super) data: FileSnapshotData,
-    pub(in super::super) is_text: bool,
-}
-
-pub(in super::super) struct FileDeltaSummary {
-    pub(in super::super) added: Vec<String>,
-    pub(in super::super) modified: Vec<String>,
-    pub(in super::super) deleted: Vec<String>,
-}
-
-pub(in super::super) struct DirtyRow {
-    pub(in super::super) path: String,
-    pub(in super::super) is_text: bool,
-    pub(in super::super) op: String,
-    pub(in super::super) content_hash: Option<String>,
-}
-
-pub(in super::super) struct DirtyUpsert {
-    pub(in super::super) is_text: bool,
-    pub(in super::super) content_hash: Option<String>,
-}
-
-pub(in super::super) fn repo_relative_path(path: &str) -> anyhow::Result<String> {
-    let trimmed = path.trim_start_matches('/');
-    let mut parts = trimmed.splitn(2, '/');
-    let leading = parts.next().unwrap_or("");
-    if let Some(rest) = parts.next() {
-        Ok(rest.replace('\\', "/"))
-    } else if !leading.is_empty() {
-        Ok(leading.replace('\\', "/"))
-    } else {
-        Err(anyhow!("invalid storage path for repository: {path}"))
-    }
-}
-
-pub(in super::super) fn normalize_repo_path(path: String) -> String {
-    let trimmed = path.trim_start_matches('/');
-    if trimmed.is_empty() {
-        String::new()
-    } else {
-        trimmed
-            .replace('\\', "/")
-            .trim_start_matches("./")
-            .trim_start_matches('/')
-            .to_string()
-    }
-}
-
-pub(in super::super) fn blob_key(workspace_id: Uuid, commit_id: &[u8], path: &str) -> BlobKey {
-    let encoded_path = urlencoding::encode(path);
-    let commit_hex = encode_commit_id(commit_id);
-    BlobKey {
-        path: format!("{}/{}/{}", workspace_id, commit_hex, encoded_path),
-    }
-}
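Blob keys produced above take the shape `{workspace}/{commit_hex}/{url-encoded path}`. A quick sketch with hypothetical values, using the same `urlencoding` crate the helper calls:

```rust
fn main() {
    // Hypothetical values; in the deleted helper the first segment is a workspace Uuid.
    let workspace = "0f8e2c1a-7a5e-4d8c-9b1f-000000000000";
    let commit_hex = "0a1b2c";
    let path = "notes/2024/plan.md";
    let key = format!("{}/{}/{}", workspace, commit_hex, urlencoding::encode(path));
    // '/' inside the document path is percent-encoded, so the key keeps exactly three segments.
    assert_eq!(key.matches('/').count(), 2);
}
```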
-
-pub(in super::super) enum FileSource {
-    Bytes(Vec<u8>),
-    Oid(git2::Oid),
-}
-
-pub(in super::super) fn insert_source_into_dir(
-    dir: &mut DirNode,
-    parts: &[&str],
-    source: &FileSource,
-) -> anyhow::Result<()> {
-    use std::collections::btree_map::Entry;
-    if parts.is_empty() {
-        return Ok(());
-    }
-    if parts.len() == 1 {
-        match source {
-            FileSource::Bytes(data) => {
-                dir.entries
-                    .insert(parts[0].to_string(), DirEntry::File(data.clone()));
-            }
-            FileSource::Oid(oid) => {
-                dir.entries
-                    .insert(parts[0].to_string(), DirEntry::Oid(*oid));
-            }
-        }
-        Ok(())
-    } else {
-        match dir.entries.entry(parts[0].to_string()) {
-            Entry::Occupied(mut occ) => match occ.get_mut() {
-                DirEntry::Dir(child) => insert_source_into_dir(child, &parts[1..], source),
-                DirEntry::File(_) | DirEntry::Oid(_) => {
-                    let mut new_dir = DirNode::default();
-                    insert_source_into_dir(&mut new_dir, &parts[1..], source)?;
-                    *occ.get_mut() = DirEntry::Dir(Box::new(new_dir));
-                    Ok(())
-                }
-            },
-            Entry::Vacant(vac) => {
-                let mut new_dir = DirNode::default();
-                insert_source_into_dir(&mut new_dir, &parts[1..], source)?;
-                vac.insert(DirEntry::Dir(Box::new(new_dir)));
-                Ok(())
-            }
-        }
-    }
-}
-
-pub(in super::super) fn read_commit_blob_oids(
-    repo: &Repository,
-    commit_id: &[u8],
-) -> anyhow::Result<HashMap<String, git2::Oid>> {
-    let oid = git2::Oid::from_bytes(commit_id)?;
-    let commit = repo.find_commit(oid)?;
-    let tree = commit.tree()?;
-    let mut blobs = HashMap::new();
-    tree.walk(TreeWalkMode::PreOrder, |root, entry| {
-        if entry.kind() == Some(ObjectType::Blob) {
-            if let Some(name) = entry.name() {
-                let key = format!("{}{}", root, name);
-                blobs.insert(key, entry.id());
-            }
-        }
-        TreeWalkResult::Ok
-    })?;
-    Ok(blobs)
-}
-
-pub(in super::super) fn build_tree_from_sources(
-    repo: &Repository,
-    entries: &BTreeMap<String, FileSource>,
-) -> anyhow::Result<git2::Oid> {
-    // We'll reconstruct a DirNode and then write it, but we need to preserve existing blob OIDs for FileSource::Oid.
-    let mut root = DirNode::default();
-    for (path, src) in entries.iter() {
-        let parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
-        if parts.is_empty() {
-            continue;
-        }
-        insert_source_into_dir(&mut root, &parts, src)?;
-    }
-    write_dir(repo, &root)
-}
diff --git a/api/crates/infrastructure/src/git/workspace/helpers/tree.rs b/api/crates/infrastructure/src/git/workspace/helpers/tree.rs
deleted file mode 100644
index e2f8d48e..00000000
--- a/api/crates/infrastructure/src/git/workspace/helpers/tree.rs
+++ /dev/null
@@ -1,98 +0,0 @@
-use super::super::*;
-
-pub(in super::super) fn build_tree_from_entries(
-    repo: &Repository,
-    entries: &BTreeMap<String, Vec<u8>>,
-) -> anyhow::Result<git2::Oid> {
-    let mut root = DirNode::default();
-    for (path, data) in entries.iter() {
-        let parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
-        if parts.is_empty() {
-            continue;
-        }
-        insert_into_dir(&mut root, &parts, data.clone());
-    }
-    write_dir(repo, &root)
-}
-
-pub(in super::super) fn signature_from_parts(
-    name: &str,
-    email: &str,
-    at: DateTime<Utc>,
-) -> anyhow::Result<Signature<'static>> {
-    let git_time = Time::new(at.timestamp(), 0);
-    Signature::new(name, email, &git_time).map_err(anyhow::Error::from)
-}
-
-pub(in super::super) fn git_time_to_datetime(time: Time) -> anyhow::Result<DateTime<Utc>> {
-    DateTime::<Utc>::from_timestamp(time.seconds(), 0)
-        .ok_or_else(|| anyhow!("invalid git timestamp"))
-}
-
-#[derive(Default)]
-pub(in super::super) struct DirNode {
-    pub(in super::super) entries: BTreeMap<String, DirEntry>,
-}
-
-pub(in super::super) enum DirEntry {
-    File(Vec<u8>),
-    Oid(git2::Oid),
-    Dir(Box<DirNode>),
-}
-
-pub(in super::super) fn insert_into_dir(dir: &mut DirNode, parts: &[&str], data: Vec<u8>) {
-    use std::collections::btree_map::Entry;
-
-    if parts.is_empty() {
-        return;
-    }
-
-    if parts.len() == 1 {
-        dir.entries
-            .insert(parts[0].to_string(), DirEntry::File(data));
-        return;
-    }
-
-    match dir.entries.entry(parts[0].to_string()) {
-        Entry::Occupied(mut occ) => {
-            let next = occ.get_mut();
-            match next {
-                DirEntry::Dir(child) => insert_into_dir(child, &parts[1..], data),
-                DirEntry::File(_) | DirEntry::Oid(_) => {
-                    let mut new_dir = DirNode::default();
-                    insert_into_dir(&mut new_dir, &parts[1..], data);
-                    *next = DirEntry::Dir(Box::new(new_dir));
-                }
-            }
-        }
-        Entry::Vacant(vac) => {
-            if parts.len() == 1 {
-                vac.insert(DirEntry::File(data));
-            } else {
-                let mut new_dir = DirNode::default();
-                insert_into_dir(&mut new_dir, &parts[1..], data);
-                vac.insert(DirEntry::Dir(Box::new(new_dir)));
-            }
-        }
-    }
-}
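The `DirNode` recursion above turns flat `a/b/c.md` paths into nested trees, replacing a file when a directory needs to take its place. A cut-down, self-contained illustration of the same shape (without git2):

```rust
use std::collections::BTreeMap;

#[derive(Default, Debug)]
struct Node {
    children: BTreeMap<String, Entry>,
}

#[derive(Debug)]
enum Entry {
    File(Vec<u8>),
    Dir(Box<Node>),
}

fn insert(node: &mut Node, parts: &[&str], data: Vec<u8>) {
    if parts.is_empty() {
        return;
    }
    if parts.len() == 1 {
        node.children.insert(parts[0].into(), Entry::File(data));
        return;
    }
    let child = node
        .children
        .entry(parts[0].into())
        .or_insert_with(|| Entry::Dir(Box::default()));
    match child {
        Entry::Dir(dir) => insert(dir, &parts[1..], data),
        // A file shadowed by a directory is replaced, matching insert_into_dir above.
        other => {
            let mut dir = Node::default();
            insert(&mut dir, &parts[1..], data);
            *other = Entry::Dir(Box::new(dir));
        }
    }
}

fn main() {
    let mut root = Node::default();
    insert(&mut root, &["docs", "a.md"], b"hi".to_vec());
    insert(&mut root, &["docs", "sub", "b.md"], b"yo".to_vec());
    println!("{root:#?}");
}
```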
-
-pub(in super::super) fn write_dir(repo: &Repository, dir: &DirNode) -> anyhow::Result<git2::Oid> {
-    let mut builder = repo.treebuilder(None)?;
-    for (name, entry) in dir.entries.iter() {
-        match entry {
-            DirEntry::File(content) => {
-                let oid = repo.blob(content)?;
-                builder.insert(name, oid, FileMode::Blob.into())?;
-            }
-            DirEntry::Oid(oid) => {
-                builder.insert(name, *oid, FileMode::Blob.into())?;
-            }
-            DirEntry::Dir(child) => {
-                let oid = write_dir(repo, child)?;
-                builder.insert(name, oid, FileMode::Tree.into())?;
-            }
-        }
-    }
-    Ok(builder.write()?)
-}
diff --git a/api/crates/infrastructure/src/git/workspace/import.rs b/api/crates/infrastructure/src/git/workspace/import.rs
deleted file mode 100644
index b7312e62..00000000
--- a/api/crates/infrastructure/src/git/workspace/import.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-impl GitWorkspaceService {
-    async fn import_repository_inner(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        cfg: &UserGitCfg,
-    ) -> anyhow::Result<GitImportOutcome> {
-        // Suppress dirty tracking globally during import so filesystem watcher/ingest won't re-mark files.
-        let _global_dirty_guard = crate::core::storage::suppress_git_dirty_global();
-        let branch = if cfg.branch_name.is_empty() {
-            "main".to_string()
-        } else {
-            cfg.branch_name.clone()
-        };
-        self.ensure_repository(workspace_id, &branch).await?;
-
-        let previous_index = self
-            .latest_commit_meta(workspace_id)
-            .await?
-            .map(|m| m.file_hash_index)
-            .unwrap_or_default();
-
-        // Populate storage and DB with remote history; surface errors so we don't proceed with missing packs.
-        self.bootstrap_remote_history(workspace_id, cfg, branch.as_str())
-            .await?;
-        let latest = self.ensure_latest_meta(workspace_id).await?;
-        let Some(latest_meta) = latest else {
-            return Ok(GitImportOutcome {
-                files_changed: 0,
-                commit_hash: None,
-                docs_created: 0,
-                attachments_created: 0,
-                message: "remote has no commits".to_string(),
-            });
-        };
-
-        let state = self
-            .state_from_commit_meta(workspace_id, &latest_meta)
-            .await?;
-        let files_changed = crate::core::storage::suppress_git_dirty(async {
-            self.apply_state_to_workspace(workspace_id, &state, &previous_index)
-                .await
-        })
-        .await?;
-
-        // Materialize documents and attachments from imported state; surface failures so Import can fail loudly.
-        let (docs_created, attachments_created) =
-            crate::core::storage::suppress_git_dirty(async {
-                self.materialize_documents_from_state(workspace_id, actor_id, &state)
-                    .await
-            })
-            .await?;
-
-        self.apply_merged_to_documents(workspace_id, &state).await?;
-        self.clear_dirty(workspace_id).await.map_err(|err| {
-            error!(workspace_id = %workspace_id, error = %err, "git_import_clear_dirty_failed");
-            err
-        })?;
-
-        Ok(GitImportOutcome {
-            files_changed,
-            docs_created,
-            attachments_created,
-            commit_hash: Some(encode_commit_id(&latest_meta.commit_id)),
-            message: "import completed".to_string(),
-        })
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/mod.rs b/api/crates/infrastructure/src/git/workspace/mod.rs
deleted file mode 100644
index ad31ce59..00000000
--- a/api/crates/infrastructure/src/git/workspace/mod.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
-use std::fs;
-use std::io::{self, ErrorKind, Write};
-use std::path::{Path, PathBuf};
-use std::sync::Arc;
-
-use anyhow::{Context, anyhow};
-use async_trait::async_trait;
-use chrono::{DateTime, Utc};
-use futures_util::StreamExt;
-use git2::{
-    CertificateCheckStatus, Commit, Cred, Error as GitError, ErrorClass, FetchOptions, FileMode,
-    Indexer, ObjectType, PushOptions, RemoteCallbacks, Repository, Signature, Sort, Time,
-    TreeWalkMode, TreeWalkResult,
-};
-use sqlx::{Row, types::Json};
-use tempfile::{Builder as TempDirBuilder, TempDir};
-use tracing::{error, info, warn};
-use uuid::Uuid;
-
-use crate::core::db::PgPool;
-use application::core::dtos::TextDiffResult;
-use application::core::ports::errors::PortResult;
-use application::core::ports::storage::storage_port::StorageResolverPort;
-use application::core::services::diff::text_diff::compute_text_diff;
-use application::core::services::utils::hash::sha256_hex;
-use application::documents::ports::document_path_repository::DocumentPathRepository;
-use application::documents::ports::document_repository::DocumentRepository;
-use application::documents::ports::realtime::realtime_port::RealtimeEngine;
-use application::documents::services::realtime::snapshot::{
-    SnapshotService, snapshot_from_markdown,
-};
-use application::git::dtos::{
-    GitChangeItem, GitCommitInfo, GitImportOutcome, GitPullConflictItemDto, GitPullRequestDto,
-    GitPullResultDto, GitRemoteCheckDto, GitSyncOutcome, GitSyncRequestDto, GitWorkspaceStatus,
-};
-use application::git::ports::git_repository::UserGitCfg;
-use application::git::ports::git_storage::{
-    BlobKey, CommitMeta, GitStorage, decode_commit_id, encode_commit_id,
-};
-use application::git::ports::git_workspace::GitWorkspacePort;
-use tokio::fs as async_fs;
-
-mod helpers;
-use helpers::*;
-
-pub struct GitWorkspaceService {
-    pool: PgPool,
-    git_storage: Arc<dyn GitStorage>,
-    storage: Arc<dyn StorageResolverPort>,
-    snapshot: Arc<SnapshotService>,
-    realtime: Arc<dyn RealtimeEngine>,
-    docs: Arc<dyn DocumentRepository>,
-    doc_paths: Arc<dyn DocumentPathRepository>,
-}
-
-include!("workspace_service.rs");
-include!("sync.rs");
-include!("import.rs");
-include!("remote.rs");
-include!("port.rs");
-include!("pull.rs");
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn sync_build_commit_pack_skips_noop_commit_on_full_scan() -> anyhow::Result<()> {
-        let temp_dir = TempDirBuilder::new().prefix("git-sync-test-").tempdir()?;
-        let repo = Repository::init_bare(temp_dir.path())?;
-
-        let mut entries: BTreeMap<String, Vec<u8>> = BTreeMap::new();
-        entries.insert("doc.md".to_string(), b"hello".to_vec());
-
-        let base_tree_oid = build_tree_from_entries(&repo, &entries)?;
-        let base_tree = repo.find_tree(base_tree_oid)?;
-        let sig = signature_from_parts("RefMD", "refmd@example.com", Utc::now())?;
-        let base_oid = repo.commit(Some("refs/heads/main"), &sig, &sig, "base", &base_tree, &[])?;
-
-        let latest_meta = CommitMeta {
-            commit_id: base_oid.as_bytes().to_vec(),
-            parent_commit_id: None,
-            message: None,
-            author_name: None,
-            author_email: None,
-            committed_at: Utc::now(),
-            pack_key: String::new(),
-            file_hash_index: HashMap::new(),
-        };
-
-        let outcome = GitWorkspaceService::sync_build_commit_pack(
-            Uuid::new_v4(),
-            &repo,
-            Some(&latest_meta),
-            "main",
-            "RefMD",
-            "refmd@example.com",
-            Utc::now(),
-            "Automated Git rebuild",
-            true,
-            Some(&entries),
-            &BTreeSet::new(),
-            &BTreeMap::new(),
-            HashMap::new(),
-            None,
-            true,
-            false,
-        )?;
-
-        match outcome {
-            SyncBuildCommitPackOutcome::NoChanges { commit_hex, pushed } => {
-                assert_eq!(commit_hex, encode_commit_id(base_oid.as_bytes()));
-                assert!(!pushed);
-            }
-            SyncBuildCommitPackOutcome::Committed { .. } => {
-                anyhow::bail!("expected NoChanges, got Committed")
-            }
-        }
-
-        Ok(())
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/port.rs b/api/crates/infrastructure/src/git/workspace/port.rs
deleted file mode 100644
index e9695eec..00000000
--- a/api/crates/infrastructure/src/git/workspace/port.rs
+++ /dev/null
@@ -1,408 +0,0 @@
-#[async_trait]
-impl GitWorkspacePort for GitWorkspaceService {
-
-    async fn ensure_repository(
-        &self,
-        workspace_id: Uuid,
-        default_branch: &str,
-    ) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            sqlx::query(
-                r#"INSERT INTO git_repository_state (workspace_id, initialized, default_branch, initialized_at, updated_at)
-                   VALUES ($1, true, $2, now(), now())
-                   ON CONFLICT (workspace_id) DO UPDATE SET
-                     initialized = true,
-                     default_branch = EXCLUDED.default_branch,
-                     initialized_at = COALESCE(git_repository_state.initialized_at, EXCLUDED.initialized_at),
-                     updated_at = now()"#,
-            )
-            .bind(workspace_id)
-            .bind(default_branch)
-            .execute(&self.pool)
-            .await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn remove_repository(&self, workspace_id: Uuid) -> PortResult<()> {
-        let out: anyhow::Result<()> = async {
-            let mut tx = self.pool.begin().await?;
-            sqlx::query("DELETE FROM git_dirty_files WHERE workspace_id = $1")
-                .bind(workspace_id)
-                .execute(&mut *tx)
-                .await?;
-            sqlx::query("DELETE FROM git_commits WHERE workspace_id = $1")
-                .bind(workspace_id)
-                .execute(&mut *tx)
-                .await?;
-            sqlx::query(
-                "UPDATE git_repository_state SET initialized = false, updated_at = now() WHERE workspace_id = $1",
-            )
-            .bind(workspace_id)
-            .execute(&mut *tx)
-            .await?;
-            tx.commit().await?;
-            self.git_storage.delete_all(workspace_id).await?;
-            Ok(())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn status(&self, workspace_id: Uuid) -> PortResult<GitWorkspaceStatus> {
-        let out: anyhow::Result<GitWorkspaceStatus> = async {
-            let state = self.load_repository_state(workspace_id).await?;
-            let Some((initialized, branch)) = state else {
-                return Ok(GitWorkspaceStatus {
-                    repository_initialized: false,
-                    current_branch: None,
-                    uncommitted_changes: 0,
-                    untracked_files: 0,
-                });
-            };
-            if !initialized {
-                return Ok(GitWorkspaceStatus {
-                    repository_initialized: false,
-                    current_branch: Some(branch),
-                    uncommitted_changes: 0,
-                    untracked_files: 0,
-                });
-            }
-            // Dirty-driven status: avoid full workspace scan
-            let latest = self.latest_commit_meta(workspace_id).await?;
-            let previous_index: HashMap<String, String> = latest
-                .as_ref()
-                .map(|c| c.file_hash_index.clone())
-                .unwrap_or_default();
-
-            let dirty = self.fetch_dirty(workspace_id).await?;
-            let mut added: u32 = 0;
-            let mut modified: u32 = 0;
-            let mut deleted: u32 = 0;
-
-            for d in dirty.iter() {
-                match d.op.as_str() {
-                    "upsert" => {
-                        if let Some(prev_hash) = previous_index.get(&d.path) {
-                            // Existing file: if hash unchanged and hash known, ignore; else modified
-                            match d.content_hash.as_ref() {
-                                Some(h) if h == prev_hash => {}
-                                _ => modified += 1,
-                            }
-                        } else {
-                            // New file
-                            added += 1;
-                        }
-                    }
-                    "delete" => {
-                        // Treat as deleted (even if not present in previous index)
-                        deleted += 1;
-                    }
-                    _ => {}
-                }
-            }
-
-            Ok(GitWorkspaceStatus {
-                repository_initialized: true,
-                current_branch: Some(branch),
-                uncommitted_changes: modified + deleted,
-                untracked_files: added,
-            })
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn list_changes(&self, workspace_id: Uuid) -> PortResult<Vec<GitChangeItem>> {
-        let out: anyhow::Result<Vec<GitChangeItem>> = async {
-            // If repository isn't initialized, nothing to report
-            if let Some((initialized, _branch)) = self.load_repository_state(workspace_id).await? {
-                if !initialized {
-                    return Ok(Vec::new());
-                }
-            } else {
-                return Ok(Vec::new());
-            }
-
-            // Use dirty set to derive changes without scanning storage
-            let latest = self.latest_commit_meta(workspace_id).await?;
-            let previous_index: HashMap<String, String> = latest
-                .as_ref()
-                .map(|c| c.file_hash_index.clone())
-                .unwrap_or_default();
-            let dirty = self.fetch_dirty(workspace_id).await?;
-
-            let mut change_map: BTreeMap<String, String> = BTreeMap::new();
-            for d in dirty.iter() {
-                match d.op.as_str() {
-                    "upsert" => {
-                        if let Some(prev_hash) = previous_index.get(&d.path) {
-                            // If hash unchanged and we know the new hash, skip reporting
-                            match d.content_hash.as_ref() {
-                                Some(h) if h == prev_hash => {
-                                    change_map.remove(&d.path);
-                                }
-                                _ => {
-                                    change_map.insert(d.path.clone(), "modified".to_string());
-                                }
-                            }
-                        } else {
-                            change_map.insert(d.path.clone(), "untracked".to_string());
-                        }
-                    }
-                    "delete" => {
-                        change_map.insert(d.path.clone(), "deleted".to_string());
-                    }
-                    _ => {}
-                }
-            }
-
-            let changes = change_map
-                .into_iter()
-                .map(|(path, status)| GitChangeItem { path, status })
-                .collect();
-            Ok(changes)
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn working_diff(&self, workspace_id: Uuid) -> PortResult<Vec<TextDiffResult>> {
-        let out: anyhow::Result<Vec<TextDiffResult>> = async {
-            let latest = self.latest_commit_meta(workspace_id).await?;
-            let previous_index = latest
-                .as_ref()
-                .map(|c| c.file_hash_index.clone())
-                .unwrap_or_default();
-            let current = self.collect_current_state(workspace_id).await?;
-            let delta = self.compute_deltas(&current, &previous_index);
-            let mut results = Vec::new();
-
-            let latest_commit_id = latest.as_ref().map(|c| c.commit_id.clone());
-
-            for path in delta.added.iter().chain(delta.modified.iter()) {
-                if let Some(snapshot) = current.get(path) {
-                    if snapshot.is_text {
-                        let new_bytes = self.snapshot_bytes(snapshot).await?;
-                        let new_content = String::from_utf8_lossy(&new_bytes).to_string();
-                        let old_bytes = match (&latest_commit_id, previous_index.get(path)) {
-                            (Some(commit_id), Some(_)) => {
-                                self.load_file_snapshot(workspace_id, commit_id.as_slice(), path)
-                                    .await?
-                            }
-                            _ => None,
-                        };
-                        let old_text = old_bytes.and_then(|b| String::from_utf8(b).ok());
-                        results.push(self.build_diff_result(
-                            path,
-                            old_text.as_deref(),
-                            Some(&new_content),
-                        ));
-                    } else {
-                        results.push(TextDiffResult {
-                            file_path: path.clone(),
-                            diff_lines: Vec::new(),
-                            old_content: None,
-                            new_content: None,
-                        });
-                    }
-                }
-            }
-
-            for path in delta.deleted {
-                let old_bytes = if let (Some(commit_id), Some(_)) =
-                    (&latest_commit_id, previous_index.get(&path))
-                {
-                    self.load_file_snapshot(workspace_id, commit_id.as_slice(), &path)
-                        .await?
-                } else {
-                    None
-                };
-                let old_text = old_bytes.and_then(|b| String::from_utf8(b).ok());
-                results.push(self.build_diff_result(&path, old_text.as_deref(), None));
-            }
-
-            Ok(results)
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn commit_diff(
-        &self,
-        workspace_id: Uuid,
-        from: &str,
-        to: &str,
-    ) -> PortResult<Vec<TextDiffResult>> {
-        let out: anyhow::Result<Vec<TextDiffResult>> = async {
-            let from_meta = self.load_commit_meta_ref(workspace_id, from).await?;
-            let to_meta = self.load_commit_meta_ref(workspace_id, to).await?;
-
-            if let Some(to_meta_ref) = to_meta.as_ref() {
-                match self
-                    .commit_diff_via_packs(workspace_id, from_meta.as_ref(), to_meta_ref)
-                    .await
-                {
-                    Ok(results) => return Ok(results),
-                    Err(err) => {
-                        warn!(
-                            %err,
-                            from = from_meta
-                                .as_ref()
-                                .map(|m| encode_commit_id(&m.commit_id))
-                                .unwrap_or_else(|| "(root)".to_string()),
-                            to = encode_commit_id(&to_meta_ref.commit_id),
-                            "failed to compute commit diff from pack data, using stored snapshots"
-                        );
-                    }
-                }
-            }
-
-            self.commit_diff_from_storage(workspace_id, from_meta.as_ref(), to_meta.as_ref())
-                .await
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn history(&self, workspace_id: Uuid) -> PortResult<Vec<GitCommitInfo>> {
-        let out: anyhow::Result<Vec<GitCommitInfo>> = async {
-            let rows = sqlx::query(
-                r#"SELECT commit_id, message, author_name, author_email, committed_at
-                   FROM git_commits
-                   WHERE workspace_id = $1
-                   ORDER BY committed_at DESC
-                   LIMIT 200"#,
-            )
-            .bind(workspace_id)
-            .fetch_all(&self.pool)
-            .await?;
-
-            let history = rows
-                .into_iter()
-                .map(|row| {
-                    let commit_id: Vec<u8> = row.get("commit_id");
-                    let message: Option<String> = row.try_get("message").ok();
-                    let author_name: Option<String> = row.try_get("author_name").ok();
-                    let author_email: Option<String> = row.try_get("author_email").ok();
-                    let committed_at: DateTime<Utc> = row.get("committed_at");
-                    GitCommitInfo {
-                        hash: encode_commit_id(&commit_id),
-                        message: message.unwrap_or_default(),
-                        author_name: author_name.unwrap_or_default(),
-                        author_email: author_email.unwrap_or_default(),
-                        time: committed_at,
-                    }
-                })
-                .collect();
-            Ok(history)
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn sync(
-        &self,
-        workspace_id: Uuid,
-        req: &GitSyncRequestDto,
-        cfg: Option<&UserGitCfg>,
-    ) -> PortResult<GitSyncOutcome> {
-        self.sync_inner(workspace_id, req, cfg)
-            .await
-            .map_err(Into::into)
-    }
-
-    async fn import_repository(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        cfg: &UserGitCfg,
-    ) -> PortResult<GitImportOutcome> {
-        self.import_repository_inner(workspace_id, actor_id, cfg)
-            .await
-            .map_err(Into::into)
-    }
-
-    async fn pull(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        req: &GitPullRequestDto,
-        cfg: &UserGitCfg,
-    ) -> PortResult<GitPullResultDto> {
-        self.pull_with_recovery(workspace_id, actor_id, req, cfg)
-            .await
-            .map_err(Into::into)
-    }
-
-    async fn head_commit(&self, workspace_id: Uuid) -> PortResult<Option<Vec<u8>>> {
-        let out: anyhow::Result<Option<Vec<u8>>> = async {
-            Ok(self
-                .latest_commit_meta(workspace_id)
-                .await?
-                .map(|m| m.commit_id))
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn remote_head(
-        &self,
-        workspace_id: Uuid,
-        cfg: &UserGitCfg,
-    ) -> PortResult<Option<Vec<u8>>> {
-        self.remote_head_inner(workspace_id, cfg)
-            .await
-            .map_err(Into::into)
-    }
-
-    async fn has_pending_changes(&self, workspace_id: Uuid) -> PortResult<bool> {
-        let out: anyhow::Result<bool> = async {
-            let dirty_rows = self.fetch_dirty(workspace_id).await?;
-            Ok(!dirty_rows.is_empty())
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn drift_since_commit(
-        &self,
-        workspace_id: Uuid,
-        base_commit: &[u8],
-    ) -> PortResult<bool> {
-        let out: anyhow::Result<bool> = async {
-            let Some(meta) = self.commit_meta_by_id(workspace_id, base_commit).await? else {
-                return Ok(true);
-            };
-            let base_index = meta.file_hash_index;
-            let current_state = self.collect_current_state(workspace_id).await?;
-            if base_index.len() != current_state.len() {
-                return Ok(true);
-            }
-            for (path, snapshot) in current_state.into_iter() {
-                let Some(base_hash) = base_index.get(&path) else {
-                    return Ok(true);
-                };
-                if base_hash != &snapshot.hash {
-                    return Ok(true);
-                }
-            }
-            Ok(false)
-        }
-        .await;
-        out.map_err(Into::into)
-    }
-
-    async fn check_remote(
-        &self,
-        workspace_id: Uuid,
-        cfg: &UserGitCfg,
-    ) -> PortResult<GitRemoteCheckDto> {
-        self.check_remote_inner(workspace_id, cfg)
-            .await
-            .map_err(Into::into)
-    }
-
-}
diff --git a/api/crates/infrastructure/src/git/workspace/pull.rs b/api/crates/infrastructure/src/git/workspace/pull.rs
deleted file mode 100644
index f4668a38..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-include!("pull/conflicts.rs");
-include!("pull/entry.rs");
-include!("pull/pull_once.rs");
-include!("pull/packs.rs");
-include!("pull/repair.rs");
diff --git a/api/crates/infrastructure/src/git/workspace/pull/conflicts.rs b/api/crates/infrastructure/src/git/workspace/pull/conflicts.rs
deleted file mode 100644
index 40616462..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull/conflicts.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-impl GitWorkspaceService {
-    async fn build_conflict_item(
-        &self,
-        workspace_id: Uuid,
-        path: &str,
-        current_state: &HashMap<String, FileSnapshot>,
-        remote_state: &HashMap<String, FileSnapshot>,
-        local_meta: Option<&CommitMeta>,
-    ) -> anyhow::Result<GitPullConflictItemDto> {
-        let ours_bytes = if let Some(snap) = current_state.get(path) {
-            Some(self.snapshot_bytes(snap).await?)
-        } else {
-            None
-        };
-        let theirs_bytes = if let Some(snap) = remote_state.get(path) {
-            Some(self.snapshot_bytes(snap).await?)
-        } else {
-            Some(Vec::new())
-        };
-        let base_bytes = if let Some(meta) = local_meta.as_ref() {
-            self.load_file_snapshot(workspace_id, meta.commit_id.as_slice(), path)
-                .await?
-        } else {
-            None
-        };
-
-        let (mut ours, ours_bin) = as_text_or_binary(path, ours_bytes.as_ref());
-        let (mut theirs, theirs_bin) = as_text_or_binary(path, theirs_bytes.as_ref());
-        let (mut base, base_bin) = as_text_or_binary(path, base_bytes.as_ref());
-        let is_binary = ours_bin || theirs_bin || base_bin;
-        if !is_binary {
-            ours = strip_front_matter_body(path, ours);
-            theirs = strip_front_matter_body(path, theirs);
-            base = strip_front_matter_body(path, base);
-        }
-
-        Ok(GitPullConflictItemDto {
-            path: path.to_string(),
-            is_binary,
-            ours,
-            theirs,
-            base,
-            document_id: None,
-        })
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/pull/entry.rs b/api/crates/infrastructure/src/git/workspace/pull/entry.rs
deleted file mode 100644
index dbcfa620..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull/entry.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-impl GitWorkspaceService {
-    async fn pull_with_recovery(
-        &self,
-        workspace_id: Uuid,
-        actor_id: Uuid,
-        req: &GitPullRequestDto,
-        cfg: &UserGitCfg,
-    ) -> anyhow::Result<GitPullResultDto> {
-        let mut recover_attempts: u8 = 0;
-        let mut skip_local_pack_restore = false;
-        loop {
-            match self
-                .pull_once(workspace_id, actor_id, req, cfg, skip_local_pack_restore)
-                .await
-            {
-                Ok(dto) => return Ok(dto),
-                Err(err) => {
-                    if Self::is_missing_objects(&err) {
-                        if recover_attempts < 2 {
-                            recover_attempts += 1;
-                            skip_local_pack_restore = true;
-                            warn!(
-                                workspace_id = %workspace_id,
-                                attempt = %recover_attempts,
-                                error = %err,
-                                "git_pull_missing_objects_recovering"
-                            );
-                            self.recover_missing_objects(workspace_id, cfg).await?;
-                            continue;
-                        }
-                    }
-                    return Err(err);
-                }
-            }
-        }
-    }
-}
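`pull_with_recovery` above is a bounded retry loop: at most two recovery passes, with the retry skipping local pack restore so remote state wins. Its shape, reduced to the essentials (hypothetical hooks standing in for the service methods):

```rust
// Sketch of the bounded retry, under the assumption that recovery is only
// worthwhile for "missing objects" failures.
async fn with_recovery<F, Fut, T>(mut attempt: F) -> anyhow::Result<T>
where
    F: FnMut(bool) -> Fut,                                    // bool = skip_local_pack_restore
    Fut: std::future::Future<Output = anyhow::Result<T>>,
{
    let mut recover_attempts = 0u8;
    let mut skip_restore = false;
    loop {
        match attempt(skip_restore).await {
            Ok(v) => return Ok(v),
            Err(err) if is_missing_objects(&err) && recover_attempts < 2 => {
                recover_attempts += 1;
                skip_restore = true;
                // the real code calls recover_missing_objects() here before retrying
            }
            Err(err) => return Err(err),
        }
    }
}

fn is_missing_objects(err: &anyhow::Error) -> bool {
    err.to_string().to_lowercase().contains("missing") // stand-in predicate
}
```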
diff --git a/api/crates/infrastructure/src/git/workspace/pull/packs.rs b/api/crates/infrastructure/src/git/workspace/pull/packs.rs
deleted file mode 100644
index b8dac4de..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull/packs.rs
+++ /dev/null
@@ -1,205 +0,0 @@
-impl GitWorkspaceService {
-    async fn persist_pack_chain(
-        &self,
-        workspace_id: Uuid,
-        until: Option<&[u8]>,
-    ) -> anyhow::Result<Option<(TempDir, Vec<PathBuf>)>> {
-        // Attempt to rebuild pack chain from stored snapshots if packs are missing or corrupted.
-        async fn rebuild_from_snapshots(
-            svc: &GitWorkspaceService,
-            workspace_id: Uuid,
-            until: Option<&[u8]>,
-        ) -> anyhow::Result<Option<(TempDir, Vec<PathBuf>)>> {
-            // Collect commit metas from oldest to newest
-            let mut chain: Vec<CommitMeta> = Vec::new();
-            let mut cursor = match until {
-                Some(id) => svc.commit_meta_by_id(workspace_id, id).await?,
-                None => svc.latest_commit_meta(workspace_id).await?,
-            };
-            while let Some(meta) = cursor {
-                chain.push(meta.clone());
-                if let Some(parent) = meta.parent_commit_id.as_ref() {
-                    cursor = svc.commit_meta_by_id(workspace_id, parent).await?;
-                } else {
-                    break;
-                }
-            }
-            if chain.is_empty() {
-                return Ok(None);
-            }
-            chain.reverse();
-
-            type PreparedEntry = (String, Vec<u8>);
-            type PreparedCommit = (CommitMeta, Vec<PreparedEntry>);
-
-            // Preload snapshots async
-            let mut prepared: Vec<PreparedCommit> = Vec::new();
-            for meta in chain.iter() {
-                let mut entries: Vec<(String, Vec<u8>)> = Vec::new();
-                for path in meta.file_hash_index.keys() {
-                    let Some(bytes) = svc
-                        .load_file_snapshot(workspace_id, meta.commit_id.as_slice(), path)
-                        .await?
-                    else {
-                        anyhow::bail!(
-                            "missing snapshot blob for {} at commit {}",
-                            path,
-                            encode_commit_id(&meta.commit_id)
-                        );
-                    };
-                    entries.push((path.clone(), bytes));
-                }
-                prepared.push((meta.clone(), entries));
-            }
-
-            // Build packs synchronously to avoid Send issues with git2 types
-            let (temp_dir, pack_paths) = tokio::task::block_in_place(|| -> anyhow::Result<_> {
-                let temp_dir = tempfile::tempdir()?;
-                let repo = Repository::init_bare(temp_dir.path())?;
-                let mut built_commits: HashMap<Vec<u8>, git2::Oid> = HashMap::new();
-                let mut pack_paths: Vec<PathBuf> = Vec::new();
-
-                for (meta, entries) in prepared.into_iter() {
-                    let mut builder = repo.treebuilder(None)?;
-                    for (path, bytes) in entries.iter() {
-                        let blob_oid = repo.blob(bytes)?;
-                        builder.insert(path, blob_oid, FileMode::Blob.into())?;
-                    }
-                    let tree_oid = builder.write()?;
-                    let tree = repo.find_tree(tree_oid)?;
-
-                    let sig = signature_from_parts(
-                        meta.author_name.as_deref().unwrap_or("RefMD"),
-                        meta.author_email.as_deref().unwrap_or("refmd@example.com"),
-                        meta.committed_at,
-                    )?;
-                    let mut parents = Vec::new();
-                    if let Some(parent) = meta.parent_commit_id.as_ref() {
-                        if let Some(existing) = built_commits.get(parent) {
-                            parents.push(repo.find_commit(*existing)?);
-                        }
-                    }
-                    let parent_refs: Vec<&Commit> = parents.iter().collect();
-                    let commit_oid = repo.commit(
-                        None,
-                        &sig,
-                        &sig,
-                        meta.message
-                            .as_deref()
-                            .unwrap_or("Recovered commit from snapshots"),
-                        &tree,
-                        &parent_refs,
-                    )?;
-                    if commit_oid.as_bytes() != meta.commit_id.as_slice() {
-                        anyhow::bail!(
-                            "reconstructed commit id mismatch for {}",
-                            encode_commit_id(&meta.commit_id)
-                        );
-                    }
-                    built_commits.insert(meta.commit_id.clone(), commit_oid);
-
-                    let mut pack_builder = repo.packbuilder()?;
-                    pack_builder.insert_commit(commit_oid)?;
-                    for p in parents.iter() {
-                        pack_builder.insert_commit(p.id())?;
-                    }
-                    let mut pack_buf = git2::Buf::new();
-                    pack_builder.write_buf(&mut pack_buf)?;
-                    let pack_bytes = pack_buf.to_vec();
-
-                    let pack_path = temp_dir
-                        .path()
-                        .join(format!("{:08}.pack", pack_paths.len()));
-                    std::fs::write(&pack_path, &pack_bytes)?;
-                    pack_paths.push(pack_path);
-                }
-
-                Ok((temp_dir, pack_paths))
-            })?;
-
-            // Persist rebuilt packs and metas back to storage
-            for (idx, meta) in chain.iter().enumerate() {
-                let pack_bytes = std::fs::read(&pack_paths[idx])?;
-                svc.git_storage
-                    .store_pack(workspace_id, &pack_bytes, meta)
-                    .await?;
-                svc.upsert_commit_record(workspace_id, meta).await?;
-                let _ = svc
-                    .git_storage
-                    .set_latest_commit(workspace_id, Some(meta))
-                    .await;
-            }
-
-            Ok(Some((temp_dir, pack_paths)))
-        }
-
-        let mut attempts = 0;
-        loop {
-            match self.git_storage.load_pack_chain(workspace_id, until).await {
-                Ok(mut stream) => {
-                    let temp_dir = tempfile::tempdir()?;
-                    let mut pack_paths = Vec::new();
-                    let mut index: usize = 0;
-                    while let Some(pack) = stream.next().await {
-                        let pack = pack?;
-                        let path = temp_dir.path().join(format!("{:08}.pack", index));
-                        tokio::fs::write(&path, &pack.bytes).await?;
-                        pack_paths.push(path);
-                        index += 1;
-                    }
-                    if pack_paths.is_empty() {
-                        return Ok(None);
-                    } else {
-                        return Ok(Some((temp_dir, pack_paths)));
-                    }
-                }
-                Err(err) => {
-                    let err_str = err.to_string();
-                    let is_missing_objects = err_str.to_lowercase().contains("missing")
-                        && err_str.to_lowercase().contains("object");
-                    if let Some(rebuilt) = rebuild_from_snapshots(self, workspace_id, until).await?
-                    {
-                        return Ok(Some(rebuilt));
-                    }
-                    if attempts == 0 {
-                        if let Some(commit_hex) = missing_metadata_commit(&err) {
-                            match self
-                                .repair_missing_commit_metadata(workspace_id, &commit_hex)
-                                .await
-                            {
-                                Ok(_) => {
-                                    attempts += 1;
-                                    continue;
-                                }
-                                Err(repair_err) => {
-                                    warn!(
-                                        workspace_id = %workspace_id,
-                                        commit = %commit_hex,
-                                        error = ?repair_err,
-                                        "git_commit_metadata_repair_failed"
-                                    );
-                                }
-                            }
-                        }
-                        // If pack is missing objects, fall back by resetting git storage pointer and DB history.
-                        if is_missing_objects {
-                            warn!(
-                                workspace_id = %workspace_id,
-                                error = %err,
-                                "git_pack_missing_objects_detected_resetting_history"
-                            );
-                            // Drop storage latest pointer and DB commits for this workspace.
-                            let _ = self.git_storage.set_latest_commit(workspace_id, None).await;
-                            let _ = sqlx::query("DELETE FROM git_commits WHERE workspace_id = $1")
-                                .bind(workspace_id)
-                                .execute(&self.pool)
-                                .await;
-                            return Ok(None);
-                        }
-                    }
-                    return Err(err.into());
-                }
-            }
-        }
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once.rs
deleted file mode 100644
index 982a6aaf..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull/pull_once.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-include!("pull_once/main.rs");
-include!("pull_once/merge.rs");
-include!("pull_once/fallback_conflicts.rs");
-include!("pull_once/state.rs");
-include!("pull_once/pack.rs");
-include!("pull_once/persist.rs");
-include!("pull_once/response.rs");
diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/fallback_conflicts.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/fallback_conflicts.rs
deleted file mode 100644
index a3a6a9be..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/fallback_conflicts.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-impl GitWorkspaceService {
-async fn pull_build_fallback_diff_conflicts(
-    &self,
-    workspace_id: Uuid,
-    local_oid: Option<git2::Oid>,
-    remote_oid: git2::Oid,
-    current_state: &HashMap<String, FileSnapshot>,
-    remote_state: &HashMap<String, FileSnapshot>,
-    local_meta: Option<&CommitMeta>,
-) -> anyhow::Result<Vec<GitPullConflictItemDto>> {
-    let local_oid_val = local_oid.unwrap_or(remote_oid);
-    if remote_oid == local_oid_val {
-        return Ok(Vec::new());
-    }
-
-    let mut all_paths: HashSet<String> = HashSet::new();
-    for p in remote_state.keys() {
-        all_paths.insert(p.clone());
-    }
-    for p in current_state.keys() {
-        all_paths.insert(p.clone());
-    }
-
-    let mut remote_conflicts: Vec<GitPullConflictItemDto> = Vec::new();
-    for path in all_paths {
-        let remote_hash = remote_state.get(&path).map(|s| &s.hash);
-        let local_hash = current_state.get(&path).map(|s| &s.hash);
-        if remote_hash == local_hash {
-            continue;
-        }
-
-        let item = self
-            .build_conflict_item(workspace_id, &path, current_state, remote_state, local_meta)
-            .await?;
-        remote_conflicts.push(item);
-    }
-
-    Ok(remote_conflicts)
-}
-}
diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/main.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/main.rs
deleted file mode 100644
index 4306e326..00000000
--- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/main.rs
+++ /dev/null
@@ -1,302 +0,0 @@
-impl GitWorkspaceService {
-async fn pull_once(
-    &self,
-    workspace_id: Uuid,
-    actor_id: Uuid,
-    req: &GitPullRequestDto,
-    cfg: &UserGitCfg,
-    skip_local_pack_restore: bool,
-) -> anyhow::Result<GitPullResultDto> {
-    let state = self.load_repository_state(workspace_id).await?;
-    let Some((initialized, branch_default)) = state else {
-        anyhow::bail!("repository not initialized");
-    };
-    if !initialized {
-        anyhow::bail!("repository not initialized");
-    }
-    if cfg.repository_url.is_empty() {
-        anyhow::bail!("remote not configured");
-    }
-
-    let branch = if cfg.branch_name.is_empty() {
-        branch_default
-    } else {
-        cfg.branch_name.clone()
-    };
-
-    // Capture current workspace head before touching remote history.
-    let mut local_meta = self.latest_commit_meta(workspace_id).await?;
-    // After a recovery we want to treat pull as a fresh fast-forward from remote.
-    if skip_local_pack_restore {
-        local_meta = None;
-    }
-    let mut local_history_reset = false;
-    let mut base_index: HashMap<String, String> = local_meta
-        .as_ref()
-        .map(|m| m.file_hash_index.clone())
-        .unwrap_or_default();
-    let mut previous_index = base_index.clone();
-    let mut base_commit = local_meta.as_ref().map(|m| m.commit_id.clone());
-
-    let temp_dir = TempDirBuilder::new()
-        .prefix("git-pull-")
-        .tempdir()
-        .map_err(|e| anyhow::anyhow!(e))?;
-    let repo = Repository::init_bare(temp_dir.path())?;
-    if !skip_local_pack_restore {
-        match self
-            .persist_pack_chain(
-                workspace_id,
-                local_meta.as_ref().map(|m| m.commit_id.as_slice()),
-            )
-            .await?
-        {
-            Some((_, pack_paths)) => {
-                apply_pack_files(&repo, &pack_paths)?;
-            }
-            None => {
-                warn!(
-                    workspace_id = %workspace_id,
-                    "git_pull_pack_restore_missing_resetting_base"
-                );
-                // Storage/DB history was reset; treat as fresh pull with no local history.
-                local_meta = None;
-                local_history_reset = true;
-                base_index.clear();
-                previous_index.clear();
-                base_commit = None;
-            }
-        }
-    } else {
-        info!(workspace_id = %workspace_id, "git_pull_skip_local_pack_restore");
-    }
-
-    let remote_oid = {
-        let Some(head) = fetch_remote_head(&repo, cfg, &branch)? else {
-            return Ok(GitPullResultDto {
-                success: false,
-                message: format!("branch '{branch}' not found on remote"),
-                files_changed: 0,
-                commit_hash: None,
-                conflicts: None,
-                base_commit: base_commit.clone(),
-                remote_commit: None,
-            });
-        };
-        head
-    };
-    let remote_commit = Some(remote_oid.as_bytes().to_vec());
-
-    let mut local_oid = if local_history_reset {
-        None
-    } else {
-        local_meta
-            .as_ref()
-            .and_then(|m| git2::Oid::from_bytes(&m.commit_id).ok())
-    };
-    // If workspace has no local commit recorded (fresh pull), fall back to latest known meta after bootstrap.
-    if local_oid.is_none() && !skip_local_pack_restore && !local_history_reset {
-        if let Some(meta) = self.latest_commit_meta(workspace_id).await? {
-            base_index = meta.file_hash_index.clone();
-            previous_index = base_index.clone();
-            base_commit = Some(meta.commit_id.clone());
-            local_oid = git2::Oid::from_bytes(&meta.commit_id).ok();
-            local_meta = Some(meta);
-        }
-    }
-    // Detect drift between latest commit and current workspace using the same dirty set as Git Changes/Status.
-    let dirty_rows = self.fetch_dirty(workspace_id).await?;
-    let current_state = self.collect_current_state(workspace_id).await?;
-    info!(workspace_id = %workspace_id, dirty_count = dirty_rows.len(), skip_local_pack_restore = skip_local_pack_restore, "git_pull_dirty_state");
-
-    #[derive(Clone, Copy, PartialEq, Eq)]
-    enum CommitRelation {
-        NoLocal,
-        Same,
-        LocalAhead,
-        RemoteAhead,
-        Diverged,
-    }
-
-    let commit_relation = if let Some(local_oid_val) = local_oid {
-        if local_oid_val == remote_oid {
-            CommitRelation::Same
-        } else if repo.graph_descendant_of(local_oid_val, remote_oid)? {
-            CommitRelation::LocalAhead
-        } else if repo.graph_descendant_of(remote_oid, local_oid_val)? {
-            CommitRelation::RemoteAhead
-        } else {
-            CommitRelation::Diverged
-        }
-    } else {
-        CommitRelation::NoLocal
-    };
{ - CommitRelation::RemoteAhead - } else { - CommitRelation::Diverged - } - } else { - CommitRelation::NoLocal - }; - - // Nothing to do when remote is identical to or behind the local head. - if matches!(commit_relation, CommitRelation::Same | CommitRelation::LocalAhead) { - let commit_hash = local_oid - .as_ref() - .map(|oid| encode_commit_id(oid.as_bytes())); - return Ok(GitPullResultDto { - success: true, - message: "no remote changes".to_string(), - files_changed: 0, - commit_hash, - conflicts: None, - base_commit: base_commit.clone(), - remote_commit: remote_commit.clone(), - }); - } - - let remote_state = Self::pull_collect_state_from_commit(&repo, remote_oid)?; - let remote_changed_paths_vec = Self::pull_remote_changed_paths(&base_index, &remote_state); - let mut remote_conflicts = self - .pull_build_conflicts_for_paths( - workspace_id, - &remote_changed_paths_vec, - &current_state, - &remote_state, - local_meta.as_ref(), - ) - .await?; - - // First-time pull with no local history and no dirty changes: allow fast-forward without forcing conflicts. - if local_meta.is_none() && dirty_rows.is_empty() { - remote_conflicts.clear(); - } - - // If commits differ but no conflict paths were detected above, fall back to a diff of current vs remote trees. - if remote_conflicts.is_empty() { - remote_conflicts = self - .pull_build_fallback_diff_conflicts( - workspace_id, - local_oid, - remote_oid, - &current_state, - &remote_state, - local_meta.as_ref(), - ) - .await?; - } - let remote_changes = !remote_conflicts.is_empty(); - let remote_ahead_clean = matches!(commit_relation, CommitRelation::RemoteAhead) && dirty_rows.is_empty(); - let fast_forward_remote = matches!(commit_relation, CommitRelation::NoLocal) || remote_ahead_clean; - - // Detect overlap between remote-changed paths and dirty rows to avoid false conflicts. - let dirty_remote_overlap = Self::pull_dirty_remote_overlap(&dirty_rows, &remote_changed_paths_vec); - - info!( - workspace_id = %workspace_id, - dirty_count = dirty_rows.len(), - remote_conflict_count = remote_conflicts.len(), - remote_changes = remote_changes, - resolutions_count = req.resolutions.len(), - dirty_remote_overlap = dirty_remote_overlap, - "git_pull_debug_state" - ); - - // If workspace has dirty changes overlapping remote changes, require explicit resolutions. - if remote_changes && dirty_remote_overlap && req.resolutions.is_empty() { - let conflicts = if remote_conflicts.is_empty() { - vec![GitPullConflictItemDto { - path: "".to_string(), - is_binary: false, - ours: None, - theirs: None, - base: None, - document_id: None, - }] - } else { - remote_conflicts.clone() - }; - return Ok(Self::pull_conflicts_detected_response( - base_commit.clone(), - remote_commit.clone(), - conflicts, - )); - } - - // Ensure remote head commit metadata/pack exists locally for merge parent and future syncs. - let mut remote_pack: Option<(CommitMeta, Vec<u8>)> = None; - if self - .commit_meta_by_id(workspace_id, remote_oid.as_bytes()) - .await? - .is_none() - { - let remote_index: HashMap<String, String> = remote_state - .iter() - .map(|(path, snap)| (path.clone(), snap.hash.clone())) - .collect(); - let (remote_meta, remote_pack_bytes) = - Self::pull_build_commit_meta_and_pack(&repo, workspace_id, remote_oid, remote_index)?; - remote_pack = Some((remote_meta, remote_pack_bytes)); - } - - // Fast-forward when there is no local history or the workspace head cleanly trails remote. - // For fresh workspaces with dirty changes, surface conflicts instead of overwriting. 
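The branch taken below hangs entirely on the `CommitRelation` value computed above from `git2::Repository::graph_descendant_of`. As a standalone sketch of that classification (the enum mirrors the one declared inside `pull_once`; the free function and its name are illustrative, not part of this diff):

```rust
use git2::{Oid, Repository};

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum CommitRelation {
    NoLocal,     // workspace has no recorded head yet
    Same,        // local and remote heads are identical
    LocalAhead,  // local head already contains the remote head
    RemoteAhead, // remote head contains the local head: clean fast-forward
    Diverged,    // neither contains the other: a merge is required
}

fn classify(repo: &Repository, local: Option<Oid>, remote: Oid) -> Result<CommitRelation, git2::Error> {
    let Some(local) = local else {
        return Ok(CommitRelation::NoLocal);
    };
    if local == remote {
        Ok(CommitRelation::Same)
    } else if repo.graph_descendant_of(local, remote)? {
        Ok(CommitRelation::LocalAhead)
    } else if repo.graph_descendant_of(remote, local)? {
        Ok(CommitRelation::RemoteAhead)
    } else {
        Ok(CommitRelation::Diverged)
    }
}
```

`Same` and `LocalAhead` short-circuit to "no remote changes", `RemoteAhead` fast-forwards only when the dirty set is empty, and everything else falls through to conflict detection.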
- if fast_forward_remote { - if matches!(commit_relation, CommitRelation::NoLocal) - && (!dirty_rows.is_empty() || !remote_conflicts.is_empty()) - { - return Ok(Self::pull_conflicts_detected_response( - base_commit.clone(), - remote_commit.clone(), - remote_conflicts.clone(), - )); - } - // Ensure we have pack data for the remote head regardless of existing metadata. - let (remote_meta, remote_pack_bytes) = if let Some((meta, pack)) = remote_pack.take() { - (meta, pack) - } else { - let remote_index: HashMap<String, String> = remote_state - .iter() - .map(|(p, snap)| (p.clone(), snap.hash.clone())) - .collect(); - Self::pull_build_commit_meta_and_pack(&repo, workspace_id, remote_oid, remote_index)? - }; - return self - .pull_fast_forward_to_remote( - workspace_id, - actor_id, - base_commit.clone(), - &previous_index, - &remote_state, - &remote_meta, - Some(remote_pack_bytes.as_slice()), - ) - .await; - } - - // Diverged: merge local into remote (linear, parent = remote) - let Some(local_oid_val) = local_oid else { - anyhow::bail!("no local commit to merge"); - }; - - let (meta, pack_bytes, merged_snapshots, commit_hex) = match self.pull_build_diverged_merge_commit( - workspace_id, - &repo, - local_oid_val, - remote_oid, - req, - &base_commit, - &remote_commit, - )? { - Ok(out) => out, - Err(dto) => return Ok(dto), - }; - - self.pull_persist_merged_commit( - workspace_id, - actor_id, - &previous_index, - base_commit, - remote_commit, - remote_pack.take(), - meta, - pack_bytes, - merged_snapshots, - commit_hex, - ) - .await -} -} diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/merge.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/merge.rs deleted file mode 100644 index 9b8d62f6..00000000 --- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/merge.rs +++ /dev/null @@ -1,206 +0,0 @@ -type PullMergeOk = (CommitMeta, Vec<u8>, HashMap<String, FileSnapshot>, String); -type PullMergeResult = Result<PullMergeOk, GitPullResultDto>; -type ConflictEntry = (String, Option<Vec<u8>>, Option<Vec<u8>>, Option<Vec<u8>>); - -impl GitWorkspaceService { - fn pull_build_diverged_merge_commit( - &self, - workspace_id: Uuid, - repo: &Repository, - local_oid_val: git2::Oid, - remote_oid: git2::Oid, - req: &GitPullRequestDto, - base_commit: &Option<Vec<u8>>, - remote_commit: &Option<Vec<u8>>, - ) -> anyhow::Result<PullMergeResult> { - // Build a synthetic "ours" commit from the current workspace state anchored to the local head - // so dirty edits participate in the merge against remote changes. - let synthetic_ours = self.build_synthetic_commit(workspace_id, repo, local_oid_val)?; - let ours_commit = repo.find_commit(synthetic_ours)?; - let remote_commit_obj = repo.find_commit(remote_oid)?; - let index = repo.merge_commits(&ours_commit, &remote_commit_obj, None)?; - - let conflict_items = collect_conflicts(repo, &index)?; - if !conflict_items.is_empty() && req.resolutions.is_empty() { - return Ok(Err(Self::pull_conflicts_detected_response( - base_commit.clone(), - remote_commit.clone(), - conflict_items, - ))); - } - - // Collect conflict entries for resolution application. - let mut conflict_entries: Vec<ConflictEntry> = Vec::new(); - { - let conflicts_iter = index.conflicts()?; - for conflict in conflicts_iter { - let conflict = conflict?; - let path = conflict - .our - .as_ref() - .or(conflict.their.as_ref()) - .or(conflict.ancestor.as_ref()) - .and_then(|e| std::str::from_utf8(&e.path).ok()) - .ok_or_else(|| anyhow!("missing conflict path"))? 
- .to_string(); - - let to_bytes = |entry: Option<&git2::IndexEntry>| -> anyhow::Result<Option<Vec<u8>>> { - if let Some(e) = entry { - let blob = repo.find_blob(e.id)?; - Ok(Some(blob.content().to_vec())) - } else { - Ok(None) - } - }; - - conflict_entries.push(( - path, - to_bytes(conflict.our.as_ref())?, - to_bytes(conflict.their.as_ref())?, - to_bytes(conflict.ancestor.as_ref())?, - )); - } - } - - let resolution_map: std::collections::HashMap< - String, - &application::git::dtos::GitPullResolutionDto, - > = req.resolutions.iter().map(|r| (r.path.clone(), r)).collect(); - - // Build merged state from resolved index (stage 0) plus user resolutions. - let mut merged_snapshots: HashMap<String, FileSnapshot> = HashMap::new(); - for entry in index.iter() { - if index_entry_stage(&entry) != 0 { - continue; - } - let path = index_entry_path(&entry)?; - let blob = repo.find_blob(entry.id)?; - let bytes = blob.content().to_vec(); - let hash = sha256_hex(&bytes); - let is_text = std::str::from_utf8(&bytes).is_ok(); - merged_snapshots.insert( - path, - FileSnapshot { - hash, - data: FileSnapshotData::Inline(bytes), - is_text, - }, - ); - } - - let mut unresolved: Vec<GitPullConflictItemDto> = Vec::new(); - - for (path, ours_bytes, theirs_bytes, base_bytes) in conflict_entries { - let resolution = resolution_map.get(&path); - if resolution.is_none() { - let (mut ours_txt, ours_bin) = as_text_or_binary(path.as_str(), ours_bytes.as_ref()); - let (mut theirs_txt, theirs_bin) = as_text_or_binary(path.as_str(), theirs_bytes.as_ref()); - let (mut base_txt, base_bin) = as_text_or_binary(path.as_str(), base_bytes.as_ref()); - let is_binary = ours_bin || theirs_bin || base_bin; - if !is_binary { - ours_txt = strip_front_matter_body(path.as_str(), ours_txt); - theirs_txt = strip_front_matter_body(path.as_str(), theirs_txt); - base_txt = strip_front_matter_body(path.as_str(), base_txt); - } - unresolved.push(GitPullConflictItemDto { - path: path.clone(), - is_binary, - ours: ours_txt, - theirs: theirs_txt, - base: base_txt, - document_id: None, - }); - continue; - } - - let res = *resolution.unwrap(); - let selected_bytes = match res.choice.as_str() { - "ours" => ours_bytes.clone(), - "theirs" => theirs_bytes.clone(), - "base" => base_bytes.clone(), - "custom_text" => { - let content = res - .content - .as_ref() - .ok_or_else(|| anyhow!("custom_text content required"))?; - Some(content.as_bytes().to_vec()) - } - other => anyhow::bail!("unsupported resolution choice {other}"), - } - .unwrap_or_default(); - let hash = sha256_hex(&selected_bytes); - let is_text = std::str::from_utf8(&selected_bytes).is_ok(); - merged_snapshots.insert( - path.clone(), - FileSnapshot { - hash, - data: FileSnapshotData::Inline(selected_bytes), - is_text, - }, - ); - } - - if !unresolved.is_empty() { - return Ok(Err(Self::pull_conflicts_detected_response( - base_commit.clone(), - remote_commit.clone(), - unresolved, - ))); - } - - // Build tree from merged snapshots without async work. 
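`build_tree_from_entries`, used just below, is a helper defined elsewhere in this crate (outside the deleted hunks). One plausible shape, assuming it groups flat `dir/file` paths into nested `git2::TreeBuilder` writes; the function name, signature, and recursion strategy here are guesses for illustration only:

```rust
use std::collections::BTreeMap;
use git2::{Oid, Repository};

// Group flat `a/b/c.md` paths by first segment, write blobs for leaves,
// and recurse to build subtrees bottom-up.
fn build_tree(repo: &Repository, entries: &BTreeMap<String, Vec<u8>>) -> anyhow::Result<Oid> {
    let mut leaves: BTreeMap<String, &Vec<u8>> = BTreeMap::new();
    let mut subdirs: BTreeMap<String, BTreeMap<String, Vec<u8>>> = BTreeMap::new();
    for (path, bytes) in entries {
        match path.split_once('/') {
            None => {
                leaves.insert(path.clone(), bytes);
            }
            Some((dir, rest)) => {
                subdirs
                    .entry(dir.to_string())
                    .or_default()
                    .insert(rest.to_string(), bytes.clone());
            }
        }
    }
    let mut builder = repo.treebuilder(None)?;
    for (name, bytes) in leaves {
        let oid = repo.blob(bytes)?;
        builder.insert(name.as_str(), oid, 0o100644)?; // regular file
    }
    for (name, sub) in subdirs {
        let oid = build_tree(repo, &sub)?;
        builder.insert(name.as_str(), oid, 0o040000)?; // subtree
    }
    Ok(builder.write()?)
}
```

Writing blobs eagerly and assembling trees bottom-up keeps the whole merge-commit construction synchronous, which is what the "without async work" comment above is pointing at.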
- let mut entry_map: BTreeMap> = BTreeMap::new(); - for (path, snap) in merged_snapshots.iter() { - let bytes = match &snap.data { - FileSnapshotData::Inline(b) => b.clone(), - FileSnapshotData::StoragePath(_) => { - anyhow::bail!("unexpected storage-backed snapshot during pull merge") - } - }; - entry_map.insert(path.clone(), bytes); - } - let tree_oid = build_tree_from_entries(repo, &entry_map)?; - let tree = repo.find_tree(tree_oid)?; - let sig = signature_from_parts("RefMD", "refmd@example.com", chrono::Utc::now())?; - let base_parent = repo.find_commit(local_oid_val)?; - let remote_parent = repo.find_commit(remote_oid)?; - let parent_refs: [&git2::Commit; 2] = [&base_parent, &remote_parent]; - let commit_oid = repo.commit( - None, - &sig, - &sig, - "Merge remote changes", - &tree, - &parent_refs, - )?; - - let mut file_hash_index: HashMap = HashMap::new(); - for (path, snap) in merged_snapshots.iter() { - file_hash_index.insert(path.clone(), snap.hash.clone()); - } - - let mut pack_builder = repo.packbuilder()?; - pack_builder.insert_commit(commit_oid)?; - // Include both parents to avoid missing bases when applying packs later. - pack_builder.insert_commit(base_parent.id())?; - pack_builder.insert_commit(remote_parent.id())?; - let mut pack_buf = git2::Buf::new(); - pack_builder.write_buf(&mut pack_buf)?; - let pack_bytes = pack_buf.to_vec(); - - let commit_hex = encode_commit_id(commit_oid.as_bytes()); - let meta = CommitMeta { - commit_id: commit_oid.as_bytes().to_vec(), - // Keep workspace history linear: parent is previous workspace head. - parent_commit_id: base_commit.clone(), - message: Some("Merge remote changes".to_string()), - author_name: Some("RefMD".to_string()), - author_email: Some("refmd@example.com".to_string()), - committed_at: chrono::Utc::now(), - pack_key: format!("git/packs/{}/{}.pack", workspace_id, commit_hex), - file_hash_index, - }; - - Ok(Ok((meta, pack_bytes, merged_snapshots, commit_hex))) - } -} diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/pack.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/pack.rs deleted file mode 100644 index da629605..00000000 --- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/pack.rs +++ /dev/null @@ -1,48 +0,0 @@ -impl GitWorkspaceService { -fn pull_build_commit_meta_and_pack( - repo: &Repository, - workspace_id: Uuid, - oid: git2::Oid, - file_hash_index: HashMap, -) -> anyhow::Result<(CommitMeta, Vec)> { - let commit = repo.find_commit(oid)?; - let committed_at = git_time_to_datetime(commit.time())?; - let message = commit - .message() - .map(|m| m.trim_end_matches('\n').to_string()) - .filter(|m| !m.trim().is_empty()); - let author = commit.author(); - let author_name = author.name().map(|s| s.to_string()); - let author_email = author.email().map(|s| s.to_string()); - let parent_commit_id = if commit.parent_count() > 0 { - Some(commit.parent_id(0)?.as_bytes().to_vec()) - } else { - None - }; - - let mut pack_builder = repo.packbuilder()?; - pack_builder.insert_commit(oid)?; - if let Some(parent_id) = parent_commit_id.as_ref() { - if let Ok(parent_oid) = git2::Oid::from_bytes(parent_id) { - let _ = pack_builder.insert_commit(parent_oid); - } - } - let mut pack_buf = git2::Buf::new(); - pack_builder.write_buf(&mut pack_buf)?; - let pack_bytes = pack_buf.to_vec(); - - let commit_hex = encode_commit_id(oid.as_bytes()); - let meta = CommitMeta { - commit_id: oid.as_bytes().to_vec(), - parent_commit_id, - message, - author_name, - author_email, - committed_at, - pack_key: 
format!("git/packs/{}/{}.pack", workspace_id, commit_hex), - file_hash_index, - }; - - Ok((meta, pack_bytes)) -} -} diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/persist.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/persist.rs deleted file mode 100644 index f853cc6d..00000000 --- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/persist.rs +++ /dev/null @@ -1,222 +0,0 @@ -impl GitWorkspaceService { -async fn pull_fast_forward_to_remote( - &self, - workspace_id: Uuid, - actor_id: Uuid, - base_commit: Option>, - previous_index: &HashMap, - remote_state: &HashMap, - remote_meta: &CommitMeta, - remote_pack_bytes: Option<&[u8]>, -) -> anyhow::Result { - if let Some(pack_bytes) = remote_pack_bytes { - self.git_storage - .store_pack(workspace_id, pack_bytes, remote_meta) - .await?; - } - self.upsert_commit_record(workspace_id, remote_meta).await?; - - let snapshot_keys = self - .store_commit_snapshots(workspace_id, &remote_meta.commit_id, remote_state) - .await?; - - if let Err(err) = self - .git_storage - .set_latest_commit(workspace_id, Some(remote_meta)) - .await - { - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - return Err(err.into()); - } - - let mut tx = self.pool.begin().await?; - let repo_row = sqlx::query("SELECT initialized FROM git_repository_state WHERE workspace_id = $1") - .bind(workspace_id) - .fetch_optional(&mut *tx) - .await?; - let Some(repo_row) = repo_row else { - tx.rollback().await.ok(); - anyhow::bail!("repository not initialized") - }; - let initialized: bool = repo_row.get("initialized"); - if !initialized { - tx.rollback().await.ok(); - anyhow::bail!("repository not initialized") - } - - sqlx::query( - r#"INSERT INTO git_commits ( - commit_id, - parent_commit_id, - workspace_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9) - ON CONFLICT (commit_id, workspace_id) DO NOTHING"#, - ) - .bind(remote_meta.commit_id.clone()) - .bind(remote_meta.parent_commit_id.clone()) - .bind(workspace_id) - .bind(remote_meta.message.clone()) - .bind(remote_meta.author_name.clone()) - .bind(remote_meta.author_email.clone()) - .bind(remote_meta.committed_at) - .bind(remote_meta.pack_key.clone()) - .bind(Json(&remote_meta.file_hash_index)) - .execute(&mut *tx) - .await?; - - sqlx::query("UPDATE git_repository_state SET updated_at = now() WHERE workspace_id = $1") - .bind(workspace_id) - .execute(&mut *tx) - .await?; - tx.commit().await?; - - let files_changed = self - .apply_state_to_workspace(workspace_id, remote_state, previous_index) - .await?; - - self.materialize_documents_from_state(workspace_id, actor_id, remote_state) - .await?; - self.apply_merged_to_documents(workspace_id, remote_state) - .await?; - self.clear_dirty(workspace_id).await.map_err(|err| { - error!( - workspace_id = %workspace_id, - error = %err, - "git_pull_clear_dirty_failed" - ); - err - })?; - - info!( - workspace_id = %workspace_id, - commit = %encode_commit_id(&remote_meta.commit_id), - "git_pull_fast_forward_remote" - ); - - Ok(GitPullResultDto { - success: true, - message: "fast-forwarded to remote".to_string(), - files_changed, - commit_hash: Some(encode_commit_id(&remote_meta.commit_id)), - conflicts: None, - base_commit, - remote_commit: Some(remote_meta.commit_id.clone()), - }) -} - -async fn pull_persist_merged_commit( - &self, - workspace_id: Uuid, - actor_id: Uuid, - previous_index: &HashMap, - base_commit: Option>, - 
remote_commit: Option>, - remote_pack: Option<(CommitMeta, Vec)>, - meta: CommitMeta, - pack_bytes: Vec, - merged_snapshots: HashMap, - commit_hex: String, -) -> anyhow::Result { - if let Some((remote_meta, remote_pack_bytes)) = remote_pack { - self.git_storage - .store_pack(workspace_id, &remote_pack_bytes, &remote_meta) - .await?; - self.upsert_commit_record(workspace_id, &remote_meta).await?; - } - - let snapshot_keys = self - .store_commit_snapshots(workspace_id, &meta.commit_id, &merged_snapshots) - .await?; - - if let Err(err) = self - .git_storage - .store_pack(workspace_id, &pack_bytes, &meta) - .await - { - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - return Err(err.into()); - } - - if let Err(err) = self - .git_storage - .set_latest_commit(workspace_id, Some(&meta)) - .await - { - let _ = self.git_storage.delete_pack(workspace_id, &meta.commit_id).await; - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - return Err(err.into()); - } - - let mut tx = self.pool.begin().await?; - sqlx::query( - r#"INSERT INTO git_commits ( - commit_id, - parent_commit_id, - workspace_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9)"#, - ) - .bind(meta.commit_id.clone()) - .bind(meta.parent_commit_id.clone()) - .bind(workspace_id) - .bind(meta.message.clone()) - .bind(meta.author_name.clone()) - .bind(meta.author_email.clone()) - .bind(meta.committed_at) - .bind(meta.pack_key.clone()) - .bind(Json(&meta.file_hash_index)) - .execute(&mut *tx) - .await?; - - sqlx::query("UPDATE git_repository_state SET updated_at = now() WHERE workspace_id = $1") - .bind(workspace_id) - .execute(&mut *tx) - .await?; - tx.commit().await?; - - let files_changed = self - .apply_state_to_workspace(workspace_id, &merged_snapshots, previous_index) - .await?; - - self.materialize_documents_from_state(workspace_id, actor_id, &merged_snapshots) - .await?; - self.apply_merged_to_documents(workspace_id, &merged_snapshots) - .await?; - - self.clear_dirty(workspace_id).await.map_err(|err| { - error!( - workspace_id = %workspace_id, - error = %err, - "git_pull_merge_clear_dirty_failed" - ); - err - })?; - - Ok(GitPullResultDto { - success: true, - message: "remote changes merged".to_string(), - files_changed, - commit_hash: Some(commit_hex), - conflicts: None, - base_commit, - remote_commit, - }) -} -} diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/response.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/response.rs deleted file mode 100644 index 7ff2b71b..00000000 --- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/response.rs +++ /dev/null @@ -1,22 +0,0 @@ -impl GitWorkspaceService { -fn pull_dirty_remote_overlap(dirty_rows: &[DirtyRow], remote_changed_paths: &[String]) -> bool { - let dirty_paths: HashSet = dirty_rows.iter().map(|r| r.path.clone()).collect(); - remote_changed_paths.iter().any(|p| dirty_paths.contains(p)) -} - -fn pull_conflicts_detected_response( - base_commit: Option>, - remote_commit: Option>, - conflicts: Vec, -) -> GitPullResultDto { - GitPullResultDto { - success: false, - message: "conflicts detected".to_string(), - files_changed: 0, - commit_hash: None, - conflicts: Some(conflicts), - base_commit, - remote_commit, - } -} -} diff --git a/api/crates/infrastructure/src/git/workspace/pull/pull_once/state.rs b/api/crates/infrastructure/src/git/workspace/pull/pull_once/state.rs 
deleted file mode 100644 index b857208c..00000000 --- a/api/crates/infrastructure/src/git/workspace/pull/pull_once/state.rs +++ /dev/null @@ -1,88 +0,0 @@ -impl GitWorkspaceService { -fn pull_collect_state_from_commit( - repo: &Repository, - oid: git2::Oid, -) -> anyhow::Result<HashMap<String, FileSnapshot>> { - let commit = repo.find_commit(oid)?; - let tree = commit.tree()?; - let mut out: HashMap<String, FileSnapshot> = HashMap::new(); - - fn walk( - repo: &Repository, - tree: &git2::Tree, - prefix: &str, - out: &mut HashMap<String, FileSnapshot>, - ) -> anyhow::Result<()> { - for entry in tree.iter() { - let name = entry.name().unwrap_or_default(); - let path = if prefix.is_empty() { - name.to_string() - } else { - format!("{prefix}{name}") - }; - match entry.kind() { - Some(git2::ObjectType::Tree) => { - if let Some(sub) = entry.to_object(repo)?.as_tree() { - walk(repo, sub, &(path.clone() + "/"), out)?; - } - } - Some(git2::ObjectType::Blob) => { - let blob = repo.find_blob(entry.id())?; - let bytes = blob.content().to_vec(); - let hash = sha256_hex(&bytes); - let is_text = std::str::from_utf8(&bytes).is_ok(); - out.insert( - path, - FileSnapshot { - hash, - data: FileSnapshotData::Inline(bytes), - is_text, - }, - ); - } - _ => {} - } - } - Ok(()) - } - - walk(repo, &tree, "", &mut out)?; - Ok(out) -} - -fn pull_remote_changed_paths( - base_index: &HashMap<String, String>, - remote_state: &HashMap<String, FileSnapshot>, -) -> Vec<String> { - let mut remote_changed_paths: HashSet<String> = HashSet::new(); - for (path, snap) in remote_state.iter() { - if base_index.get(path) != Some(&snap.hash) { - remote_changed_paths.insert(path.clone()); - } - } - for path in base_index.keys() { - if !remote_state.contains_key(path) { - remote_changed_paths.insert(path.clone()); - } - } - remote_changed_paths.into_iter().collect() -} - -async fn pull_build_conflicts_for_paths( - &self, - workspace_id: Uuid, - paths: &[String], - current_state: &HashMap<String, FileSnapshot>, - remote_state: &HashMap<String, FileSnapshot>, - local_meta: Option<&CommitMeta>, -) -> anyhow::Result<Vec<GitPullConflictItemDto>> { - let mut remote_conflicts: Vec<GitPullConflictItemDto> = Vec::new(); - for path in paths.iter() { - let item = self - .build_conflict_item(workspace_id, path, current_state, remote_state, local_meta) - .await?; - remote_conflicts.push(item); - } - Ok(remote_conflicts) -} -} diff --git a/api/crates/infrastructure/src/git/workspace/pull/repair.rs b/api/crates/infrastructure/src/git/workspace/pull/repair.rs deleted file mode 100644 index 783bdbc1..00000000 --- a/api/crates/infrastructure/src/git/workspace/pull/repair.rs +++ /dev/null @@ -1,129 +0,0 @@ -impl GitWorkspaceService { - async fn repair_missing_commit_metadata( - &self, - workspace_id: Uuid, - start_hex: &str, - ) -> anyhow::Result<()> { - let mut current_hex = start_hex.to_string(); - let mut visited = HashSet::new(); - loop { - if !visited.insert(current_hex.clone()) { - break; - } - let meta = - if let Some(meta) = self.commit_meta_by_hex(workspace_id, &current_hex).await? { - meta - } else if let Some(meta) = self - .reconstruct_commit_meta_from_pack(workspace_id, &current_hex) - .await? 
- { - meta - } else { - anyhow::bail!( - "commit {} not found in database or pack storage", - current_hex - ); - }; - self.git_storage - .restore_commit_meta(workspace_id, &meta) - .await?; - self.upsert_commit_record(workspace_id, &meta).await?; - if let Some(parent) = meta.parent_commit_id.as_ref() { - current_hex = encode_commit_id(parent); - } else { - break; - } - } - Ok(()) - } - - async fn upsert_commit_record( - &self, - workspace_id: Uuid, - meta: &CommitMeta, - ) -> anyhow::Result<()> { - sqlx::query( - r#"INSERT INTO git_commits ( - commit_id, - parent_commit_id, - workspace_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9) - ON CONFLICT (workspace_id, commit_id) DO UPDATE SET - parent_commit_id = EXCLUDED.parent_commit_id, - message = EXCLUDED.message, - author_name = EXCLUDED.author_name, - author_email = EXCLUDED.author_email, - committed_at = EXCLUDED.committed_at, - pack_key = EXCLUDED.pack_key, - file_hash_index = EXCLUDED.file_hash_index"#, - ) - .bind(meta.commit_id.clone()) - .bind(meta.parent_commit_id.clone()) - .bind(workspace_id) - .bind(meta.message.clone()) - .bind(meta.author_name.clone()) - .bind(meta.author_email.clone()) - .bind(meta.committed_at) - .bind(meta.pack_key.clone()) - .bind(Json(&meta.file_hash_index)) - .execute(&self.pool) - .await?; - Ok(()) - } - - async fn reconstruct_commit_meta_from_pack( - &self, - workspace_id: Uuid, - commit_hex: &str, - ) -> anyhow::Result> { - let commit_id = decode_commit_id(commit_hex)?; - let Some(pack_bytes) = self - .git_storage - .fetch_pack_for_commit(workspace_id, &commit_id) - .await? - else { - return Ok(None); - }; - let temp_dir = tempfile::tempdir()?; - let repo = Repository::init_bare(temp_dir.path())?; - apply_pack_to_repo(&repo, &pack_bytes)?; - let oid = git2::Oid::from_bytes(&commit_id)?; - let commit = repo.find_commit(oid)?; - let committed_at = git_time_to_datetime(commit.time())?; - let message = commit - .message() - .map(|m| m.trim_end_matches('\n').to_string()) - .filter(|m| !m.trim().is_empty()); - let author = commit.author(); - let author_name = author.name().map(|s| s.to_string()); - let author_email = author.email().map(|s| s.to_string()); - let parent_commit_id = if commit.parent_count() > 0 { - let parent = commit.parent_id(0)?; - Some(parent.as_bytes().to_vec()) - } else { - None - }; - let files = read_commit_files(&repo, commit_id.as_slice())?; - let mut file_hash_index: HashMap = HashMap::new(); - for (path, bytes) in files.into_iter() { - file_hash_index.insert(path, sha256_hex(&bytes)); - } - let meta = CommitMeta { - commit_id, - parent_commit_id, - message, - author_name, - author_email, - committed_at, - pack_key: format!("git/packs/{}/{}.pack", workspace_id, commit_hex), - file_hash_index, - }; - Ok(Some(meta)) - } -} diff --git a/api/crates/infrastructure/src/git/workspace/remote.rs b/api/crates/infrastructure/src/git/workspace/remote.rs deleted file mode 100644 index 2f29de97..00000000 --- a/api/crates/infrastructure/src/git/workspace/remote.rs +++ /dev/null @@ -1,88 +0,0 @@ -impl GitWorkspaceService { - async fn remote_head_inner( - &self, - workspace_id: Uuid, - cfg: &UserGitCfg, - ) -> anyhow::Result>> { - let state = self.load_repository_state(workspace_id).await?; - let Some((initialized, branch_default)) = state else { - anyhow::bail!("repository not initialized"); - }; - if !initialized { - anyhow::bail!("repository not initialized"); - } - if cfg.repository_url.is_empty() { - 
anyhow::bail!("remote not configured"); - } - let branch = if cfg.branch_name.is_empty() { - branch_default - } else { - cfg.branch_name.clone() - }; - let temp_dir = TempDirBuilder::new() - .prefix("git-remote-head-") - .tempdir() - .map_err(|e| anyhow!(e))?; - let repo = Repository::init_bare(temp_dir.path())?; - let head = fetch_remote_head(&repo, cfg, &branch)?; - Ok(head.map(|oid| oid.as_bytes().to_vec())) - } - - async fn check_remote_inner( - &self, - workspace_id: Uuid, - cfg: &UserGitCfg, - ) -> anyhow::Result { - if cfg.repository_url.is_empty() { - return Ok(GitRemoteCheckDto { - ok: true, - message: "remote not configured".to_string(), - reason: Some("no_remote".to_string()), - }); - } - let branch = cfg.branch_name.clone(); - let temp_dir = TempDirBuilder::new() - .prefix("git-check-") - .tempdir() - .map_err(|e| anyhow!(e))?; - let repo = Repository::init_bare(temp_dir.path())?; - let result = match fetch_remote_head(&repo, cfg, &branch) { - Ok(Some(_)) => GitRemoteCheckDto { - ok: true, - message: "remote reachable".to_string(), - reason: None, - }, - Ok(None) => GitRemoteCheckDto { - ok: false, - message: format!("branch '{branch}' not found on remote"), - reason: Some("branch_missing".to_string()), - }, - Err(err) => { - let lower = err.to_string().to_lowercase(); - let (reason, msg) = if lower.contains("git_http_auth_redirect") { - ( - Some("auth_required".to_string()), - "remote requires authentication or SSO approval".to_string(), - ) - } else if lower.contains("git_http_not_found") || lower.contains("status code: 404") - { - ( - Some("repo_not_found".to_string()), - "repository URL or branch not found".to_string(), - ) - } else { - (None, err.to_string()) - }; - GitRemoteCheckDto { - ok: false, - message: msg, - reason, - } - } - }; - drop(repo); - let _ = temp_dir.close(); - info!(workspace_id = %workspace_id, ok = %result.ok, reason = ?result.reason, "git_remote_check_completed"); - Ok(result) - } -} diff --git a/api/crates/infrastructure/src/git/workspace/service/history.rs b/api/crates/infrastructure/src/git/workspace/service/history.rs deleted file mode 100644 index 8818cd8c..00000000 --- a/api/crates/infrastructure/src/git/workspace/service/history.rs +++ /dev/null @@ -1,671 +0,0 @@ -impl GitWorkspaceService { - pub fn new( - pool: PgPool, - git_storage: Arc, - storage: Arc, - snapshot: Arc, - realtime: Arc, - docs: Arc, - doc_paths: Arc, - ) -> anyhow::Result { - Ok(Self { - pool, - git_storage, - storage, - snapshot, - realtime, - docs, - doc_paths, - }) - } - - fn is_missing_objects(err: &anyhow::Error) -> bool { - let msg = err.to_string().to_lowercase(); - msg.contains("missing objects") || msg.contains("packfile is missing") - } - - async fn recover_missing_objects( - &self, - workspace_id: Uuid, - cfg: &UserGitCfg, - ) -> anyhow::Result<()> { - // Pick branch from cfg or fallback to repository state default. - let branch = if cfg.branch_name.is_empty() { - self.load_repository_state(workspace_id) - .await? 
- .map(|(_, default_branch)| default_branch) - .unwrap_or_else(|| "main".to_string()) - } else { - cfg.branch_name.clone() - }; - - let mut tx = self.pool.begin().await?; - sqlx::query("DELETE FROM git_dirty_files WHERE workspace_id = $1") - .bind(workspace_id) - .execute(&mut *tx) - .await?; - sqlx::query("DELETE FROM git_commits WHERE workspace_id = $1") - .bind(workspace_id) - .execute(&mut *tx) - .await?; - sqlx::query( - "UPDATE git_repository_state SET initialized = true, default_branch = $2, updated_at = now() WHERE workspace_id = $1", - ) - .bind(workspace_id) - .bind(&branch) - .execute(&mut *tx) - .await?; - tx.commit().await?; - - let _ = self.git_storage.delete_all(workspace_id).await; - let _ = self.git_storage.set_latest_commit(workspace_id, None).await; - - // Re-bootstrap remote history (best effort). - let _ = self - .bootstrap_remote_history(workspace_id, cfg, branch.as_str()) - .await; - Ok(()) - } - - async fn load_repository_state( - &self, - workspace_id: Uuid, - ) -> anyhow::Result> { - let row = sqlx::query( - "SELECT initialized, default_branch FROM git_repository_state WHERE workspace_id = $1", - ) - .bind(workspace_id) - .fetch_optional(&self.pool) - .await?; - Ok(row.map(|r| (r.get("initialized"), r.get("default_branch")))) - } - - async fn latest_commit_meta(&self, workspace_id: Uuid) -> anyhow::Result> { - let row = sqlx::query( - r#"SELECT commit_id, parent_commit_id, message, author_name, author_email, - committed_at, pack_key, file_hash_index - FROM git_commits - WHERE workspace_id = $1 - ORDER BY committed_at DESC - LIMIT 1"#, - ) - .bind(workspace_id) - .fetch_optional(&self.pool) - .await?; - - row.map(row_to_commit_meta).transpose() - } - - async fn load_commit_meta_ref( - &self, - workspace_id: Uuid, - rev: &str, - ) -> anyhow::Result> { - if let Some(base) = rev.strip_suffix('^') { - let Some(meta) = self.commit_meta_by_hex(workspace_id, base).await? else { - return Ok(None); - }; - if let Some(parent_id) = meta.parent_commit_id.clone() { - return self - .commit_meta_by_id(workspace_id, parent_id.as_slice()) - .await; - } - return Ok(None); - } - self.commit_meta_by_hex(workspace_id, rev).await - } - - async fn commit_meta_by_id( - &self, - workspace_id: Uuid, - commit_id: &[u8], - ) -> anyhow::Result> { - let row = sqlx::query( - r#"SELECT commit_id, parent_commit_id, message, author_name, author_email, - committed_at, pack_key, file_hash_index - FROM git_commits - WHERE workspace_id = $1 AND commit_id = $2 - LIMIT 1"#, - ) - .bind(workspace_id) - .bind(commit_id) - .fetch_optional(&self.pool) - .await?; - row.map(row_to_commit_meta).transpose() - } - - async fn commit_meta_by_hex( - &self, - workspace_id: Uuid, - hex: &str, - ) -> anyhow::Result> { - let bytes = application::git::ports::git_storage::decode_commit_id(hex)?; - let row = sqlx::query( - r#"SELECT commit_id, parent_commit_id, message, author_name, author_email, - committed_at, pack_key, file_hash_index - FROM git_commits - WHERE workspace_id = $1 AND commit_id = $2 - LIMIT 1"#, - ) - .bind(workspace_id) - .bind(bytes) - .fetch_optional(&self.pool) - .await?; - row.map(row_to_commit_meta).transpose() - } - - async fn ensure_latest_meta(&self, workspace_id: Uuid) -> anyhow::Result> { - if let Some(meta) = self.latest_commit_meta(workspace_id).await? { - return Ok(Some(meta)); - } - let Some(storage_latest) = self.git_storage.latest_commit(workspace_id).await? 
else { - return Ok(None); - }; - info!(workspace_id = %workspace_id, commit = %encode_commit_id(&storage_latest.commit_id), "git_backfill_latest_from_storage"); - self.backfill_commits_from_storage(workspace_id, &storage_latest) - .await?; - Ok(Some(storage_latest)) - } - - async fn bootstrap_remote_history( - &self, - workspace_id: Uuid, - cfg: &UserGitCfg, - branch: &str, - ) -> anyhow::Result> { - let temp_dir = TempDirBuilder::new() - .prefix("git-bootstrap-") - .tempdir() - .map_err(|e| anyhow!(e))?; - let repo = Repository::init_bare(temp_dir.path())?; - - let Some(remote_head) = fetch_remote_head(&repo, cfg, branch)? else { - return Ok(None); - }; - - let ordered = { - let mut revwalk = repo.revwalk()?; - revwalk.push(remote_head)?; - revwalk.set_sorting(Sort::TOPOLOGICAL | Sort::REVERSE)?; - - let mut collected = Vec::new(); - for oid_result in revwalk { - collected.push(oid_result?); - } - collected - }; - - if ordered.is_empty() { - return Ok(None); - } - - let pack_bytes_master = read_first_pack(repo.path())?.ok_or_else(|| { - anyhow!( - "remote fetch produced no pack files for workspace {}", - workspace_id - ) - })?; - - let mut latest_meta = self.git_storage.latest_commit(workspace_id).await?; - - for oid in ordered { - let existing_meta = self.commit_meta_by_id(workspace_id, oid.as_bytes()).await?; - let existing_pack = self - .git_storage - .fetch_pack_for_commit(workspace_id, oid.as_bytes()) - .await?; - // Skip only when both DB row and pack already exist. - if existing_meta.is_some() && existing_pack.is_some() { - latest_meta = existing_meta; - continue; - } - - let (meta, snapshots, pack_bytes) = { - let commit = repo.find_commit(oid)?; - let committed_at = git_time_to_datetime(commit.time())?; - let message = commit - .message() - .map(|m| m.trim_end_matches('\n').to_string()) - .filter(|m| !m.trim().is_empty()); - let author = commit.author(); - let author_name = author.name().map(|s| s.to_string()); - let author_email = author.email().map(|s| s.to_string()); - let parent_commit_id = if commit.parent_count() > 0 { - let parent = commit.parent_id(0)?; - Some(parent.as_bytes().to_vec()) - } else { - None - }; - - let files = read_commit_files(&repo, oid.as_bytes())?; - let mut snapshots: HashMap = HashMap::new(); - let mut file_hash_index: HashMap = HashMap::new(); - for (path, bytes) in files.into_iter() { - let hash = sha256_hex(&bytes); - let is_text = std::str::from_utf8(&bytes).is_ok(); - file_hash_index.insert(path.clone(), hash.clone()); - snapshots.insert( - path, - FileSnapshot { - hash, - data: FileSnapshotData::Inline(bytes), - is_text, - }, - ); - } - - let pack_builder = repo.packbuilder()?; - // Use the full remote pack for every commit to avoid thin-pack corruption. 
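`read_first_pack(repo.path())`, called earlier in this bootstrap, is a helper not shown in these hunks. A minimal sketch of what it plausibly does, assuming libgit2 leaves the fetched objects as a single packfile under `objects/pack` of the bare temp repo (helper name and layout assumption are not confirmed by this diff):

```rust
use std::{fs, path::Path};

// After a fetch into a fresh bare repo, the downloaded objects live in
// objects/pack/pack-<hash>.pack (with a matching .idx we don't need here).
// Return the raw bytes of the first *.pack encountered, if any.
fn read_first_pack(git_dir: &Path) -> anyhow::Result<Option<Vec<u8>>> {
    let pack_dir = git_dir.join("objects").join("pack");
    if !pack_dir.is_dir() {
        return Ok(None);
    }
    for entry in fs::read_dir(&pack_dir)? {
        let path = entry?.path();
        if path.extension().and_then(|e| e.to_str()) == Some("pack") {
            return Ok(Some(fs::read(&path)?));
        }
    }
    Ok(None)
}
```

Reusing that one full pack as `pack_bytes` for every commit in the chain trades storage for safety: per-commit packs could omit delta bases held elsewhere, which, as the comment above notes, corrupts later pack application.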
- let pack_bytes = pack_bytes_master.clone(); - drop(pack_builder); - - let commit_id = oid.as_bytes().to_vec(); - let pack_key = format!( - "git/packs/{}/{}.pack", - workspace_id, - encode_commit_id(&commit_id) - ); - - let meta = CommitMeta { - commit_id, - parent_commit_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index, - }; - - (meta, snapshots, pack_bytes) - }; - - let prev_latest = latest_meta.clone(); - let snapshot_keys = match self - .store_commit_snapshots(workspace_id, &meta.commit_id, &snapshots) - .await - { - Ok(keys) => keys, - Err(err) => { - return Err(err); - } - }; - - if let Err(err) = self - .git_storage - .store_pack(workspace_id, &pack_bytes, &meta) - .await - { - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - return Err(err.into()); - } - - if let Err(err) = self - .git_storage - .set_latest_commit(workspace_id, Some(&meta)) - .await - { - let _ = self - .git_storage - .delete_pack(workspace_id, &meta.commit_id) - .await; - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - let _ = self - .git_storage - .set_latest_commit(workspace_id, prev_latest.as_ref()) - .await; - return Err(err.into()); - } - - let mut tx = self.pool.begin().await?; - let upsert_res = sqlx::query( - r#"INSERT INTO git_commits ( - commit_id, - parent_commit_id, - workspace_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9) - ON CONFLICT (workspace_id, commit_id) DO UPDATE SET - parent_commit_id = EXCLUDED.parent_commit_id, - message = EXCLUDED.message, - author_name = EXCLUDED.author_name, - author_email = EXCLUDED.author_email, - committed_at = EXCLUDED.committed_at, - pack_key = EXCLUDED.pack_key, - file_hash_index = EXCLUDED.file_hash_index"#, - ) - .bind(meta.commit_id.clone()) - .bind(meta.parent_commit_id.clone()) - .bind(workspace_id) - .bind(meta.message.clone()) - .bind(meta.author_name.clone()) - .bind(meta.author_email.clone()) - .bind(meta.committed_at) - .bind(meta.pack_key.clone()) - .bind(Json(&meta.file_hash_index)) - .execute(&mut *tx) - .await; - - if let Err(err) = upsert_res { - tx.rollback().await.ok(); - let _ = self - .git_storage - .delete_pack(workspace_id, &meta.commit_id) - .await; - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - let _ = self - .git_storage - .set_latest_commit(workspace_id, prev_latest.as_ref()) - .await; - return Err(err.into()); - } - - if let Err(err) = sqlx::query( - "UPDATE git_repository_state SET updated_at = now() WHERE workspace_id = $1", - ) - .bind(workspace_id) - .execute(&mut *tx) - .await - { - tx.rollback().await.ok(); - let _ = self - .git_storage - .delete_pack(workspace_id, &meta.commit_id) - .await; - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - let _ = self - .git_storage - .set_latest_commit(workspace_id, prev_latest.as_ref()) - .await; - return Err(err.into()); - } - - if let Err(err) = tx.commit().await { - let _ = self - .git_storage - .delete_pack(workspace_id, &meta.commit_id) - .await; - for key in snapshot_keys.iter().rev() { - let _ = self.git_storage.delete_blob(key).await; - } - let _ = self - .git_storage - .set_latest_commit(workspace_id, prev_latest.as_ref()) - .await; - return Err(err.into()); - } - - latest_meta = Some(meta); - } - - drop(repo); - let _ = temp_dir.close(); - self.git_storage - 
.latest_commit(workspace_id) - .await - .map_err(Into::into) - } - - async fn backfill_commits_from_storage( - &self, - workspace_id: Uuid, - latest: &CommitMeta, - ) -> anyhow::Result<()> { - let mut pending = Vec::new(); - let mut cursor = Some(latest.clone()); - while let Some(meta) = cursor { - if self - .commit_meta_by_id(workspace_id, meta.commit_id.as_slice()) - .await? - .is_some() - { - break; - } - pending.push(meta.clone()); - cursor = match meta.parent_commit_id.clone() { - Some(parent) => { - self.git_storage - .commit_meta(workspace_id, parent.as_slice()) - .await? - } - None => None, - }; - } - if pending.is_empty() { - return Ok(()); - } - pending.reverse(); - let mut tx = self.pool.begin().await?; - for meta in pending.into_iter() { - sqlx::query( - r#"INSERT INTO git_commits ( - commit_id, - parent_commit_id, - workspace_id, - message, - author_name, - author_email, - committed_at, - pack_key, - file_hash_index - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9) - ON CONFLICT (workspace_id, commit_id) DO NOTHING"#, - ) - .bind(meta.commit_id.clone()) - .bind(meta.parent_commit_id.clone()) - .bind(workspace_id) - .bind(meta.message.clone()) - .bind(meta.author_name.clone()) - .bind(meta.author_email.clone()) - .bind(meta.committed_at) - .bind(meta.pack_key.clone()) - .bind(Json(&meta.file_hash_index)) - .execute(&mut *tx) - .await?; - } - tx.commit().await?; - Ok(()) - } - - async fn collect_commit_chain( - &self, - workspace_id: Uuid, - start: CommitMeta, - ) -> anyhow::Result> { - let mut chain = Vec::new(); - let mut cursor = Some(start); - while let Some(meta) = cursor { - chain.push(meta.clone()); - cursor = match meta.parent_commit_id.clone() { - Some(parent) => { - self.commit_meta_by_id(workspace_id, parent.as_slice()) - .await? - } - None => None, - }; - } - Ok(chain) - } - - async fn remove_commits( - &self, - workspace_id: Uuid, - commits: &[CommitMeta], - ) -> anyhow::Result<()> { - for meta in commits { - let commit_hex = encode_commit_id(&meta.commit_id); - if let Err(error) = self - .git_storage - .delete_pack(workspace_id, &meta.commit_id) - .await - { - warn!( - workspace_id = %workspace_id, - commit = %commit_hex, - error = ?error, - "git_commit_cleanup_pack_failed" - ); - } - for path in meta.file_hash_index.keys() { - let key = blob_key(workspace_id, &meta.commit_id, path); - if let Err(error) = self.git_storage.delete_blob(&key).await { - warn!( - workspace_id = %workspace_id, - commit = %commit_hex, - path = %path, - error = ?error, - "git_commit_cleanup_blob_failed" - ); - } - } - sqlx::query("DELETE FROM git_commits WHERE workspace_id = $1 AND commit_id = $2") - .bind(workspace_id) - .bind(meta.commit_id.clone()) - .execute(&self.pool) - .await?; - } - Ok(()) - } - - async fn realign_commit_history( - &self, - workspace_id: Uuid, - storage_latest: Option, - db_latest: Option, - ) -> anyhow::Result<()> { - match (storage_latest, db_latest) { - (Some(storage), Some(db)) => { - if storage.commit_id == db.commit_id { - return Ok(()); - } - let storage_id = storage.commit_id.clone(); - let mut cursor = Some(db.clone()); - let mut reached_storage = false; - let mut to_prune: Vec = Vec::new(); - while let Some(meta) = cursor.clone() { - if meta.commit_id == storage_id { - reached_storage = true; - break; - } - to_prune.push(meta.clone()); - cursor = match meta.parent_commit_id.clone() { - Some(parent) => { - self.commit_meta_by_id(workspace_id, parent.as_slice()) - .await? 
- } - None => None, - }; - } - if !reached_storage { - let all = self.collect_commit_chain(workspace_id, db.clone()).await?; - if !all.is_empty() { - info!( - workspace_id = %workspace_id, - removed = all.len(), - "git_commit_pointer_reset_db_chain" - ); - self.remove_commits(workspace_id, &all).await?; - } - } else if !to_prune.is_empty() { - info!( - workspace_id = %workspace_id, - removed = to_prune.len(), - "git_commit_pointer_pruned_db_commits" - ); - self.remove_commits(workspace_id, &to_prune).await?; - } - self.backfill_commits_from_storage(workspace_id, &storage) - .await?; - } - (Some(storage), None) => { - self.backfill_commits_from_storage(workspace_id, &storage) - .await?; - } - (None, Some(db)) => { - let all = self.collect_commit_chain(workspace_id, db).await?; - if !all.is_empty() { - info!( - workspace_id = %workspace_id, - removed = all.len(), - "git_commit_pointer_dropped_db_history" - ); - self.remove_commits(workspace_id, &all).await?; - } - } - (None, None) => {} - } - Ok(()) - } - - async fn prune_commits_from_head( - &self, - workspace_id: Uuid, - commits: &[CommitMeta], - ) -> anyhow::Result<()> { - if commits.is_empty() { - return Ok(()); - } - self.remove_commits(workspace_id, commits).await?; - let new_latest = self.latest_commit_meta(workspace_id).await?; - self.git_storage - .set_latest_commit(workspace_id, new_latest.as_ref()) - .await?; - Ok(()) - } - - async fn ensure_storage_commit_integrity(&self, workspace_id: Uuid) -> anyhow::Result<()> { - loop { - let Some(latest) = self.latest_commit_meta(workspace_id).await? else { - self.git_storage - .set_latest_commit(workspace_id, None) - .await?; - return Ok(()); - }; - let chain = self - .collect_commit_chain(workspace_id, latest.clone()) - .await?; - let mut missing_idx: Option = None; - for (idx, meta) in chain.iter().enumerate() { - match self - .git_storage - .commit_meta(workspace_id, meta.commit_id.as_slice()) - .await? 
- { - Some(_) => continue, - None => { - missing_idx = Some(idx); - break; - } - } - } - if let Some(idx) = missing_idx { - let to_remove: Vec = chain[..=idx].to_vec(); - info!( - workspace_id = %workspace_id, - removed = to_remove.len(), - missing_commit = %encode_commit_id(&chain[idx].commit_id), - "git_commit_pointer_pruned_missing_storage_meta" - ); - self.prune_commits_from_head(workspace_id, &to_remove) - .await?; - continue; - } - break; - } - Ok(()) - } - -} diff --git a/api/crates/infrastructure/src/git/workspace/service/state.rs b/api/crates/infrastructure/src/git/workspace/service/state.rs deleted file mode 100644 index ea70c8e6..00000000 --- a/api/crates/infrastructure/src/git/workspace/service/state.rs +++ /dev/null @@ -1,10 +0,0 @@ -use domain::documents::doc_type::DocumentType; -use domain::documents::title::Title; - -include!("state/collect.rs"); -include!("state/dirty.rs"); -include!("state/export.rs"); -include!("state/deltas.rs"); -include!("state/snapshots.rs"); -include!("state/apply.rs"); -include!("state/diff.rs"); diff --git a/api/crates/infrastructure/src/git/workspace/service/state/apply.rs b/api/crates/infrastructure/src/git/workspace/service/state/apply.rs deleted file mode 100644 index 521f5b63..00000000 --- a/api/crates/infrastructure/src/git/workspace/service/state/apply.rs +++ /dev/null @@ -1,364 +0,0 @@ -impl GitWorkspaceService { - async fn apply_state_to_workspace( - &self, - workspace_id: Uuid, - state: &HashMap, - previous_index: &HashMap, - ) -> anyhow::Result { - let mut changed: u32 = 0; - // write/update files - for (path, snapshot) in state.iter() { - let rel = format!("{}/{}", workspace_id, path.trim_start_matches('/')); - let abs = self.storage.absolute_from_relative(&rel); - if let Some(parent) = abs.parent() { - async_fs::create_dir_all(parent).await?; - } - let bytes = self.snapshot_bytes(snapshot).await?; - self.storage.write_bytes(abs.as_path(), &bytes).await?; - changed += 1; - } - // remove files missing in next state - for path in previous_index.keys() { - if state.contains_key(path) { - continue; - } - let rel = format!("{}/{}", workspace_id, path.trim_start_matches('/')); - let abs = self.storage.absolute_from_relative(&rel); - if async_fs::remove_file(&abs).await.is_ok() { - changed += 1; - } - } - Ok(changed) - } - - async fn ensure_folder( - &self, - workspace_id: Uuid, - actor_id: Uuid, - folder_path: &str, - cache: &mut HashMap, - ) -> anyhow::Result> { - let trimmed = folder_path.trim_matches('/'); - if trimmed.is_empty() { - return Ok(None); - } - - let mut current_parent: Option = None; - let mut accumulated = String::new(); - for segment in trimmed.split('/') { - if !accumulated.is_empty() { - accumulated.push('/'); - } - accumulated.push_str(segment); - - if let Some(id) = cache.get(&accumulated) { - current_parent = Some(*id); - continue; - } - - let lookup_path = format!("{}/{}", workspace_id, accumulated); - if let Some(existing) = self - .doc_paths - .get_by_owner_and_path(workspace_id, &lookup_path) - .await? - { - if existing.doc_type() != DocumentType::Folder { - anyhow::bail!("path_conflict_not_folder"); - } - cache.insert(accumulated.clone(), existing.id()); - current_parent = Some(existing.id()); - continue; - } - - let title = if segment.trim().is_empty() { - "folder" - } else { - segment - }; - let parent_desired_path = match current_parent { - Some(parent_id) => self - .docs - .get_meta_for_owner(parent_id, workspace_id) - .await? 
- .map(|m| m.desired_path), - None => None, - }; - let title = Title::from_user_input(title); - let mut repo = self.docs.as_ref(); - let folder = application::documents::use_cases::create_document::CreateDocument { - repo: &mut repo, - } - .execute( - workspace_id, - actor_id, - &title, - current_parent, - parent_desired_path.as_ref(), - DocumentType::Folder, - None, - ) - .await?; - self.doc_paths - .update_repo_path(folder.id(), workspace_id, &accumulated) - .await?; - - cache.insert(accumulated.clone(), folder.id()); - current_parent = Some(folder.id()); - } - - Ok(current_parent) - } - - async fn materialize_documents_from_state( - &self, - workspace_id: Uuid, - actor_id: Uuid, - state: &HashMap, - ) -> anyhow::Result<(u32, u32)> { - fn folder_key(path: &str) -> String { - path.rsplit_once('/') - .map(|(parent, _)| parent.trim().trim_end_matches('/')) - .filter(|s| !s.is_empty()) - .unwrap_or_default() - .to_string() - } - - fn attachment_owner_folder(path: &str) -> String { - if let Some(idx) = path.find("/attachments/") { - let prefix = &path[..idx]; - if prefix.is_empty() { - String::new() - } else { - prefix.trim_end_matches('/').to_string() - } - } else if path.starts_with("attachments/") { - String::new() - } else { - folder_key(path) - } - } - - fn is_markdown_path(path: &str) -> bool { - let lower = path.to_ascii_lowercase(); - lower.ends_with(".md") || lower.ends_with(".markdown") - } - - let mut folder_cache: HashMap = HashMap::new(); - let mut docs_created: u32 = 0; - let mut attachments_created: u32 = 0; - - let mut existing_by_desired: HashMap = HashMap::new(); - let mut folder_docs: HashMap> = HashMap::new(); - - for doc in self.docs.list_workspace_documents(workspace_id).await? { - let normalized = normalize_repo_path(doc.desired_path().as_str().to_string()); - existing_by_desired.insert(normalized.clone(), doc.id()); - if doc.doc_type() != DocumentType::Folder { - let key = folder_key(&normalized); - folder_docs.entry(key.clone()).or_default().push(doc.id()); - if doc.archived_at().is_some() { - let archived_key = if key.is_empty() { - "Archives".to_string() - } else { - format!("Archives/{}", key) - }; - folder_docs.entry(archived_key).or_default().push(doc.id()); - } - } - } - - let mut paths: Vec = state.keys().cloned().collect(); - paths.sort(); - - // First pass: create documents only for markdown files - for path in paths.iter() { - let snapshot = match state.get(path) { - Some(s) => s, - None => continue, - }; - if !snapshot.is_text { - continue; - } - let normalized = normalize_repo_path(path.clone()); - if !is_markdown_path(&normalized) { - continue; - } - - // Skip if document already exists at desired_path (including folders that would conflict) - if existing_by_desired.contains_key(&normalized) { - continue; - } - - let parent_path = folder_key(&normalized); - let parent_id = if parent_path.is_empty() { - None - } else { - self.ensure_folder(workspace_id, actor_id, &parent_path, &mut folder_cache) - .await? - }; - - let filename = normalized - .rsplit('/') - .next() - .unwrap_or(&normalized) - .to_string(); - let title = filename - .trim_end_matches(".md") - .trim_end_matches(".markdown") - .trim_end_matches(".txt"); - - let parent_desired_path = match parent_id { - Some(parent_id) => self - .docs - .get_meta_for_owner(parent_id, workspace_id) - .await? 
- .map(|m| m.desired_path), - None => None, - }; - let title = Title::from_user_input(if title.is_empty() { "Document" } else { title }); - let mut repo = self.docs.as_ref(); - let doc = application::documents::use_cases::create_document::CreateDocument { - repo: &mut repo, - } - .execute( - workspace_id, - actor_id, - &title, - parent_id, - parent_desired_path.as_ref(), - DocumentType::Document, - None, - ) - .await?; - self.doc_paths - .update_repo_path(doc.id(), workspace_id, &normalized) - .await?; - docs_created += 1; - existing_by_desired.insert(normalized.clone(), doc.id()); - - folder_docs.entry(parent_path).or_default().push(doc.id()); - - let bytes = self.snapshot_bytes(snapshot).await.unwrap_or_default(); - let body = extract_markdown_body(&bytes) - .unwrap_or_else(|| std::str::from_utf8(&bytes).unwrap_or_default().to_string()); - let snap_bytes = snapshot_from_markdown(&body); - let _ = self - .realtime - .apply_snapshot(&doc.id().to_string(), snap_bytes.as_slice()) - .await; - let _ = self.realtime.force_persist(&doc.id().to_string()).await; - } - - for docs in folder_docs.values_mut() { - docs.sort(); - } - - // Second pass: attach binaries without creating documents - for path in paths { - let snapshot = match state.get(&path) { - Some(s) => s, - None => continue, - }; - if snapshot.is_text { - continue; - } - let normalized = normalize_repo_path(path.clone()); - if !normalized.contains("/attachments/") && !normalized.starts_with("attachments/") { - continue; - } - let filename = normalized - .rsplit('/') - .next() - .unwrap_or(&normalized) - .to_string(); - let folder = attachment_owner_folder(&normalized); - let doc_id = folder_docs.get(&folder).and_then(|v| v.first().copied()); - let Some(doc_id) = doc_id else { - warn!( - workspace_id = %workspace_id, - repo_path = normalized.as_str(), - "git_materialize_attachment_no_owner" - ); - continue; - }; - - let storage_path = format!("{}/{}", workspace_id, normalized); - let existing: Option = - sqlx::query_scalar("SELECT id FROM files WHERE storage_path = $1 LIMIT 1") - .bind(&storage_path) - .fetch_optional(&self.pool) - .await?; - if existing.is_some() { - continue; - } - - let bytes = self.snapshot_bytes(snapshot).await.unwrap_or_default(); - let size = bytes.len() as i64; - let _ = sqlx::query( - r#"INSERT INTO files (document_id, filename, content_type, size, storage_path, content_hash) - VALUES ($1,$2,$3,$4,$5,$6)"#, - ) - .bind(doc_id) - .bind(&filename) - .bind::>(None) - .bind(size) - .bind(&storage_path) - .bind(&snapshot.hash) - .execute(&self.pool) - .await?; - attachments_created += 1; - } - Ok((docs_created, attachments_created)) - } - - /// Apply merged markdown files directly to realtime/persistence so documents reflect Pull results. - async fn apply_merged_to_documents( - &self, - workspace_id: Uuid, - next_state: &HashMap, - ) -> anyhow::Result<()> { - let doc_rows = self - .docs - .list_workspace_documents(workspace_id) - .await? 
- .into_iter() - .filter(|d| d.doc_type() != DocumentType::Folder); - - for doc in doc_rows { - let doc_id = doc.id(); - let normalized = normalize_repo_path(doc.desired_path().as_str().to_string()); - let Some(snapshot) = next_state.get(&normalized) else { - continue; - }; - - if !snapshot.is_text { - continue; - } - let bytes = match self.snapshot_bytes(snapshot).await { - Ok(b) => b, - Err(err) => { - warn!(document_id = %doc_id, error = ?err, "git_pull_snapshot_bytes_failed"); - continue; - } - }; - let body = match extract_markdown_body(&bytes) { - Some(b) => b, - None => continue, - }; - let snap_bytes = - application::documents::services::realtime::snapshot::snapshot_from_markdown(&body); - if let Err(err) = crate::core::storage::suppress_git_dirty(async { - self.realtime - .apply_snapshot(&doc_id.to_string(), snap_bytes.as_slice()) - .await?; - self.realtime.force_persist(&doc_id.to_string()).await - }) - .await - { - warn!(document_id = %doc_id, error = ?err, "git_pull_apply_snapshot_failed"); - continue; - } - } - Ok(()) - } -} diff --git a/api/crates/infrastructure/src/git/workspace/service/state/collect.rs b/api/crates/infrastructure/src/git/workspace/service/state/collect.rs deleted file mode 100644 index f4b64c63..00000000 --- a/api/crates/infrastructure/src/git/workspace/service/state/collect.rs +++ /dev/null @@ -1,121 +0,0 @@ -impl GitWorkspaceService { - async fn collect_current_state( - &self, - workspace_id: Uuid, - ) -> anyhow::Result> { - let mut state: HashMap = HashMap::new(); - - let doc_rows = self - .docs - .list_workspace_documents(workspace_id) - .await? - .into_iter() - .filter(|d| d.doc_type() != DocumentType::Folder); - - for doc in doc_rows { - let doc_id = doc.id(); - let export = match self.snapshot.export_current_markdown(&doc_id).await? 
{ - Some(export) => export, - None => continue, - }; - let repo_path = export - .repo_path - .or_else(|| Some(doc.desired_path().as_str().to_string())) - .map(normalize_repo_path) - .ok_or_else(|| anyhow!("missing_repo_path_for_doc {}", doc_id))?; - state.insert( - repo_path, - FileSnapshot { - hash: export.content_hash, - data: FileSnapshotData::Inline(export.bytes), - is_text: true, - }, - ); - } - - let attachment_rows = sqlx::query( - r#"SELECT f.id AS file_id, f.storage_path, f.content_hash - FROM files f - JOIN documents d ON d.id = f.document_id - WHERE d.owner_id = $1"#, - ) - .bind(workspace_id) - .fetch_all(&self.pool) - .await?; - - for row in attachment_rows { - let file_id: Uuid = row.get("file_id"); - let storage_path: String = row.get("storage_path"); - let stored_hash: Option = row - .try_get("content_hash") - .ok() - .and_then(|h: String| if h.is_empty() { None } else { Some(h) }); - let (hash, needs_persist) = match stored_hash { - Some(existing) => (existing, false), - None => { - let computed = self - .compute_attachment_hash(&storage_path) - .await - .with_context(|| { - format!("failed to compute attachment hash for {}", storage_path) - })?; - match computed { - Some(value) => (value, true), - None => continue, - } - } - }; - if needs_persist { - if let Err(err) = self.persist_attachment_hash(file_id, &hash).await { - warn!( - file_id = %file_id, - path = storage_path.as_str(), - error = ?err, - "git_workspace_attachment_hash_persist_failed" - ); - } - } - let repo_path = repo_relative_path(&storage_path)?; - state.insert( - repo_path, - FileSnapshot { - hash, - data: FileSnapshotData::StoragePath(storage_path), - is_text: false, - }, - ); - } - - Ok(state) - } - - async fn compute_attachment_hash(&self, storage_path: &str) -> anyhow::Result> { - let abs = self.storage.absolute_from_relative(storage_path); - match self.storage.read_bytes(abs.as_path()).await { - Ok(bytes) => Ok(Some(sha256_hex(&bytes))), - Err(err) => { - if let Some(io_err) = err.downcast_ref::() { - if io_err.kind() == io::ErrorKind::NotFound { - return Ok(None); - } - } - if err.to_string().to_lowercase().contains("not found") { - return Ok(None); - } - Err(err.into()) - } - } - } - - async fn persist_attachment_hash(&self, file_id: Uuid, hash: &str) -> anyhow::Result<()> { - sqlx::query( - r#"UPDATE files SET content_hash = $2, updated_at = now() - WHERE id = $1"#, - ) - .bind(file_id) - .bind(hash) - .execute(&self.pool) - .await?; - Ok(()) - } -} diff --git a/api/crates/infrastructure/src/git/workspace/service/state/deltas.rs b/api/crates/infrastructure/src/git/workspace/service/state/deltas.rs deleted file mode 100644 index ff1367b5..00000000 --- a/api/crates/infrastructure/src/git/workspace/service/state/deltas.rs +++ /dev/null @@ -1,31 +0,0 @@ -impl GitWorkspaceService { - fn compute_deltas( - &self, - current: &HashMap, - previous: &HashMap, - ) -> FileDeltaSummary { - let mut added = Vec::new(); - let mut modified = Vec::new(); - let mut deleted = Vec::new(); - - for (path, snapshot) in current.iter() { - match previous.get(path) { - None => added.push(path.clone()), - Some(prev_hash) if prev_hash != &snapshot.hash => modified.push(path.clone()), - _ => {} - } - } - - for path in previous.keys() { - if !current.contains_key(path) { - deleted.push(path.clone()); - } - } - - FileDeltaSummary { - added, - modified, - deleted, - } - } -} diff --git a/api/crates/infrastructure/src/git/workspace/service/state/diff.rs b/api/crates/infrastructure/src/git/workspace/service/state/diff.rs deleted file 
diff --git a/api/crates/infrastructure/src/git/workspace/service/state/diff.rs b/api/crates/infrastructure/src/git/workspace/service/state/diff.rs
deleted file mode 100644
index 352ea0ba..00000000
--- a/api/crates/infrastructure/src/git/workspace/service/state/diff.rs
+++ /dev/null
@@ -1,173 +0,0 @@
-impl GitWorkspaceService {
-    fn build_diff_result(
-        &self,
-        path: &str,
-        old_content: Option<&str>,
-        new_content: Option<&str>,
-    ) -> TextDiffResult {
-        match (old_content, new_content) {
-            (Some(old), Some(new)) => compute_text_diff(old, new, path),
-            _ => TextDiffResult {
-                file_path: path.to_string(),
-                diff_lines: Vec::new(),
-                old_content: old_content.map(|s| s.to_string()),
-                new_content: new_content.map(|s| s.to_string()),
-            },
-        }
-    }
-
-    async fn commit_diff_via_packs(
-        &self,
-        workspace_id: Uuid,
-        from_meta: Option<&CommitMeta>,
-        to_meta: &CommitMeta,
-    ) -> anyhow::Result<Vec<TextDiffResult>> {
-        let (to_pack_dir, to_pack_paths) = self
-            .persist_pack_chain(workspace_id, Some(to_meta.commit_id.as_slice()))
-            .await?
-            .ok_or_else(|| {
-                anyhow!(
-                    "missing pack data for commit {}",
-                    encode_commit_id(&to_meta.commit_id)
-                )
-            })?;
-
-        let from_pack = if let Some(from_meta) = from_meta {
-            if from_meta.commit_id != to_meta.commit_id {
-                Some(
-                    self.persist_pack_chain(workspace_id, Some(from_meta.commit_id.as_slice()))
-                        .await?
-                        .ok_or_else(|| {
-                            anyhow!(
-                                "missing pack data for commit {}",
-                                encode_commit_id(&from_meta.commit_id)
-                            )
-                        })?,
-                )
-            } else {
-                None
-            }
-        } else {
-            None
-        };
-
-        let temp_dir = TempDirBuilder::new()
-            .prefix("git-diff-")
-            .tempdir()
-            .map_err(|e| anyhow::anyhow!(e))?;
-        let repo = Repository::init_bare(temp_dir.path())?;
-
-        apply_pack_files(&repo, &to_pack_paths)?;
-        if let Some((_, ref paths)) = from_pack {
-            apply_pack_files(&repo, paths)?;
-        }
-
-        let from_files = if let Some(from_meta) = from_meta {
-            read_commit_files(&repo, from_meta.commit_id.as_slice())?
-        } else {
-            HashMap::new()
-        };
-        let to_files = read_commit_files(&repo, to_meta.commit_id.as_slice())?;
-
-        drop(repo);
-        let _ = temp_dir.close();
-        drop(to_pack_dir);
-        if let Some((dir, _)) = from_pack {
-            drop(dir);
-        }
-
-        let mut paths: BTreeSet<String> = BTreeSet::new();
-        paths.extend(from_files.keys().cloned());
-        paths.extend(to_files.keys().cloned());
-
-        let mut results = Vec::new();
-        for path in paths {
-            let old_bytes = from_files.get(&path);
-            let new_bytes = to_files.get(&path);
-            let old_content = old_bytes
-                .and_then(|b| std::str::from_utf8(b).ok())
-                .map(|s| s.to_string());
-            let new_content = new_bytes
-                .and_then(|b| std::str::from_utf8(b).ok())
-                .map(|s| s.to_string());
-            if old_content.is_none() && new_content.is_none() {
-                if old_bytes.is_some() || new_bytes.is_some() {
-                    results.push(self.build_diff_result(&path, None, None));
-                }
-                continue;
-            }
-            results.push(self.build_diff_result(
-                &path,
-                old_content.as_deref(),
-                new_content.as_deref(),
-            ));
-        }
-        Ok(results)
-    }
-
-    async fn commit_diff_from_storage(
-        &self,
-        workspace_id: Uuid,
-        from_meta: Option<&CommitMeta>,
-        to_meta: Option<&CommitMeta>,
-    ) -> anyhow::Result<Vec<TextDiffResult>> {
-        let Some(to_meta) = to_meta else {
-            return Ok(Vec::new());
-        };
-
-        let mut paths: BTreeSet<String> = BTreeSet::new();
-        if let Some(meta) = from_meta {
-            paths.extend(meta.file_hash_index.keys().cloned());
-        }
-        paths.extend(to_meta.file_hash_index.keys().cloned());
-
-        let mut results = Vec::new();
-        for path in paths {
-            let old_hash = from_meta.and_then(|meta| meta.file_hash_index.get(&path));
-            let new_hash = to_meta.file_hash_index.get(&path);
-            if let (Some(old), Some(new)) = (old_hash, new_hash) {
-                if old == new {
-                    continue;
-                }
-            }
-
-            let old_bytes = match (from_meta, old_hash) {
-                (Some(meta), Some(_)) => {
-                    self.load_file_snapshot(workspace_id, meta.commit_id.as_slice(), &path)
-                        .await?
-                }
-                _ => None,
-            };
-            let new_bytes = match new_hash {
-                Some(_) => {
-                    self.load_file_snapshot(workspace_id, to_meta.commit_id.as_slice(), &path)
-                        .await?
-                }
-                None => None,
-            };
-
-            let old_text = old_bytes
-                .as_ref()
-                .and_then(|bytes| std::str::from_utf8(bytes).ok())
-                .map(|s| s.to_string());
-            let new_text = new_bytes
-                .as_ref()
-                .and_then(|bytes| std::str::from_utf8(bytes).ok())
-                .map(|s| s.to_string());
-
-            if old_text.is_none() && new_text.is_none() {
-                if old_bytes.is_some() || new_bytes.is_some() {
-                    results.push(self.build_diff_result(&path, None, None));
-                }
-            } else {
-                results.push(self.build_diff_result(
-                    &path,
-                    old_text.as_deref(),
-                    new_text.as_deref(),
-                ));
-            }
-        }
-
-        Ok(results)
-    }
-}
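Both diff paths above only attempt a line diff when each side decodes losslessly as UTF-8; binary blobs fall through with empty diff_lines. A compact sketch of that gating (hypothetical helper, not part of the patch):

    // Decide how a path should be reported, mirroring the UTF-8 checks above.
    fn classify(old: Option<&[u8]>, new: Option<&[u8]>) -> &'static str {
        let as_text = |b: Option<&[u8]>| b.and_then(|b| std::str::from_utf8(b).ok());
        match (as_text(old), as_text(new)) {
            (Some(_), Some(_)) => "text: compute a line diff",
            (None, None) if old.is_some() || new.is_some() => "binary: record change, no diff",
            _ => "add or delete: carry raw content only",
        }
    }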
diff --git a/api/crates/infrastructure/src/git/workspace/service/state/dirty.rs b/api/crates/infrastructure/src/git/workspace/service/state/dirty.rs
deleted file mode 100644
index 07ccf746..00000000
--- a/api/crates/infrastructure/src/git/workspace/service/state/dirty.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-impl GitWorkspaceService {
-    async fn fetch_dirty(&self, workspace_id: Uuid) -> anyhow::Result<Vec<DirtyRow>> {
-        let rows = sqlx::query(
-            r#"SELECT path, is_text, op, content_hash
-               FROM git_dirty_files
-               WHERE workspace_id = $1
-               ORDER BY created_at ASC"#,
-        )
-        .bind(workspace_id)
-        .fetch_all(&self.pool)
-        .await?;
-
-        let mut out = Vec::new();
-        for r in rows {
-            let path: String = r.get("path");
-            let is_text: bool = r.get("is_text");
-            let op: String = r.get("op");
-            let content_hash: Option<String> = r.try_get("content_hash").ok();
-            out.push(DirtyRow {
-                path,
-                is_text,
-                op,
-                content_hash,
-            });
-        }
-        Ok(out)
-    }
-
-    async fn clear_dirty(&self, workspace_id: Uuid) -> anyhow::Result<u64> {
-        let res = sqlx::query("DELETE FROM git_dirty_files WHERE workspace_id = $1")
-            .bind(workspace_id)
-            .execute(&self.pool)
-            .await?;
-        Ok(res.rows_affected())
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/service/state/export.rs b/api/crates/infrastructure/src/git/workspace/service/state/export.rs
deleted file mode 100644
index ac358227..00000000
--- a/api/crates/infrastructure/src/git/workspace/service/state/export.rs
+++ /dev/null
@@ -1,52 +0,0 @@
-impl GitWorkspaceService {
-    async fn export_markdown_for_repo_path(
-        &self,
-        workspace_id: Uuid,
-        repo_path: &str,
-    ) -> anyhow::Result<Option<(Vec<u8>, String)>> {
-        let trimmed = repo_path.trim_start_matches('/');
-        let mut candidates: Vec<(&str, bool)> = vec![(trimmed, false)];
-        if let Some(stripped) = trimmed.strip_prefix("Archives/") {
-            if !stripped.is_empty() {
-                candidates.push((stripped, true));
-            }
-        }
-
-        // First try by normalized repo path (documents.path). Fall back to desired_path for older records.
-        let all_docs = self.docs.list_workspace_documents(workspace_id).await?;
-
-        for (candidate, archived_only) in candidates {
-            let lookup_path = format!("{}/{}", workspace_id, candidate);
-            let from_path = self
-                .doc_paths
-                .get_by_owner_and_path(workspace_id, &lookup_path)
-                .await?;
-
-            let doc = if let Some(doc) = from_path {
-                Some(doc)
-            } else {
-                all_docs
-                    .iter()
-                    .find(|d| {
-                        normalize_repo_path(d.desired_path().as_str().to_string()) == candidate
-                    })
-                    .cloned()
-            };
-
-            if let Some(doc) = doc {
-                if doc.doc_type() == DocumentType::Folder {
-                    continue;
-                }
-                if archived_only && doc.archived_at().is_none() {
-                    continue;
-                }
-                let doc_id = doc.id();
-                if let Some(export) = self.snapshot.export_current_markdown(&doc_id).await? {
-                    return Ok(Some((export.bytes, export.content_hash)));
-                }
-            }
-        }
-
-        Ok(None)
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/service/state/snapshots.rs b/api/crates/infrastructure/src/git/workspace/service/state/snapshots.rs
deleted file mode 100644
index eea8949d..00000000
--- a/api/crates/infrastructure/src/git/workspace/service/state/snapshots.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-impl GitWorkspaceService {
-    async fn store_commit_snapshots(
-        &self,
-        workspace_id: Uuid,
-        commit_id: &[u8],
-        state: &HashMap<String, FileSnapshot>,
-    ) -> anyhow::Result<Vec<String>> {
-        let mut stored = Vec::new();
-        for (path, snapshot) in state.iter() {
-            let key = blob_key(workspace_id, commit_id, path);
-            let bytes = self.snapshot_bytes(snapshot).await?;
-            if let Err(err) = self.git_storage.put_blob(&key, &bytes).await {
-                for key in stored.iter().rev() {
-                    let _ = self.git_storage.delete_blob(key).await;
-                }
-                return Err(err.into());
-            }
-            stored.push(key);
-        }
-        Ok(stored)
-    }
-
-    async fn snapshot_bytes(&self, snapshot: &FileSnapshot) -> anyhow::Result<Vec<u8>> {
-        match &snapshot.data {
-            FileSnapshotData::Inline(bytes) => Ok(bytes.clone()),
-            FileSnapshotData::StoragePath(path) => {
-                let abs = self.storage.absolute_from_relative(path);
-                self.storage.read_bytes(abs.as_path()).await.map_err(Into::into)
-            }
-        }
-    }
-
-    async fn load_file_snapshot(
-        &self,
-        workspace_id: Uuid,
-        commit_id: &[u8],
-        path: &str,
-    ) -> anyhow::Result<Option<Vec<u8>>> {
-        let key = blob_key(workspace_id, commit_id, path);
-        match self.git_storage.fetch_blob(&key).await {
-            Ok(bytes) => Ok(Some(bytes)),
-            Err(err) => {
-                // Treat missing blob as absence (e.g., binary or not stored).
-                if let Some(io_err) = err.downcast_ref::<std::io::Error>() {
-                    if io_err.kind() == std::io::ErrorKind::NotFound {
-                        return Ok(None);
-                    }
-                }
-                if err.to_string().contains("not found") {
-                    return Ok(None);
-                }
-                Err(err.into())
-            }
-        }
-    }
-
-    #[allow(dead_code)]
-    async fn state_from_commit_meta(
-        &self,
-        workspace_id: Uuid,
-        meta: &CommitMeta,
-    ) -> anyhow::Result<HashMap<String, FileSnapshot>> {
-        let mut state: HashMap<String, FileSnapshot> = HashMap::new();
-        for path in meta.file_hash_index.keys() {
-            let Some(bytes) = self
-                .load_file_snapshot(workspace_id, &meta.commit_id, path)
-                .await?
-            else {
-                continue;
-            };
-            let hash = sha256_hex(&bytes);
-            let is_text = std::str::from_utf8(&bytes).is_ok();
-            state.insert(
-                path.clone(),
-                FileSnapshot {
-                    hash,
-                    data: FileSnapshotData::Inline(bytes),
-                    is_text,
-                },
-            );
-        }
-        Ok(state)
-    }
-}
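store_commit_snapshots unwinds already-written blobs in reverse order when a later put fails. The same write-all-or-compensate loop in isolation, over a hypothetical BlobStore trait (the real calls are git_storage.put_blob/delete_blob):

    trait BlobStore {
        async fn put(&self, key: &str, bytes: &[u8]) -> anyhow::Result<()>;
        async fn delete(&self, key: &str) -> anyhow::Result<()>;
    }

    async fn put_all(store: &impl BlobStore, items: &[(String, Vec<u8>)]) -> anyhow::Result<()> {
        let mut written: Vec<&String> = Vec::new();
        for (key, bytes) in items {
            if let Err(err) = store.put(key, bytes).await {
                // Best-effort compensation in reverse order; delete errors are
                // ignored so the original failure reaches the caller intact.
                for key in written.iter().rev() {
                    let _ = store.delete(key).await;
                }
                return Err(err);
            }
            written.push(key);
        }
        Ok(())
    }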
diff --git a/api/crates/infrastructure/src/git/workspace/service/synthetic.rs b/api/crates/infrastructure/src/git/workspace/service/synthetic.rs
deleted file mode 100644
index 9a58c9b6..00000000
--- a/api/crates/infrastructure/src/git/workspace/service/synthetic.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-impl GitWorkspaceService {
-    // Build a synthetic commit from the current workspace state so dirty edits participate in merges.
-    fn build_synthetic_commit(
-        &self,
-        workspace_id: Uuid,
-        repo: &Repository,
-        base_oid: git2::Oid,
-    ) -> anyhow::Result<git2::Oid> {
-        // Collect current workspace state into blobs and index entries (supports nested paths).
-        let current_state = tokio::task::block_in_place(|| {
-            let handle = tokio::runtime::Handle::current();
-            handle.block_on(self.collect_current_state(workspace_id))
-        })?;
-
-        let mut index = repo.index()?;
-        index.clear()?;
-
-        for (path, snapshot) in current_state.iter() {
-            let bytes = tokio::task::block_in_place(|| {
-                let handle = tokio::runtime::Handle::current();
-                handle.block_on(self.snapshot_bytes(snapshot))
-            })?;
-            let blob_oid = repo.blob(&bytes)?;
-
-            let entry = git2::IndexEntry {
-                ctime: git2::IndexTime::new(0, 0),
-                mtime: git2::IndexTime::new(0, 0),
-                dev: 0,
-                ino: 0,
-                mode: 0o100644,
-                uid: 0,
-                gid: 0,
-                file_size: bytes.len() as u32,
-                id: blob_oid,
-                flags: std::cmp::min(path.len(), 0x0fff) as u16,
-                flags_extended: 0,
-                path: path.as_bytes().to_vec(),
-            };
-            index.add(&entry)?;
-        }
-
-        let tree_oid = index.write_tree_to(repo)?;
-        let tree = repo.find_tree(tree_oid)?;
-
-        // Create a synthetic commit with remote as parent to anchor the merge base.
-        // Use an explicit signature so we don't rely on local git config being present.
-        let sig = signature_from_parts("RefMD", "refmd@example.com", Utc::now())?;
-        let commit_oid = repo.commit(
-            Some("refs/heads/synthetic-workspace"),
-            &sig,
-            &sig,
-            "workspace-state",
-            &tree,
-            &[&repo.find_commit(base_oid)?],
-        )?;
-        Ok(commit_oid)
-    }
-}
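The deleted helper bridges from a synchronous libgit2 call path back into async storage code via block_in_place plus Handle::block_on. A minimal sketch of that bridge; it assumes a multi-thread Tokio runtime, since block_in_place panics on a current_thread runtime:

    use tokio::runtime::Handle;

    // Run an async call from sync code already executing on a Tokio worker.
    // block_in_place moves the worker into a blocking slot so the nested
    // block_on cannot starve the runtime.
    fn read_config_blocking() -> anyhow::Result<String> {
        tokio::task::block_in_place(|| {
            Handle::current().block_on(async {
                Ok(tokio::fs::read_to_string("config.toml").await?)
            })
        })
    }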
diff --git a/api/crates/infrastructure/src/git/workspace/sync.rs b/api/crates/infrastructure/src/git/workspace/sync.rs
deleted file mode 100644
index c0ddd069..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-include!("sync/main.rs");
-include!("sync/commit_pack.rs");
-include!("sync/pack_chain.rs");
-include!("sync/changes.rs");
-include!("sync/precompute.rs");
-include!("sync/persist.rs");
diff --git a/api/crates/infrastructure/src/git/workspace/sync/changes.rs b/api/crates/infrastructure/src/git/workspace/sync/changes.rs
deleted file mode 100644
index 1ebe0e2c..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync/changes.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-impl GitWorkspaceService {
-    fn sync_build_change_sets(
-        use_full_scan: bool,
-        dirty_rows: &[DirtyRow],
-        previous_index: &HashMap<String, String>,
-    ) -> (BTreeMap<String, DirtyUpsert>, BTreeSet<String>) {
-        if use_full_scan {
-            return (BTreeMap::new(), BTreeSet::new());
-        }
-
-        let mut upserts: BTreeMap<String, DirtyUpsert> = BTreeMap::new();
-        let mut deletes: BTreeSet<String> = BTreeSet::new();
-
-        for row in dirty_rows {
-            match row.op.as_str() {
-                "upsert" => {
-                    upserts.insert(
-                        row.path.clone(),
-                        DirtyUpsert {
-                            is_text: row.is_text,
-                            content_hash: row.content_hash.clone(),
-                        },
-                    );
-                    deletes.remove(&row.path);
-                }
-                "delete" => {
-                    upserts.remove(&row.path);
-                    deletes.insert(row.path.clone());
-                }
-                _ => {}
-            }
-        }
-
-        upserts.retain(|path, u| {
-            !matches!(
-                (&u.content_hash, previous_index.get(path)),
-                (Some(hnew), Some(hprev)) if hnew == hprev
-            )
-        });
-
-        (upserts, deletes)
-    }
-}
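sync_build_change_sets replays the ordered dirty log so the last op per path wins, then drops upserts whose content hash already matches the previous commit. A stripped-down, runnable version of the replay (hash filtering omitted):

    use std::collections::BTreeSet;

    // Replay an ordered (path, op) log; the last op per path wins.
    fn reduce(log: &[(&str, &str)]) -> (BTreeSet<String>, BTreeSet<String>) {
        let mut upserts = BTreeSet::new();
        let mut deletes = BTreeSet::new();
        for (path, op) in log {
            match *op {
                "upsert" => {
                    upserts.insert(path.to_string());
                    deletes.remove(*path);
                }
                "delete" => {
                    upserts.remove(*path);
                    deletes.insert(path.to_string());
                }
                _ => {}
            }
        }
        (upserts, deletes)
    }

    // reduce(&[("a", "upsert"), ("a", "delete"), ("a", "upsert")])
    //   => upserts = {"a"}, deletes = {}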
diff --git a/api/crates/infrastructure/src/git/workspace/sync/commit_pack.rs b/api/crates/infrastructure/src/git/workspace/sync/commit_pack.rs
deleted file mode 100644
index 888928aa..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync/commit_pack.rs
+++ /dev/null
@@ -1,136 +0,0 @@
-enum SyncBuildCommitPackOutcome {
-    Committed {
-        meta: Box<CommitMeta>,
-        pack_bytes: Vec<u8>,
-        commit_hex: String,
-        pushed: bool,
-    },
-    NoChanges {
-        commit_hex: String,
-        pushed: bool,
-    },
-}
-
-impl GitWorkspaceService {
-    #[allow(clippy::too_many_arguments)]
-    fn sync_build_commit_pack(
-        workspace_id: Uuid,
-        repo: &Repository,
-        latest_meta: Option<&CommitMeta>,
-        branch_name: &str,
-        author_name: &str,
-        author_email: &str,
-        committed_at: DateTime<Utc>,
-        message: &str,
-        use_full_scan: bool,
-        full_entries: Option<&BTreeMap<String, Vec<u8>>>,
-        deletes: &BTreeSet<String>,
-        precomputed_upsert_bytes: &BTreeMap<String, Vec<u8>>,
-        next_file_hash_index: HashMap<String, String>,
-        cfg: Option<&UserGitCfg>,
-        skip_push: bool,
-        force_push: bool,
-    ) -> anyhow::Result<SyncBuildCommitPackOutcome> {
-        // Skip pre-fetch/verify to avoid remote redirect/auth loops; rely on push outcome.
-        // Build sources from either full scan or dirty set (no awaits here).
-        let tree_oid = if use_full_scan {
-            let entries = full_entries.ok_or_else(|| anyhow!("full-scan entries missing"))?;
-            build_tree_from_entries(repo, entries)?
-        } else {
-            // Incremental: reuse previous blobs for unchanged paths.
-            let mut sources: BTreeMap<String, FileSource> = BTreeMap::new();
-            if let Some(prev_meta) = latest_meta {
-                let prev_oids = read_commit_blob_oids(repo, prev_meta.commit_id.as_slice())?;
-                for (path, oid) in prev_oids {
-                    sources.insert(path, FileSource::Oid(oid));
-                }
-            }
-            for d in deletes.iter() {
-                sources.remove(d);
-            }
-            for (path, bytes) in precomputed_upsert_bytes.iter() {
-                sources.insert(path.clone(), FileSource::Bytes(bytes.clone()));
-            }
-            build_tree_from_sources(repo, &sources)?
-        };
-
-        let mut parent_commits = Vec::new();
-        if let Some(prev_meta) = latest_meta {
-            let parent_oid = git2::Oid::from_bytes(&prev_meta.commit_id)?;
-            parent_commits.push(repo.find_commit(parent_oid)?);
-        }
-        let parent_refs: Vec<&Commit> = parent_commits.iter().collect();
-
-        if let Some(parent) = parent_commits.first()
-            && parent.tree_id() == tree_oid
-        {
-            // libgit2 allows creating a new commit even if the tree is identical to the parent.
-            // Avoid generating empty/no-op commits (notably during automated rebuild full scans).
-            let mut pushed = false;
-            if let Some(cfg) = cfg {
-                if !cfg.repository_url.is_empty() && !skip_push {
-                    pushed = perform_push(repo, cfg, branch_name, parent.id(), force_push)?;
-                }
-            }
-            return Ok(SyncBuildCommitPackOutcome::NoChanges {
-                commit_hex: encode_commit_id(parent.id().as_bytes()),
-                pushed,
-            });
-        }
-
-        let tree = repo.find_tree(tree_oid)?;
-        let branch_ref = format!("refs/heads/{}", branch_name);
-        let author_sig = signature_from_parts(author_name, author_email, committed_at)?;
-        let commit_oid = repo.commit(
-            Some(&branch_ref),
-            &author_sig,
-            &author_sig,
-            message,
-            &tree,
-            &parent_refs,
-        )?;
-        let commit_hex = encode_commit_id(commit_oid.as_bytes());
-
-        let mut pack_builder = repo.packbuilder()?;
-        pack_builder.insert_commit(commit_oid)?;
-        // Include parent commit objects to avoid missing bases when applying packs later.
-        for parent in parent_commits.iter() {
-            pack_builder.insert_commit(parent.id())?;
-        }
-        let mut pack_buf = git2::Buf::new();
-        pack_builder.write_buf(&mut pack_buf)?;
-        let pack_bytes = pack_buf.to_vec();
-
-        let message_opt = if message.trim().is_empty() {
-            None
-        } else {
-            Some(message.to_string())
-        };
-
-        let meta = CommitMeta {
-            commit_id: commit_oid.as_bytes().to_vec(),
-            parent_commit_id: latest_meta.map(|c| c.commit_id.clone()),
-            message: message_opt,
-            author_name: Some(author_name.to_string()),
-            author_email: Some(author_email.to_string()),
-            committed_at,
-            pack_key: format!("git/packs/{}/{}.pack", workspace_id, commit_hex.clone()),
-            file_hash_index: next_file_hash_index,
-        };
-
-        let mut pushed = false;
-        if let Some(cfg) = cfg {
-            if !cfg.repository_url.is_empty() && !skip_push {
-                // Propagate push errors so the caller can retry with force.
-                pushed = perform_push(repo, cfg, branch_name, commit_oid, force_push)?;
-            }
-        }
-
-        Ok(SyncBuildCommitPackOutcome::Committed {
-            meta: Box::new(meta),
-            pack_bytes,
-            commit_hex,
-            pushed,
-        })
-    }
-}
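The NoChanges branch above exists because libgit2 will happily create a commit whose tree equals its parent's; the guard is a plain Oid comparison. A short illustrative sketch of the same check, against HEAD rather than an explicit parent list:

    use git2::Repository;

    // True when committing `new_tree` would produce an empty/no-op commit.
    fn is_noop(repo: &Repository, new_tree: git2::Oid) -> anyhow::Result<bool> {
        let head = repo.head()?.peel_to_commit()?;
        Ok(head.tree_id() == new_tree)
    }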
diff --git a/api/crates/infrastructure/src/git/workspace/sync/main.rs b/api/crates/infrastructure/src/git/workspace/sync/main.rs
deleted file mode 100644
index f4765762..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync/main.rs
+++ /dev/null
@@ -1,429 +0,0 @@
-struct SyncPrecompute {
-    precomputed_full_entries: Option<BTreeMap<String, Vec<u8>>>,
-    precomputed_upsert_bytes: BTreeMap<String, Vec<u8>>,
-    changed_text_snapshots: HashMap<String, FileSnapshot>,
-    next_file_hash_index: HashMap<String, String>,
-    files_changed_for_response: u32,
-}
-
-impl GitWorkspaceService {
-    async fn sync_inner(
-        &self,
-        workspace_id: Uuid,
-        req: &GitSyncRequestDto,
-        cfg: Option<&UserGitCfg>,
-    ) -> anyhow::Result<GitSyncOutcome> {
-        let state = self.load_repository_state(workspace_id).await?;
-        let Some((state_initialized, state_default_branch)) = state else {
-            anyhow::bail!("repository not initialized")
-        };
-        if !state_initialized {
-            anyhow::bail!("repository not initialized")
-        }
-
-        let branch_hint = cfg
-            .map(|c| c.branch_name.clone())
-            .unwrap_or(state_default_branch.clone());
-
-        let mut latest_meta = self.ensure_latest_meta(workspace_id).await?;
-        if latest_meta.is_none() {
-            if let Some(cfg) = cfg {
-                if !cfg.repository_url.is_empty() {
-                    // Bootstrap remote history; propagate errors to avoid proceeding without packs.
-                    self.bootstrap_remote_history(workspace_id, cfg, branch_hint.as_str())
-                        .await?;
-                    latest_meta = self.ensure_latest_meta(workspace_id).await?;
-                }
-            }
-        }
-
-        // Resolve branch without holding a DB lock for long.
-        let branch_name = cfg
-            .map(|c| c.branch_name.clone())
-            .unwrap_or(state_default_branch.clone());
-        let force_push = req.force.unwrap_or(false);
-        let force_full_scan = req.full_scan.unwrap_or(false);
-        let skip_push = req.skip_push.unwrap_or(false);
-        let push_required = cfg
-            .as_ref()
-            .map(|c| !c.repository_url.is_empty())
-            .unwrap_or(false)
-            && !skip_push;
-
-        // Ensure latest commit pack exists; if missing, attempt to rebuild from storage/remote or fail early.
-        if let Some(latest) = latest_meta.as_ref() {
-            if self
-                .git_storage
-                .fetch_pack_for_commit(workspace_id, latest.commit_id.as_slice())
-                .await?
-                .is_none()
-            {
-                // Try to restore metadata and pack from storage (if pointer mismatch), else try remote bootstrap.
-                warn!(
-                    workspace_id = %workspace_id,
-                    commit = %encode_commit_id(&latest.commit_id),
-                    "git_sync_missing_latest_pack_detected"
-                );
-                // Attempt backfill from storage; ensure_latest_meta will also update latest pointer.
-                self.ensure_storage_commit_integrity(workspace_id).await?;
-                latest_meta = self.ensure_latest_meta(workspace_id).await?;
-                if let Some(latest2) = latest_meta.as_ref() {
-                    if self
-                        .git_storage
-                        .fetch_pack_for_commit(workspace_id, latest2.commit_id.as_slice())
-                        .await?
-                        .is_none()
-                    {
-                        if let Some(cfg) = cfg {
-                            if !cfg.repository_url.is_empty() {
-                                info!(
-                                    workspace_id = %workspace_id,
-                                    commit = %encode_commit_id(&latest2.commit_id),
-                                    "git_sync_missing_latest_pack_bootstrap_remote"
-                                );
-                                self.bootstrap_remote_history(
-                                    workspace_id,
-                                    cfg,
-                                    branch_hint.as_str(),
-                                )
-                                .await?;
-                                latest_meta = self.ensure_latest_meta(workspace_id).await?;
-                            }
-                        }
-                    }
-                }
-                if let Some(latest3) = latest_meta.as_ref() {
-                    if self
-                        .git_storage
-                        .fetch_pack_for_commit(workspace_id, latest3.commit_id.as_slice())
-                        .await?
-                        .is_none()
-                    {
-                        anyhow::bail!(
-                            "missing pack data for latest commit {}; pull and retry",
-                            encode_commit_id(&latest3.commit_id)
-                        );
-                    }
-                }
-            }
-        }
-
-        let mut storage_latest = self.git_storage.latest_commit(workspace_id).await?;
-        let mut storage_commit_hex = storage_latest
-            .as_ref()
-            .map(|m| encode_commit_id(&m.commit_id));
-        let mut db_commit_hex = latest_meta.as_ref().map(|m| encode_commit_id(&m.commit_id));
-        if storage_commit_hex != db_commit_hex {
-            warn!(
-                workspace_id = %workspace_id,
-                db_commit = ?db_commit_hex,
-                storage_commit = ?storage_commit_hex,
-                "git_commit_pointer_mismatch_detected"
-            );
-            if let Some(storage_meta) = storage_latest.as_ref() {
-                self.backfill_commits_from_storage(workspace_id, storage_meta)
-                    .await?;
-                latest_meta = self.latest_commit_meta(workspace_id).await?;
-            }
-            storage_latest = self.git_storage.latest_commit(workspace_id).await?;
-            storage_commit_hex = storage_latest
-                .as_ref()
-                .map(|m| encode_commit_id(&m.commit_id));
-            db_commit_hex = latest_meta.as_ref().map(|m| encode_commit_id(&m.commit_id));
-            if storage_commit_hex == db_commit_hex {
-                info!(
-                    workspace_id = %workspace_id,
-                    commit = ?storage_commit_hex,
-                    "git_commit_pointer_repaired_from_storage"
-                );
-            } else {
-                warn!(
-                    workspace_id = %workspace_id,
-                    db_commit = ?db_commit_hex,
-                    storage_commit = ?storage_commit_hex,
-                    "git_commit_pointer_attempting_realign"
-                );
-                self.realign_commit_history(
-                    workspace_id,
-                    storage_latest.clone(),
-                    latest_meta.clone(),
-                )
-                .await?;
-                latest_meta = self.ensure_latest_meta(workspace_id).await?;
-                storage_latest = self.git_storage.latest_commit(workspace_id).await?;
-                storage_commit_hex = storage_latest
-                    .as_ref()
-                    .map(|m| encode_commit_id(&m.commit_id));
-                db_commit_hex = latest_meta.as_ref().map(|m| encode_commit_id(&m.commit_id));
-                if storage_commit_hex == db_commit_hex {
-                    info!(
-                        workspace_id = %workspace_id,
-                        commit = ?db_commit_hex,
-                        "git_commit_pointer_repaired_by_prune"
-                    );
-                } else {
-                    error!(
-                        workspace_id = %workspace_id,
-                        db_commit = ?db_commit_hex,
-                        storage_commit = ?storage_commit_hex,
-                        "git_commit_pointer_irreparable"
-                    );
-                    anyhow::bail!(
-                        "repository latest commit mismatch between database ({db_commit_hex:?}) and storage ({storage_commit_hex:?})"
-                    );
-                }
-            }
-        }
-
-        self.ensure_storage_commit_integrity(workspace_id).await?;
-        latest_meta = self.latest_commit_meta(workspace_id).await?;
-
-        let use_full_scan = force_full_scan || latest_meta.is_none();
-
-        let previous_index = latest_meta
-            .as_ref()
-            .map(|c| c.file_hash_index.clone())
-            .unwrap_or_default();
-        let dirty_rows = self.fetch_dirty(workspace_id).await?;
-
-        let (upserts, deletes) =
-            Self::sync_build_change_sets(use_full_scan, &dirty_rows, &previous_index);
-
-        // If still nothing to do, optionally push existing head when a remote is configured.
-        if !use_full_scan && upserts.is_empty() && deletes.is_empty() {
-            if push_required {
-                if let Some(latest) = latest_meta.as_ref() {
-                    // Ensure pack chain exists to materialize the commit for push.
-                    let pack_chain = self
-                        .persist_pack_chain(workspace_id, Some(latest.commit_id.as_slice()))
-                        .await?;
-                    if let Some((temp_dir, pack_paths)) = pack_chain {
-                        let repo = Repository::init_bare(temp_dir.path())?;
-                        apply_pack_files(&repo, &pack_paths)?;
-                        let oid = git2::Oid::from_bytes(&latest.commit_id)?;
-                        let pushed =
-                            perform_push(&repo, cfg.unwrap(), &branch_name, oid, force_push)?;
-                        drop(repo);
-                        drop(temp_dir);
-                        let _ = self.clear_dirty(workspace_id).await;
-                        return Ok(GitSyncOutcome {
-                            files_changed: 0,
-                            commit_hash: Some(encode_commit_id(&latest.commit_id)),
-                            pushed,
-                            message: if pushed {
-                                "push completed".to_string()
-                            } else {
-                                "nothing to push".to_string()
-                            },
-                        });
-                    }
-                }
-            }
-            // Nothing to commit/push: clear any leftover dirty and exit.
-            let _ = self.clear_dirty(workspace_id).await;
-            return Ok(GitSyncOutcome {
-                files_changed: 0,
-                commit_hash: latest_meta.map(|c| encode_commit_id(&c.commit_id)),
-                pushed: false,
-                message: "nothing to commit".to_string(),
-            });
-        }
-
-        let committed_at = Utc::now();
-        let author_name = "RefMD".to_string();
-        let author_email = "refmd@example.com".to_string();
-        let message = req
-            .message
-            .clone()
-            .unwrap_or_else(|| "RefMD sync".to_string());
-
-        let precompute = self
-            .sync_precompute_tree_inputs(
-                workspace_id,
-                use_full_scan,
-                previous_index.clone(),
-                &upserts,
-                &deletes,
-            )
-            .await?;
-        let mut precomputed_full_entries = precompute.precomputed_full_entries;
-        let precomputed_upsert_bytes = precompute.precomputed_upsert_bytes;
-        let changed_text_snapshots = precompute.changed_text_snapshots;
-        let mut next_file_hash_index = precompute.next_file_hash_index;
-        let mut files_changed_for_response = precompute.files_changed_for_response;
-
-        // Ensure full-scan entries are available before we touch libgit2 types.
-        if use_full_scan && precomputed_full_entries.is_none() {
-            next_file_hash_index.clear();
-            let current = self.collect_current_state(workspace_id).await?;
-            let mut entries: BTreeMap<String, Vec<u8>> = BTreeMap::new();
-            for (path, snapshot) in current.iter() {
-                let bytes = self.snapshot_bytes(snapshot).await?;
-                entries.insert(path.clone(), bytes);
-                next_file_hash_index.insert(path.clone(), snapshot.hash.clone());
-            }
-            files_changed_for_response = next_file_hash_index.len() as u32;
-            precomputed_full_entries = Some(entries);
-        }
-
-        let mut previous_pack = self
-            .sync_load_previous_pack_chain(workspace_id, cfg, &mut latest_meta)
-            .await?;
-
-        let commit_build = {
-            let temp_dir = TempDirBuilder::new()
-                .prefix("git-sync-")
-                .tempdir()
-                .map_err(|e| anyhow::anyhow!(e))?;
-            let repo = Repository::init_bare(temp_dir.path())?;
-
-            if let Some((_, ref pack_paths)) = previous_pack {
-                // Apply full chain to ensure delta bases are present.
-                if let Err(err) = apply_pack_files(&repo, pack_paths) {
-                    let lower = err.to_string().to_lowercase();
-                    let missing_obj = lower.contains("missing") && lower.contains("object");
-                    if !missing_obj {
-                        return Err(err);
-                    }
-
-                    // Try to repair packs by re-bootstrap from remote, then retry apply once more.
-                    warn!(
-                        workspace_id = %workspace_id,
-                        error = %err,
-                        "git_sync_pack_missing_objects_retry_bootstrap"
-                    );
-                    if let Some(cfg) = cfg {
-                        if !cfg.repository_url.is_empty() {
-                            previous_pack = self
-                                .sync_rebuild_pack_chain_from_remote(
-                                    workspace_id,
-                                    cfg,
-                                    &branch_name,
-                                    latest_meta.as_ref(),
-                                )
-                                .await?;
-                            if let Some((_, ref pack_paths_retry)) = previous_pack {
-                                if apply_pack_files(&repo, pack_paths_retry).is_err() {
-                                    // Last resort: recover objects and retry once more.
-                                    warn!(
-                                        workspace_id = %workspace_id,
-                                        "git_sync_pack_retry_still_missing_recovering_objects"
-                                    );
-                                    previous_pack = self
-                                        .sync_recover_objects_and_reload_pack_chain(
-                                            workspace_id,
-                                            cfg,
-                                            &mut latest_meta,
-                                        )
-                                        .await?;
-                                    if let Some((_, ref pack_paths_retry2)) = previous_pack {
-                                        apply_pack_files(&repo, pack_paths_retry2)?;
-                                    } else {
-                                        anyhow::bail!(
-                                            "missing pack objects after recovery; pull/import required before sync"
-                                        );
-                                    }
-                                }
-                            } else {
-                                anyhow::bail!(
-                                    "missing pack objects after bootstrap; pull/import required before sync"
-                                );
-                            }
-                        }
-                    }
-
-                    anyhow::bail!(
-                        "missing pack objects for {}; pull/import to repair history",
-                        latest_meta
-                            .as_ref()
-                            .map(|m| encode_commit_id(&m.commit_id))
-                            .unwrap_or_else(|| "unknown".to_string())
-                    );
-                }
-            }
-
-            let full_entries = if use_full_scan {
-                Some(
-                    precomputed_full_entries
-                        .as_ref()
-                        .ok_or_else(|| anyhow!("full-scan entries missing"))?,
-                )
-            } else {
-                None
-            };
-            let commit_build = Self::sync_build_commit_pack(
-                workspace_id,
-                &repo,
-                latest_meta.as_ref(),
-                branch_name.as_str(),
-                author_name.as_str(),
-                author_email.as_str(),
-                committed_at,
-                message.as_str(),
-                use_full_scan,
-                full_entries,
-                &deletes,
-                &precomputed_upsert_bytes,
-                next_file_hash_index,
-                cfg,
-                skip_push,
-                force_push,
-            )?;
-
-            drop(repo);
-            let _ = temp_dir.close();
-
-            commit_build
-        };
-
-        if let Some((dir, _)) = previous_pack {
-            drop(dir);
-        }
-
-        match commit_build {
-            SyncBuildCommitPackOutcome::NoChanges { commit_hex, pushed } => {
-                let _ = self.clear_dirty(workspace_id).await;
-                Ok(GitSyncOutcome {
-                    files_changed: 0,
-                    commit_hash: Some(commit_hex),
-                    pushed,
-                    message: if push_required && pushed {
-                        "push completed".to_string()
-                    } else {
-                        "nothing to commit".to_string()
-                    },
-                })
-            }
-            SyncBuildCommitPackOutcome::Committed {
-                meta,
-                pack_bytes,
-                commit_hex,
-                pushed,
-            } => {
-                self.sync_persist_commit(
-                    workspace_id,
-                    use_full_scan,
-                    meta.as_ref(),
-                    &pack_bytes,
-                    &changed_text_snapshots,
-                    latest_meta.as_ref(),
-                )
-                .await?;
-                let outcome_message = if skip_push {
-                    "sync completed (push skipped)".to_string()
-                } else if push_required && !pushed {
-                    "sync completed (push not performed)".to_string()
-                } else {
-                    "sync completed".to_string()
-                };
-
-                Ok(GitSyncOutcome {
-                    files_changed: files_changed_for_response,
-                    commit_hash: Some(commit_hex),
-                    pushed,
-                    message: outcome_message,
-                })
-            }
-        }
-    }
-}
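sync_inner's pack-application fallback is an escalation ladder: try, re-bootstrap from the remote, try again, recover individual objects, then one final try. The control shape in isolation, with closures standing in for the real helpers:

    // Escalating retry: cheap repair first, expensive repair second, then the
    // final attempt's error propagates to the caller.
    fn apply_with_recovery(
        mut apply: impl FnMut() -> anyhow::Result<()>,
        bootstrap: impl FnOnce() -> anyhow::Result<()>,
        recover: impl FnOnce() -> anyhow::Result<()>,
    ) -> anyhow::Result<()> {
        if apply().is_ok() {
            return Ok(());
        }
        bootstrap()?; // re-fetch the pack chain from the remote
        if apply().is_ok() {
            return Ok(());
        }
        recover()?; // last resort: recover individual missing objects
        apply()
    }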
diff --git a/api/crates/infrastructure/src/git/workspace/sync/pack_chain.rs b/api/crates/infrastructure/src/git/workspace/sync/pack_chain.rs
deleted file mode 100644
index 86a4f431..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync/pack_chain.rs
+++ /dev/null
@@ -1,80 +0,0 @@
-impl GitWorkspaceService {
-    async fn sync_load_previous_pack_chain(
-        &self,
-        workspace_id: Uuid,
-        cfg: Option<&UserGitCfg>,
-        latest_meta: &mut Option<CommitMeta>,
-    ) -> anyhow::Result<Option<(TempDir, Vec<PathBuf>)>> {
-        let Some(prev_meta) = latest_meta.as_ref() else {
-            return Ok(None);
-        };
-        let prev_commit_hex = encode_commit_id(&prev_meta.commit_id);
-        match self
-            .persist_pack_chain(workspace_id, Some(prev_meta.commit_id.as_slice()))
-            .await?
-        {
-            Some(chain) => Ok(Some(chain)),
-            None => {
-                // Attempt to repair from remote and retry once.
-                if let Some(cfg) = cfg {
-                    if !cfg.repository_url.is_empty() {
-                        warn!(
-                            workspace_id = %workspace_id,
-                            commit = %prev_commit_hex,
-                            "git_sync_missing_pack_chain_recover"
-                        );
-                        self.recover_missing_objects(workspace_id, cfg).await?;
-                        *latest_meta = self.ensure_latest_meta(workspace_id).await?;
-                        if let Some(latest) = latest_meta.as_ref() {
-                            let chain = self
-                                .persist_pack_chain(
-                                    workspace_id,
-                                    Some(latest.commit_id.as_slice()),
-                                )
-                                .await?;
-                            if chain.is_some() {
-                                return Ok(chain);
-                            }
-                        }
-                    }
-                }
-                warn!(workspace_id = %workspace_id, "git_sync_missing_pack_chain_abort");
-                anyhow::bail!(
-                    "missing pack data for current head {}; pull/import required before sync",
-                    prev_commit_hex
-                );
-            }
-        }
-    }
-
-    async fn sync_rebuild_pack_chain_from_remote(
-        &self,
-        workspace_id: Uuid,
-        cfg: &UserGitCfg,
-        branch_name: &str,
-        latest_meta: Option<&CommitMeta>,
-    ) -> anyhow::Result<Option<(TempDir, Vec<PathBuf>)>> {
-        self.bootstrap_remote_history(workspace_id, cfg, branch_name)
-            .await?;
-        self.persist_pack_chain(
-            workspace_id,
-            latest_meta.map(|m| m.commit_id.as_slice()),
-        )
-        .await
-    }
-
-    async fn sync_recover_objects_and_reload_pack_chain(
-        &self,
-        workspace_id: Uuid,
-        cfg: &UserGitCfg,
-        latest_meta: &mut Option<CommitMeta>,
-    ) -> anyhow::Result<Option<(TempDir, Vec<PathBuf>)>> {
-        self.recover_missing_objects(workspace_id, cfg).await?;
-        *latest_meta = self.ensure_latest_meta(workspace_id).await?;
-        self.persist_pack_chain(
-            workspace_id,
-            latest_meta.as_ref().map(|m| m.commit_id.as_slice()),
-        )
-        .await
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/sync/persist.rs b/api/crates/infrastructure/src/git/workspace/sync/persist.rs
deleted file mode 100644
index 8774cc3a..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync/persist.rs
+++ /dev/null
@@ -1,131 +0,0 @@
-impl GitWorkspaceService {
-    async fn sync_persist_commit(
-        &self,
-        workspace_id: Uuid,
-        use_full_scan: bool,
-        meta: &CommitMeta,
-        pack_bytes: &[u8],
-        changed_text_snapshots: &HashMap<String, FileSnapshot>,
-        latest_meta_for_rollback: Option<&CommitMeta>,
-    ) -> anyhow::Result<()> {
-        let mut tx = self.pool.begin().await?;
-        let repo_row =
-            sqlx::query("SELECT initialized FROM git_repository_state WHERE workspace_id = $1")
-                .bind(workspace_id)
-                .fetch_optional(&mut *tx)
-                .await?;
-        let Some(repo_row) = repo_row else {
-            tx.rollback().await.ok();
-            anyhow::bail!("repository not initialized")
-        };
-        let initialized: bool = repo_row.get("initialized");
-        if !initialized {
-            tx.rollback().await.ok();
-            anyhow::bail!("repository not initialized")
-        }
-
-        sqlx::query(
-            r#"INSERT INTO git_commits (
-                commit_id,
-                parent_commit_id,
-                workspace_id,
-                message,
-                author_name,
-                author_email,
-                committed_at,
-                pack_key,
-                file_hash_index
-            ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)"#,
-        )
-        .bind(meta.commit_id.clone())
-        .bind(meta.parent_commit_id.clone())
-        .bind(workspace_id)
-        .bind(meta.message.clone())
-        .bind(meta.author_name.clone())
-        .bind(meta.author_email.clone())
-        .bind(meta.committed_at)
-        .bind(meta.pack_key.clone())
-        .bind(Json(&meta.file_hash_index))
-        .execute(&mut *tx)
-        .await?;
-
-        sqlx::query("UPDATE git_repository_state SET updated_at = now() WHERE workspace_id = $1")
-            .bind(workspace_id)
-            .execute(&mut *tx)
-            .await?;
-
-        let snapshot_keys = if use_full_scan {
-            let current = self.collect_current_state(workspace_id).await?;
-            match self
-                .store_commit_snapshots(workspace_id, &meta.commit_id, &current)
-                .await
-            {
-                Ok(keys) => keys,
-                Err(err) => {
-                    tx.rollback().await.ok();
-                    return Err(err);
-                }
-            }
-        } else {
-            match self
-                .store_commit_snapshots(workspace_id, &meta.commit_id, changed_text_snapshots)
-                .await
-            {
-                Ok(keys) => keys,
-                Err(err) => {
-                    tx.rollback().await.ok();
-                    return Err(err);
-                }
-            }
-        };
-
-        if let Err(err) = self
-            .git_storage
-            .store_pack(workspace_id, pack_bytes, meta)
-            .await
-        {
-            for key in snapshot_keys.iter().rev() {
-                let _ = self.git_storage.delete_blob(key).await;
-            }
-            tx.rollback().await.ok();
-            return Err(err.into());
-        }
-
-        if let Err(err) = self
-            .git_storage
-            .set_latest_commit(workspace_id, Some(meta))
-            .await
-        {
-            let _ = self
-                .git_storage
-                .delete_pack(workspace_id, &meta.commit_id)
-                .await;
-            for key in snapshot_keys.iter().rev() {
-                let _ = self.git_storage.delete_blob(key).await;
-            }
-            tx.rollback().await.ok();
-            return Err(err.into());
-        }
-
-        if let Err(err) = tx.commit().await {
-            let _ = self
-                .git_storage
-                .delete_pack(workspace_id, &meta.commit_id)
-                .await;
-            for key in snapshot_keys.iter().rev() {
-                let _ = self.git_storage.delete_blob(key).await;
-            }
-            let _ = self
-                .git_storage
-                .set_latest_commit(workspace_id, latest_meta_for_rollback)
-                .await;
-            return Err(err.into());
-        }
-
-        self.clear_dirty(workspace_id).await.map_err(|err| {
-            error!(workspace_id = %workspace_id, error = %err, "git_import_clear_dirty_failed");
-            err
-        })?;
-        Ok(())
-    }
-}
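sync_persist_commit coordinates a DB transaction with non-transactional object storage by committing the transaction last and compensating storage writes in reverse on any failure. The ordering in isolation (closures stand in for the real sqlx and git_storage calls):

    // DB commit happens last; every storage effect before it has a compensation.
    fn persist_ordered(
        put_blobs: impl FnOnce() -> anyhow::Result<Vec<String>>,
        put_pack: impl FnOnce() -> anyhow::Result<()>,
        delete_blob: impl Fn(&str),
        delete_pack: impl FnOnce(),
        commit_db_tx: impl FnOnce() -> anyhow::Result<()>,
    ) -> anyhow::Result<()> {
        let blobs = put_blobs()?;
        if let Err(err) = put_pack() {
            for b in blobs.iter().rev() {
                delete_blob(b); // undo the blob writes
            }
            return Err(err); // the staged DB rows never commit
        }
        if let Err(err) = commit_db_tx() {
            delete_pack(); // undo everything in reverse order
            for b in blobs.iter().rev() {
                delete_blob(b);
            }
            return Err(err);
        }
        Ok(())
    }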
diff --git a/api/crates/infrastructure/src/git/workspace/sync/precompute.rs b/api/crates/infrastructure/src/git/workspace/sync/precompute.rs
deleted file mode 100644
index 96522526..00000000
--- a/api/crates/infrastructure/src/git/workspace/sync/precompute.rs
+++ /dev/null
@@ -1,106 +0,0 @@
-impl GitWorkspaceService {
-    async fn sync_precompute_tree_inputs(
-        &self,
-        workspace_id: Uuid,
-        use_full_scan: bool,
-        previous_index: HashMap<String, String>,
-        upserts: &BTreeMap<String, DirtyUpsert>,
-        deletes: &BTreeSet<String>,
-    ) -> anyhow::Result<SyncPrecompute> {
-        // Precompute data needed for tree build and meta before creating libgit2 objects.
-        // This avoids holding non-Send libgit2 types across await points.
-        let mut precomputed_full_entries: Option<BTreeMap<String, Vec<u8>>> = None;
-        let mut precomputed_upsert_bytes: BTreeMap<String, Vec<u8>> = BTreeMap::new();
-        let mut changed_text_snapshots: HashMap<String, FileSnapshot> = HashMap::new();
-        let mut next_file_hash_index: HashMap<String, String> = previous_index;
-        let files_changed_for_response: u32;
-
-        if use_full_scan {
-            next_file_hash_index.clear();
-            let current = self.collect_current_state(workspace_id).await?;
-            let mut entries: BTreeMap<String, Vec<u8>> = BTreeMap::new();
-            for (path, snapshot) in current.iter() {
-                let bytes = self.snapshot_bytes(snapshot).await?;
-                entries.insert(path.clone(), bytes);
-                next_file_hash_index.insert(path.clone(), snapshot.hash.clone());
-            }
-            files_changed_for_response = next_file_hash_index.len() as u32;
-            precomputed_full_entries = Some(entries);
-        } else {
-            let mut stale_paths: Vec<String> = Vec::new();
-            for (path, up) in upserts.iter() {
-                if up.is_text {
-                    match self
-                        .export_markdown_for_repo_path(workspace_id, path)
-                        .await?
-                    {
-                        Some((bytes, hash)) => {
-                            precomputed_upsert_bytes.insert(path.clone(), bytes.clone());
-                            next_file_hash_index.insert(path.clone(), hash.clone());
-                            changed_text_snapshots.insert(
-                                path.clone(),
-                                FileSnapshot {
-                                    hash,
-                                    data: FileSnapshotData::Inline(bytes),
-                                    is_text: true,
-                                },
-                            );
-                        }
-                        None => {
-                            stale_paths.push(path.clone());
-                        }
-                    }
-                    continue;
-                }
-
-                let storage_rel = format!("{}/{}", workspace_id, path);
-                let abs = self.storage.absolute_from_relative(&storage_rel);
-                match self.storage.read_bytes(abs.as_path()).await {
-                    Ok(bytes) => {
-                        precomputed_upsert_bytes.insert(path.clone(), bytes.clone());
-                        let hash = match up.content_hash.as_ref() {
-                            Some(h) => h.clone(),
-                            None => sha256_hex(&bytes),
-                        };
-                        next_file_hash_index.insert(path.clone(), hash);
-                    }
-                    Err(e) => {
-                        let skip = e
-                            .downcast_ref::<std::io::Error>()
-                            .map(|ioe| ioe.kind() == ErrorKind::NotFound)
-                            .unwrap_or_else(|| e.to_string().to_lowercase().contains("not found"));
-                        if skip {
-                            stale_paths.push(path.clone());
-                            continue;
-                        } else {
-                            return Err(e.into());
-                        }
-                    }
-                }
-            }
-            if !stale_paths.is_empty() {
-                for p in stale_paths {
-                    let _ = sqlx::query(
-                        "DELETE FROM git_dirty_files WHERE workspace_id = $1 AND path = $2",
-                    )
-                    .bind(workspace_id)
-                    .bind(&p)
-                    .execute(&self.pool)
-                    .await;
-                }
-            }
-            for d in deletes.iter() {
-                next_file_hash_index.remove(d);
-            }
-            files_changed_for_response = (upserts.len() + deletes.len()) as u32;
-        }
-
-        Ok(SyncPrecompute {
-            precomputed_full_entries,
-            precomputed_upsert_bytes,
-            changed_text_snapshots,
-            next_file_hash_index,
-            files_changed_for_response,
-        })
-    }
-}
diff --git a/api/crates/infrastructure/src/git/workspace/workspace_service.rs b/api/crates/infrastructure/src/git/workspace/workspace_service.rs
deleted file mode 100644
index d1053d18..00000000
--- a/api/crates/infrastructure/src/git/workspace/workspace_service.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-include!("service/history.rs");
-include!("service/state.rs");
-include!("service/synthetic.rs");
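The precompute pass exists because libgit2 handles are not Send, so every await runs first and the git work then proceeds on plain owned data. One common way to get the same guarantee (a sketch, not the deleted code, which keeps the repo on the same thread instead):

    async fn gather_then_build() -> anyhow::Result<()> {
        // Phase 1: all awaits complete before any non-Send type exists.
        let inputs: Vec<(String, Vec<u8>)> =
            vec![("README.md".into(), b"hello".to_vec())]; // stand-in for async loads
        // Phase 2: the !Send repository lives entirely inside one blocking section.
        tokio::task::spawn_blocking(move || -> anyhow::Result<()> {
            let dir = tempfile::tempdir()?;
            let repo = git2::Repository::init_bare(dir.path())?;
            for (_path, bytes) in &inputs {
                repo.blob(bytes)?; // no await can interleave here
            }
            Ok(())
        })
        .await??;
        Ok(())
    }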
diff --git a/api/crates/infrastructure/src/identity/db/repositories/migration_repository_sqlx/mod.rs b/api/crates/infrastructure/src/identity/db/repositories/migration_repository_sqlx/mod.rs
new file mode 100644
index 00000000..d130f5e2
--- /dev/null
+++ b/api/crates/infrastructure/src/identity/db/repositories/migration_repository_sqlx/mod.rs
@@ -0,0 +1,314 @@
+//! SQLx implementation of the migration repository.
+
+use async_trait::async_trait;
+use sqlx::{Postgres, Row, Transaction};
+use uuid::Uuid;
+
+use crate::core::db::PgPool;
+use application::core::ports::errors::PortResult;
+use application::identity::ports::migration_repository::{
+    MigrationDocument, MigrationFile, MigrationRepository, MigrationSnapshot,
+};
+
+/// SQLx implementation of the migration repository.
+///
+/// This provides read-only access to plaintext data for migration.
+pub struct SqlxMigrationRepository {
+    pool: PgPool,
+}
+
+impl SqlxMigrationRepository {
+    pub fn new(pool: PgPool) -> Self {
+        Self { pool }
+    }
+
+    /// Get a reference to the pool for transaction creation.
+    pub fn pool(&self) -> &PgPool {
+        &self.pool
+    }
+}
+
+#[async_trait]
+impl MigrationRepository for SqlxMigrationRepository {
+    async fn list_user_documents(&self, user_id: Uuid) -> PortResult<Vec<MigrationDocument>> {
+        let out: anyhow::Result<Vec<MigrationDocument>> = async {
+            // Get all documents from workspaces where the user is a member
+            let rows = sqlx::query(
+                r#"
+                SELECT d.id, d.workspace_id, d.title, d.created_at
+                FROM documents d
+                INNER JOIN workspace_members wm ON d.workspace_id = wm.workspace_id
+                WHERE wm.user_id = $1
+                  AND d.encrypted_title IS NULL
+                ORDER BY d.created_at
+                "#,
+            )
+            .bind(user_id)
+            .fetch_all(&self.pool)
+            .await?;
+
+            let documents = rows
+                .into_iter()
+                .map(|row| MigrationDocument {
+                    id: row.get("id"),
+                    workspace_id: row.get("workspace_id"),
+                    title: row.get("title"),
+                    created_at: row.get("created_at"),
+                })
+                .collect();
+
+            Ok(documents)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn list_user_files(&self, user_id: Uuid) -> PortResult<Vec<MigrationFile>> {
+        let out: anyhow::Result<Vec<MigrationFile>> = async {
+            // Get all plaintext files (not yet encrypted) from workspaces where the user is a member
+            let rows = sqlx::query(
+                r#"
+                SELECT f.id, f.document_id, d.workspace_id, f.filename, f.content_type, f.storage_path
+                FROM files f
+                INNER JOIN documents d ON f.document_id = d.id
+                INNER JOIN workspace_members wm ON d.workspace_id = wm.workspace_id
+                WHERE wm.user_id = $1
+                  AND f.encrypted_metadata IS NULL
+                ORDER BY f.created_at
+                "#,
+            )
+            .bind(user_id)
+            .fetch_all(&self.pool)
+            .await?;
+
+            let files = rows
+                .into_iter()
+                .map(|row| MigrationFile {
+                    id: row.get("id"),
+                    document_id: row.get("document_id"),
+                    workspace_id: row.get("workspace_id"),
+                    filename: row.get("filename"),
+                    content_type: row.get("content_type"),
+                    storage_path: row.get("storage_path"),
+                })
+                .collect();
+
+            Ok(files)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
+
+// ============================================================================
+// Transactional operations
+// ============================================================================
+
+impl SqlxMigrationRepository {
+    /// Update a document with encrypted title (within transaction).
+    pub async fn update_encrypted_title_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        document_id: Uuid,
+        encrypted_title: &[u8],
+        nonce: &[u8],
+    ) -> anyhow::Result<()> {
+        sqlx::query(
+            r#"
+            UPDATE documents
+            SET encrypted_title = $2,
+                encrypted_title_nonce = $3,
+                updated_at = now()
+            WHERE id = $1
+            "#,
+        )
+        .bind(document_id)
+        .bind(encrypted_title)
+        .bind(nonce)
+        .execute(tx.as_mut())
+        .await?;
+
+        Ok(())
+    }
+
+    /// Create or update an encrypted snapshot (within transaction).
+    pub async fn upsert_encrypted_snapshot_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        document_id: Uuid,
+        encrypted_snapshot: &[u8],
+        nonce: &[u8],
+        seq_at_snapshot: i64,
+    ) -> anyhow::Result<()> {
+        // Get the next version number
+        let next_version: i64 = sqlx::query_scalar(
+            r#"
+            SELECT COALESCE(MAX(version), 0) + 1
+            FROM document_snapshots
+            WHERE document_id = $1
+            "#,
+        )
+        .bind(document_id)
+        .fetch_one(tx.as_mut())
+        .await?;
+
+        // Insert new encrypted snapshot
+        sqlx::query(
+            r#"
+            INSERT INTO document_snapshots (document_id, version, snapshot, nonce, seq_at_snapshot, created_at)
+            VALUES ($1, $2, $3, $4, $5, now())
+            "#,
+        )
+        .bind(document_id)
+        .bind(next_version)
+        .bind(encrypted_snapshot)
+        .bind(nonce)
+        .bind(seq_at_snapshot)
+        .execute(tx.as_mut())
+        .await?;
+
+        Ok(())
+    }
+
+    /// Delete all plaintext updates for a document (within transaction).
+    pub async fn clear_plaintext_updates_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        document_id: Uuid,
+    ) -> anyhow::Result<u64> {
+        // Delete all plaintext updates (those without nonce)
+        let result = sqlx::query(
+            r#"
+            DELETE FROM document_updates
+            WHERE document_id = $1
+              AND nonce IS NULL
+            "#,
+        )
+        .bind(document_id)
+        .execute(tx.as_mut())
+        .await?;
+
+        Ok(result.rows_affected())
+    }
+
+    /// Update a file's metadata with encrypted values (within transaction).
+    pub async fn update_encrypted_file_metadata_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        file_id: Uuid,
+        encrypted_metadata: &[u8],
+        nonce: &[u8],
+        encrypted_hash: &str,
+    ) -> anyhow::Result<()> {
+        sqlx::query(
+            r#"
+            UPDATE files
+            SET encrypted_metadata = $2,
+                encrypted_metadata_nonce = $3,
+                encrypted_hash = $4,
+                updated_at = now()
+            WHERE id = $1
+            "#,
+        )
+        .bind(file_id)
+        .bind(encrypted_metadata)
+        .bind(nonce)
+        .bind(encrypted_hash)
+        .execute(tx.as_mut())
+        .await?;
+
+        Ok(())
+    }
+
+    /// Clear plaintext title from a document (within transaction).
+    pub async fn clear_plaintext_title_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        document_id: Uuid,
+    ) -> anyhow::Result<()> {
+        sqlx::query(
+            r#"
+            UPDATE documents
+            SET title = '[encrypted]',
+                updated_at = now()
+            WHERE id = $1
+              AND encrypted_title IS NOT NULL
+            "#,
+        )
+        .bind(document_id)
+        .execute(tx.as_mut())
+        .await?;
+
+        Ok(())
+    }
+
+    /// Clear plaintext metadata from a file (within transaction).
+    pub async fn clear_plaintext_file_metadata_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        file_id: Uuid,
+    ) -> anyhow::Result<()> {
+        sqlx::query(
+            r#"
+            UPDATE files
+            SET filename = '[encrypted]',
+                content_type = NULL,
+                updated_at = now()
+            WHERE id = $1
+              AND encrypted_metadata IS NOT NULL
+            "#,
+        )
+        .bind(file_id)
+        .execute(tx.as_mut())
+        .await?;
+
+        Ok(())
+    }
+
+    /// Get the latest Yjs snapshot for a document (within transaction).
+    pub async fn get_document_snapshot_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        document_id: Uuid,
+    ) -> anyhow::Result<Option<MigrationSnapshot>> {
+        let row = sqlx::query(
+            r#"
+            SELECT document_id, version, snapshot, seq_at_snapshot
+            FROM document_snapshots
+            WHERE document_id = $1
+            ORDER BY version DESC
+            LIMIT 1
+            "#,
+        )
+        .bind(document_id)
+        .fetch_optional(tx.as_mut())
+        .await?;
+
+        Ok(row.map(|row| MigrationSnapshot {
+            document_id: row.get("document_id"),
+            version: row.get("version"),
+            data: row.get("snapshot"),
+            seq_at_snapshot: row.get("seq_at_snapshot"),
+        }))
+    }
+
+    /// Get the maximum sequence number for a document's updates (within transaction).
+    pub async fn get_document_max_seq_tx(
+        &self,
+        tx: &mut Transaction<'_, Postgres>,
+        document_id: Uuid,
+    ) -> anyhow::Result<Option<i64>> {
+        let row = sqlx::query(
+            r#"
+            SELECT MAX(seq) as max_seq
+            FROM document_updates
+            WHERE document_id = $1
+            "#,
+        )
+        .bind(document_id)
+        .fetch_one(tx.as_mut())
+        .await?;
+
+        Ok(row.get("max_seq"))
+    }
+}
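upsert_encrypted_snapshot_tx allocates versions with MAX(version) + 1 inside the caller's transaction. The pattern in isolation, with a simplified signature over the same table; note that read-then-insert only stays duplicate-free given the surrounding transaction plus either a unique (document_id, version) index or a single migration writer:

    // Monotonic versioning: both statements share the caller's transaction.
    async fn append_snapshot(
        tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
        document_id: uuid::Uuid,
        payload: &[u8],
    ) -> anyhow::Result<()> {
        let next: i64 = sqlx::query_scalar(
            "SELECT COALESCE(MAX(version), 0) + 1 FROM document_snapshots WHERE document_id = $1",
        )
        .bind(document_id)
        .fetch_one(tx.as_mut())
        .await?;
        sqlx::query(
            "INSERT INTO document_snapshots (document_id, version, snapshot, created_at)
             VALUES ($1, $2, $3, now())",
        )
        .bind(document_id)
        .bind(next)
        .bind(payload)
        .execute(tx.as_mut())
        .await?;
        Ok(())
    }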
diff --git a/api/crates/infrastructure/src/identity/db/repositories/migration_tx_runner_sqlx.rs b/api/crates/infrastructure/src/identity/db/repositories/migration_tx_runner_sqlx.rs
new file mode 100644
index 00000000..014b5e3b
--- /dev/null
+++ b/api/crates/infrastructure/src/identity/db/repositories/migration_tx_runner_sqlx.rs
@@ -0,0 +1,274 @@
+//! SQLx implementation of the migration transaction runner.
+
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use sqlx::{Postgres, Transaction};
+use uuid::Uuid;
+
+use application::core::ports::errors::PortResult;
+use application::identity::ports::migration_repository::MigrationSnapshot;
+use application::identity::ports::migration_tx_runner::{
+    BoxedTxResult, DocumentKeysRepositoryTx, MigrationRepositoryTx, MigrationTx,
+    MigrationTxFn, MigrationTxRunner, UserKeysRepositoryTx, WorkspaceKeysRepositoryTx,
+};
+
+use crate::core::db::PgPool;
+
+use super::migration_repository_sqlx::SqlxMigrationRepository;
+
+/// SQLx implementation of the migration transaction runner.
+pub struct SqlxMigrationTxRunner {
+    pool: PgPool,
+    migration_repo: Arc<SqlxMigrationRepository>,
+}
+
+impl SqlxMigrationTxRunner {
+    pub fn new(pool: PgPool, migration_repo: Arc<SqlxMigrationRepository>) -> Self {
+        Self {
+            pool,
+            migration_repo,
+        }
+    }
+}
+
+#[async_trait]
+impl MigrationTxRunner for SqlxMigrationTxRunner {
+    async fn run_boxed(&self, f: MigrationTxFn) -> anyhow::Result<BoxedTxResult> {
+        let mut tx = self.pool.begin().await?;
+
+        let mut uow = SqlxMigrationTx {
+            migration_repo: self.migration_repo.as_ref(),
+            tx: &mut tx,
+        };
+
+        let result = f(&mut uow).await;
+        match result {
+            Ok(out) => {
+                tx.commit().await?;
+                Ok(out)
+            }
+            Err(err) => {
+                tx.rollback().await.ok();
+                Err(err)
+            }
+        }
+    }
+}
+
+/// SQLx transaction context for migration.
+struct SqlxMigrationTx<'repo, 'tx, 'c> {
+    migration_repo: &'repo SqlxMigrationRepository,
+    tx: &'tx mut Transaction<'c, Postgres>,
+}
+
+impl<'repo, 'tx, 'c> MigrationTx for SqlxMigrationTx<'repo, 'tx, 'c> {
+    fn migration(&mut self) -> &mut dyn MigrationRepositoryTx {
+        self
+    }
+
+    fn document_keys(&mut self) -> &mut dyn DocumentKeysRepositoryTx {
+        self
+    }
+
+    fn workspace_keys(&mut self) -> &mut dyn WorkspaceKeysRepositoryTx {
+        self
+    }
+
+    fn user_keys(&mut self) -> &mut dyn UserKeysRepositoryTx {
+        self
+    }
+}
+
+// ============================================================================
+// MigrationRepositoryTx implementation
+// ============================================================================
+
+#[async_trait]
+impl<'repo, 'tx, 'c> MigrationRepositoryTx for SqlxMigrationTx<'repo, 'tx, 'c> {
+    async fn update_encrypted_title(
+        &mut self,
+        document_id: Uuid,
+        encrypted_title: &[u8],
+        nonce: &[u8],
+    ) -> PortResult<()> {
+        self.migration_repo
+            .update_encrypted_title_tx(self.tx, document_id, encrypted_title, nonce)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn upsert_encrypted_snapshot(
+        &mut self,
+        document_id: Uuid,
+        encrypted_snapshot: &[u8],
+        nonce: &[u8],
+        seq_at_snapshot: i64,
+    ) -> PortResult<()> {
+        self.migration_repo
+            .upsert_encrypted_snapshot_tx(self.tx, document_id, encrypted_snapshot, nonce, seq_at_snapshot)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn clear_plaintext_updates(&mut self, document_id: Uuid) -> PortResult<u64> {
+        self.migration_repo
+            .clear_plaintext_updates_tx(self.tx, document_id)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn update_encrypted_file_metadata(
+        &mut self,
+        file_id: Uuid,
+        encrypted_metadata: &[u8],
+        nonce: &[u8],
+        encrypted_hash: &str,
+    ) -> PortResult<()> {
+        self.migration_repo
+            .update_encrypted_file_metadata_tx(self.tx, file_id, encrypted_metadata, nonce, encrypted_hash)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn clear_plaintext_title(&mut self, document_id: Uuid) -> PortResult<()> {
+        self.migration_repo
+            .clear_plaintext_title_tx(self.tx, document_id)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn clear_plaintext_file_metadata(&mut self, file_id: Uuid) -> PortResult<()> {
+        self.migration_repo
+            .clear_plaintext_file_metadata_tx(self.tx, file_id)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn get_document_snapshot(
+        &mut self,
+        document_id: Uuid,
+    ) -> PortResult<Option<MigrationSnapshot>> {
+        self.migration_repo
+            .get_document_snapshot_tx(self.tx, document_id)
+            .await
+            .map_err(Into::into)
+    }
+
+    async fn get_document_max_seq(&mut self, document_id: Uuid) -> PortResult<Option<i64>> {
+        self.migration_repo
+            .get_document_max_seq_tx(self.tx, document_id)
+            .await
+            .map_err(Into::into)
+    }
+}
+
+// ============================================================================
+// DocumentKeysRepositoryTx implementation
+// ============================================================================
+
+#[async_trait]
+impl<'repo, 'tx, 'c> DocumentKeysRepositoryTx for SqlxMigrationTx<'repo, 'tx, 'c> {
+    async fn upsert_encrypted_dek(
+        &mut self,
+        document_id: Uuid,
+        encrypted_dek: &[u8],
+        nonce: &[u8],
+        key_version: i32,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(
+                r#"INSERT INTO document_encrypted_keys (document_id, encrypted_dek, nonce, key_version, created_at, updated_at)
+                   VALUES ($1, $2, $3, $4, now(), now())
+                   ON CONFLICT (document_id)
+                   DO UPDATE SET
+                       encrypted_dek = EXCLUDED.encrypted_dek,
+                       nonce = EXCLUDED.nonce,
+                       key_version = EXCLUDED.key_version,
+                       updated_at = now()"#,
+            )
+            .bind(document_id)
+            .bind(encrypted_dek)
+            .bind(nonce)
+            .bind(key_version)
+            .execute(self.tx.as_mut())
+            .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
+
+// ============================================================================
+// WorkspaceKeysRepositoryTx implementation
+// ============================================================================
+
+#[async_trait]
+impl<'repo, 'tx, 'c> WorkspaceKeysRepositoryTx for SqlxMigrationTx<'repo, 'tx, 'c> {
+    async fn upsert_encrypted_kek(
+        &mut self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+        encrypted_kek: &[u8],
+        key_version: i32,
+    ) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(
+                r#"INSERT INTO workspace_encrypted_keys (workspace_id, user_id, encrypted_kek, key_version, created_at)
+                   VALUES ($1, $2, $3, $4, now())
+                   ON CONFLICT (workspace_id, user_id, key_version)
+                   DO UPDATE SET
+                       encrypted_kek = EXCLUDED.encrypted_kek"#,
+            )
+            .bind(workspace_id)
+            .bind(user_id)
+            .bind(encrypted_kek)
+            .bind(key_version)
+            .execute(self.tx.as_mut())
+            .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
+
+// ============================================================================
+// UserKeysRepositoryTx implementation
+// ============================================================================
+
+#[async_trait]
+impl<'repo, 'tx, 'c> UserKeysRepositoryTx for SqlxMigrationTx<'repo, 'tx, 'c> {
+    async fn mark_encryption_setup_completed(&mut self, user_id: Uuid) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(r#"UPDATE users SET e2ee_setup_completed_at = now() WHERE id = $1"#)
+                .bind(user_id)
+                .execute(self.tx.as_mut())
+                .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn is_encryption_setup_completed(&mut self, user_id: Uuid) -> PortResult<bool> {
+        let out: anyhow::Result<bool> = async {
+            let row = sqlx::query(r#"SELECT e2ee_setup_completed_at FROM users WHERE id = $1"#)
+                .bind(user_id)
+                .fetch_optional(self.tx.as_mut())
+                .await?;
+
+            Ok(row
+                .and_then(|r| {
+                    use sqlx::Row;
+                    r.try_get::<Option<chrono::DateTime<chrono::Utc>>, _>("e2ee_setup_completed_at")
+                        .ok()
+                })
+                .flatten()
+                .is_some())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
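run_boxed implements commit-on-Ok / rollback-on-Err around a caller-supplied closure. The generic shape of that wrapper, with the transaction threaded by value instead of through the MigrationTx facade (a sketch, not the port's actual signature):

    use std::future::Future;

    // Commit only if the closure succeeds; roll back (best effort) otherwise.
    async fn with_tx<T, F, Fut>(pool: &sqlx::PgPool, f: F) -> anyhow::Result<T>
    where
        F: FnOnce(sqlx::Transaction<'static, sqlx::Postgres>) -> Fut,
        Fut: Future<Output = (sqlx::Transaction<'static, sqlx::Postgres>, anyhow::Result<T>)>,
    {
        let tx = pool.begin().await?;
        let (tx, out) = f(tx).await; // closure returns the tx so we can finish it
        match out {
            Ok(v) => {
                tx.commit().await?;
                Ok(v)
            }
            Err(e) => {
                tx.rollback().await.ok();
                Err(e)
            }
        }
    }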
out.map_err(Into::into) + } +} diff --git a/api/crates/infrastructure/src/identity/db/repositories/mod.rs b/api/crates/infrastructure/src/identity/db/repositories/mod.rs index 2e6528ce..a4e9e3ae 100644 --- a/api/crates/infrastructure/src/identity/db/repositories/mod.rs +++ b/api/crates/infrastructure/src/identity/db/repositories/mod.rs @@ -1,4 +1,7 @@ pub mod api_token_repository_sqlx; +pub mod migration_repository_sqlx; +pub mod migration_tx_runner_sqlx; +pub mod user_keys_repository_sqlx; pub mod user_repository_sqlx; pub mod user_session_repository_sqlx; pub mod user_shortcut_repository_sqlx; diff --git a/api/crates/infrastructure/src/identity/db/repositories/user_keys_repository_sqlx/mod.rs b/api/crates/infrastructure/src/identity/db/repositories/user_keys_repository_sqlx/mod.rs new file mode 100644 index 00000000..2c3f9b12 --- /dev/null +++ b/api/crates/infrastructure/src/identity/db/repositories/user_keys_repository_sqlx/mod.rs @@ -0,0 +1,256 @@ +use async_trait::async_trait; +use sqlx::Row; +use uuid::Uuid; + +use crate::core::db::PgPool; +use application::core::ports::errors::PortResult; +use application::identity::ports::user_keys_repository::{ + UserEncryptedMasterKeyRow, UserEncryptedPrivateKeyRow, UserKeysRepository, UserPublicKeyRow, +}; +use domain::identity::keys::{KdfParams, KdfType, KeyType}; + +pub struct SqlxUserKeysRepository { + pool: PgPool, +} + +impl SqlxUserKeysRepository { + pub fn new(pool: PgPool) -> Self { + Self { pool } + } +} + +#[async_trait] +impl UserKeysRepository for SqlxUserKeysRepository { + async fn get_public_key(&self, user_id: Uuid) -> PortResult> { + let out: anyhow::Result> = async { + let row = sqlx::query( + r#"SELECT user_id, public_key, key_type, created_at, updated_at + FROM user_public_keys + WHERE user_id = $1"#, + ) + .bind(user_id) + .fetch_optional(&self.pool) + .await?; + + Ok(row.map(|row| { + let key_type_str: String = row.get("key_type"); + UserPublicKeyRow { + user_id: row.get("user_id"), + public_key: row.get("public_key"), + key_type: KeyType::parse(&key_type_str).unwrap_or(KeyType::EcdhP256), + created_at: row.get("created_at"), + updated_at: row.get("updated_at"), + } + })) + } + .await; + out.map_err(Into::into) + } + + async fn upsert_public_key( + &self, + user_id: Uuid, + public_key: &[u8], + key_type: KeyType, + ) -> PortResult { + let out: anyhow::Result = async { + let row = sqlx::query( + r#"INSERT INTO user_public_keys (user_id, public_key, key_type, created_at, updated_at) + VALUES ($1, $2, $3, now(), now()) + ON CONFLICT (user_id) + DO UPDATE SET + public_key = EXCLUDED.public_key, + key_type = EXCLUDED.key_type, + updated_at = now() + RETURNING user_id, public_key, key_type, created_at, updated_at"#, + ) + .bind(user_id) + .bind(public_key) + .bind(key_type.as_str()) + .fetch_one(&self.pool) + .await?; + + let key_type_str: String = row.get("key_type"); + Ok(UserPublicKeyRow { + user_id: row.get("user_id"), + public_key: row.get("public_key"), + key_type: KeyType::parse(&key_type_str).unwrap_or(KeyType::EcdhP256), + created_at: row.get("created_at"), + updated_at: row.get("updated_at"), + }) + } + .await; + out.map_err(Into::into) + } + + async fn get_encrypted_master_key( + &self, + user_id: Uuid, + ) -> PortResult> { + let out: anyhow::Result> = async { + let row = sqlx::query( + r#"SELECT user_id, encrypted_key, salt, kdf_type, kdf_params, created_at, updated_at + FROM user_encrypted_master_keys + WHERE user_id = $1"#, + ) + .bind(user_id) + .fetch_optional(&self.pool) + .await?; + + Ok(row.map(|row| { + let 
+
+    async fn upsert_encrypted_master_key(
+        &self,
+        user_id: Uuid,
+        encrypted_key: &[u8],
+        salt: &[u8],
+        kdf_type: KdfType,
+        kdf_params: &KdfParams,
+    ) -> PortResult<UserEncryptedMasterKeyRow> {
+        let out: anyhow::Result<UserEncryptedMasterKeyRow> = async {
+            let kdf_params_json = serde_json::to_value(kdf_params)?;
+            let row = sqlx::query(
+                r#"INSERT INTO user_encrypted_master_keys (user_id, encrypted_key, salt, kdf_type, kdf_params, created_at, updated_at)
+                   VALUES ($1, $2, $3, $4, $5, now(), now())
+                   ON CONFLICT (user_id)
+                   DO UPDATE SET
+                       encrypted_key = EXCLUDED.encrypted_key,
+                       salt = EXCLUDED.salt,
+                       kdf_type = EXCLUDED.kdf_type,
+                       kdf_params = EXCLUDED.kdf_params,
+                       updated_at = now()
+                   RETURNING user_id, encrypted_key, salt, kdf_type, kdf_params, created_at, updated_at"#,
+            )
+            .bind(user_id)
+            .bind(encrypted_key)
+            .bind(salt)
+            .bind(kdf_type.as_str())
+            .bind(&kdf_params_json)
+            .fetch_one(&self.pool)
+            .await?;
+
+            let kdf_type_str: String = row.get("kdf_type");
+            let kdf_params_json: serde_json::Value = row.get("kdf_params");
+            Ok(UserEncryptedMasterKeyRow {
+                user_id: row.get("user_id"),
+                encrypted_key: row.get("encrypted_key"),
+                salt: row.get("salt"),
+                kdf_type: KdfType::parse(&kdf_type_str).unwrap_or(KdfType::Argon2id),
+                kdf_params: serde_json::from_value(kdf_params_json).unwrap_or_default(),
+                created_at: row.get("created_at"),
+                updated_at: row.get("updated_at"),
+            })
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+    ) -> PortResult<Option<UserEncryptedPrivateKeyRow>> {
+        let out: anyhow::Result<Option<UserEncryptedPrivateKeyRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT user_id, encrypted_private_key, nonce, created_at, updated_at
+                   FROM user_encrypted_private_keys
+                   WHERE user_id = $1"#,
+            )
+            .bind(user_id)
+            .fetch_optional(&self.pool)
+            .await?;
+
+            Ok(row.map(|row| UserEncryptedPrivateKeyRow {
+                user_id: row.get("user_id"),
+                encrypted_private_key: row.get("encrypted_private_key"),
+                nonce: row.get("nonce"),
+                created_at: row.get("created_at"),
+                updated_at: row.get("updated_at"),
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn upsert_encrypted_private_key(
+        &self,
+        user_id: Uuid,
+        encrypted_private_key: &[u8],
+        nonce: &[u8],
+    ) -> PortResult<UserEncryptedPrivateKeyRow> {
+        let out: anyhow::Result<UserEncryptedPrivateKeyRow> = async {
+            let row = sqlx::query(
+                r#"INSERT INTO user_encrypted_private_keys (user_id, encrypted_private_key, nonce, created_at, updated_at)
+                   VALUES ($1, $2, $3, now(), now())
+                   ON CONFLICT (user_id)
+                   DO UPDATE SET
+                       encrypted_private_key = EXCLUDED.encrypted_private_key,
+                       nonce = EXCLUDED.nonce,
+                       updated_at = now()
+                   RETURNING user_id, encrypted_private_key, nonce, created_at, updated_at"#,
+            )
+            .bind(user_id)
+            .bind(encrypted_private_key)
+            .bind(nonce)
+            .fetch_one(&self.pool)
+            .await?;
+
+            Ok(UserEncryptedPrivateKeyRow {
+                user_id: row.get("user_id"),
+                encrypted_private_key: row.get("encrypted_private_key"),
+                nonce: row.get("nonce"),
+                created_at: row.get("created_at"),
+                updated_at: row.get("updated_at"),
+            })
+        }
+        .await;
+        out.map_err(Into::into)
+    }
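The `user_encrypted_private_keys` row pairs with `user_public_keys` above; the lockfile additions (`ed25519-dalek`, `chacha20poly1305`, `zeroize`) suggest the private half is an Ed25519 signing key encrypted under the master key. A sketch of producing such a row (it assumes ed25519-dalek's `rand_core` feature is enabled; the helper itself is illustrative, not from this PR):

```rust
use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305,
};
use ed25519_dalek::SigningKey;

// Illustrative only: generate a signing keypair and encrypt the private half
// under the user's master key. Returns (encrypted_private_key, nonce,
// public_key), matching the two upsert calls above.
fn new_encrypted_signing_key(
    master_key: &[u8; 32],
) -> anyhow::Result<(Vec<u8>, Vec<u8>, Vec<u8>)> {
    let signing_key = SigningKey::generate(&mut OsRng);
    let public_key = signing_key.verifying_key().to_bytes().to_vec();

    let cipher = ChaCha20Poly1305::new(master_key.into());
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);
    let encrypted_private_key = cipher
        .encrypt(&nonce, signing_key.to_bytes().as_slice())
        .map_err(|e| anyhow::anyhow!("private key encryption failed: {e}"))?;

    Ok((encrypted_private_key, nonce.to_vec(), public_key))
}
```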
+
+    async fn mark_encryption_setup_completed(&self, user_id: Uuid) -> PortResult<()> {
+        let out: anyhow::Result<()> = async {
+            sqlx::query(r#"UPDATE users SET e2ee_setup_completed_at = now() WHERE id = $1"#)
+                .bind(user_id)
+                .execute(&self.pool)
+                .await?;
+            Ok(())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn is_encryption_setup_completed(&self, user_id: Uuid) -> PortResult<bool> {
+        let out: anyhow::Result<bool> = async {
+            let row = sqlx::query(r#"SELECT e2ee_setup_completed_at FROM users WHERE id = $1"#)
+                .bind(user_id)
+                .fetch_optional(&self.pool)
+                .await?;
+
+            Ok(row
+                .and_then(|r| r.try_get::<Option<chrono::DateTime<chrono::Utc>>, _>("e2ee_setup_completed_at").ok())
+                .flatten()
+                .is_some())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
diff --git a/api/crates/infrastructure/src/workspaces/db/repositories/mod.rs b/api/crates/infrastructure/src/workspaces/db/repositories/mod.rs
index 501757ec..43437fe6 100644
--- a/api/crates/infrastructure/src/workspaces/db/repositories/mod.rs
+++ b/api/crates/infrastructure/src/workspaces/db/repositories/mod.rs
@@ -1 +1,2 @@
+pub mod workspace_keys_repository_sqlx;
 pub mod workspace_repository_sqlx;
diff --git a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_keys_repository_sqlx/mod.rs b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_keys_repository_sqlx/mod.rs
new file mode 100644
index 00000000..5ad7f6cd
--- /dev/null
+++ b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_keys_repository_sqlx/mod.rs
@@ -0,0 +1,175 @@
+use async_trait::async_trait;
+use sqlx::Row;
+use uuid::Uuid;
+
+use crate::core::db::PgPool;
+use application::core::ports::errors::PortResult;
+use application::workspaces::ports::workspace_keys_repository::{
+    WorkspaceEncryptedKeyRow, WorkspaceKeysRepository,
+};
+
+pub struct SqlxWorkspaceKeysRepository {
+    pool: PgPool,
+}
+
+impl SqlxWorkspaceKeysRepository {
+    pub fn new(pool: PgPool) -> Self {
+        Self { pool }
+    }
+}
+
+#[async_trait]
+impl WorkspaceKeysRepository for SqlxWorkspaceKeysRepository {
+    async fn get_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+    ) -> PortResult<Option<WorkspaceEncryptedKeyRow>> {
+        let out: anyhow::Result<Option<WorkspaceEncryptedKeyRow>> = async {
+            let row = sqlx::query(
+                r#"SELECT id, workspace_id, user_id, encrypted_kek, key_version, created_at
+                   FROM workspace_encrypted_keys
+                   WHERE workspace_id = $1 AND user_id = $2
+                   ORDER BY key_version DESC
+                   LIMIT 1"#,
+            )
+            .bind(workspace_id)
+            .bind(user_id)
+            .fetch_optional(&self.pool)
+            .await?;
+
+            Ok(row.map(|row| WorkspaceEncryptedKeyRow {
+                id: row.get("id"),
+                workspace_id: row.get("workspace_id"),
+                user_id: row.get("user_id"),
+                encrypted_kek: row.get("encrypted_kek"),
+                key_version: row.get("key_version"),
+                created_at: row.get("created_at"),
+            }))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn list_encrypted_keks(
+        &self,
+        workspace_id: Uuid,
+    ) -> PortResult<Vec<WorkspaceEncryptedKeyRow>> {
+        let out: anyhow::Result<Vec<WorkspaceEncryptedKeyRow>> = async {
+            let rows = sqlx::query(
+                r#"SELECT DISTINCT ON (user_id) id, workspace_id, user_id, encrypted_kek, key_version, created_at
+                   FROM workspace_encrypted_keys
+                   WHERE workspace_id = $1
+                   ORDER BY user_id, key_version DESC"#,
+            )
+            .bind(workspace_id)
+            .fetch_all(&self.pool)
+            .await?;
+
+            Ok(rows
+                .into_iter()
+                .map(|row| WorkspaceEncryptedKeyRow {
+                    id: row.get("id"),
+                    workspace_id: row.get("workspace_id"),
+                    user_id: row.get("user_id"),
+                    encrypted_kek: row.get("encrypted_kek"),
+                    key_version: row.get("key_version"),
+                    created_at: row.get("created_at"),
+                })
+                .collect())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
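Because `list_encrypted_keks` uses `DISTINCT ON (user_id) ... ORDER BY user_id, key_version DESC`, it returns exactly one row per member: that member's newest KEK. A caller can index the result directly (a sketch; the trait-object wiring and the `PortResult` import are assumed):

```rust
use std::collections::HashMap;
use uuid::Uuid;

// Illustrative only: map each workspace member to their latest encrypted KEK,
// e.g. as input to a re-wrap during key rotation.
async fn latest_keks_by_member(
    repo: &dyn WorkspaceKeysRepository,
    workspace_id: Uuid,
) -> PortResult<HashMap<Uuid, Vec<u8>>> {
    let rows = repo.list_encrypted_keks(workspace_id).await?;
    Ok(rows
        .into_iter()
        .map(|row| (row.user_id, row.encrypted_kek))
        .collect())
}
```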
+
+    async fn upsert_encrypted_kek(
+        &self,
+        workspace_id: Uuid,
+        user_id: Uuid,
+        encrypted_kek: &[u8],
+        key_version: i32,
+    ) -> PortResult<WorkspaceEncryptedKeyRow> {
+        let out: anyhow::Result<WorkspaceEncryptedKeyRow> = async {
+            let row = sqlx::query(
+                r#"INSERT INTO workspace_encrypted_keys (workspace_id, user_id, encrypted_kek, key_version, created_at)
+                   VALUES ($1, $2, $3, $4, now())
+                   ON CONFLICT (workspace_id, user_id, key_version)
+                   DO UPDATE SET
+                       encrypted_kek = EXCLUDED.encrypted_kek
+                   RETURNING id, workspace_id, user_id, encrypted_kek, key_version, created_at"#,
+            )
+            .bind(workspace_id)
+            .bind(user_id)
+            .bind(encrypted_kek)
+            .bind(key_version)
+            .fetch_one(&self.pool)
+            .await?;
+
+            Ok(WorkspaceEncryptedKeyRow {
+                id: row.get("id"),
+                workspace_id: row.get("workspace_id"),
+                user_id: row.get("user_id"),
+                encrypted_kek: row.get("encrypted_kek"),
+                key_version: row.get("key_version"),
+                created_at: row.get("created_at"),
+            })
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_encrypted_kek(&self, workspace_id: Uuid, user_id: Uuid) -> PortResult<bool> {
+        let out: anyhow::Result<bool> = async {
+            let result = sqlx::query(
+                r#"DELETE FROM workspace_encrypted_keys
+                   WHERE workspace_id = $1 AND user_id = $2"#,
+            )
+            .bind(workspace_id)
+            .bind(user_id)
+            .execute(&self.pool)
+            .await?;
+
+            Ok(result.rows_affected() > 0)
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn delete_encrypted_kek_version(
+        &self,
+        workspace_id: Uuid,
+        key_version: i32,
+    ) -> PortResult<u64> {
+        let out: anyhow::Result<u64> = async {
+            let result = sqlx::query(
+                r#"DELETE FROM workspace_encrypted_keys
+                   WHERE workspace_id = $1 AND key_version = $2"#,
+            )
+            .bind(workspace_id)
+            .bind(key_version)
+            .execute(&self.pool)
+            .await?;
+
+            Ok(result.rows_affected())
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+
+    async fn get_current_key_version(&self, workspace_id: Uuid) -> PortResult<Option<i32>> {
+        let out: anyhow::Result<Option<i32>> = async {
+            let row = sqlx::query(
+                r#"SELECT MAX(key_version) as max_version
+                   FROM workspace_encrypted_keys
+                   WHERE workspace_id = $1"#,
+            )
+            .bind(workspace_id)
+            .fetch_optional(&self.pool)
+            .await?;
+
+            Ok(row.and_then(|r| r.try_get::<Option<i32>, _>("max_version").ok().flatten()))
+        }
+        .await;
+        out.map_err(Into::into)
+    }
+}
diff --git a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/helpers.rs b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/helpers.rs
index 642cfbe0..dba95f58 100644
--- a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/helpers.rs
+++ b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/helpers.rs
@@ -132,6 +132,9 @@ impl SqlxWorkspaceRepository {
                 .ok()
                 .flatten(),
             created_at: row.get("created_at"),
+            // E2EE fields
+            encrypted_kek_for_invite: row.try_get("encrypted_kek_for_invite").ok().flatten(),
+            kek_version: row.try_get("kek_version").ok().flatten(),
         })
     }
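Taken together, `get_current_key_version`, `upsert_encrypted_kek`, and `delete_encrypted_kek_version` support a rotation flow along these lines (a sketch, not code from this PR; it assumes the new KEK has already been re-encrypted to each member's public key client-side):

```rust
use uuid::Uuid;

// Illustrative only: write version N+1 of the KEK for every current member,
// then drop version N. `rewrapped` maps members to re-encrypted KEK blobs.
async fn rotate_workspace_kek(
    repo: &dyn WorkspaceKeysRepository,
    workspace_id: Uuid,
    rewrapped: &[(Uuid, Vec<u8>)],
) -> PortResult<()> {
    let old = repo.get_current_key_version(workspace_id).await?.unwrap_or(0);
    let new = old + 1;
    for (user_id, encrypted_kek) in rewrapped {
        repo.upsert_encrypted_kek(workspace_id, *user_id, encrypted_kek, new)
            .await?;
    }
    repo.delete_encrypted_kek_version(workspace_id, old).await?;
    Ok(())
}
```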
diff --git a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository.rs b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository.rs
index 7c6a7e94..a06eced0 100644
--- a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository.rs
+++ b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository.rs
@@ -294,6 +294,18 @@ impl WorkspaceRepository for SqlxWorkspaceRepository {
             .map_err(Into::into)
     }
 
+    async fn update_invitation_kek(
+        &self,
+        workspace_id: Uuid,
+        invitation_id: Uuid,
+        encrypted_kek_for_invite: &str,
+        kek_version: i32,
+    ) -> PortResult<Option<WorkspaceInvitation>> {
+        self.update_invitation_kek_impl(workspace_id, invitation_id, encrypted_kek_for_invite, kek_version)
+            .await
+            .map_err(Into::into)
+    }
+
     async fn list_all_workspace_ids(&self) -> PortResult<Vec<Uuid>> {
         self.list_all_workspace_ids_impl().await.map_err(Into::into)
     }
diff --git a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository/invitations.rs b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository/invitations.rs
index 2d9903f8..af616e6c 100644
--- a/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository/invitations.rs
+++ b/api/crates/infrastructure/src/workspaces/db/repositories/workspace_repository_sqlx/repository/invitations.rs
@@ -26,7 +26,7 @@ impl SqlxWorkspaceRepository {
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
                RETURNING id, workspace_id, email, role_kind, system_role, custom_role_id,
                          invited_by, token, expires_at, accepted_by, accepted_at, revoked_at,
-                         created_at"#,
+                         created_at, encrypted_kek_for_invite, kek_version"#,
         )
         .bind(workspace_id)
         .bind(email)
@@ -47,7 +47,7 @@ impl SqlxWorkspaceRepository {
         let rows = sqlx::query(
             r#"SELECT id, workspace_id, email, role_kind, system_role, custom_role_id,
                       invited_by, token, expires_at, accepted_by, accepted_at, revoked_at,
-                      created_at
+                      created_at, encrypted_kek_for_invite, kek_version
                FROM workspace_invitations
                WHERE workspace_id = $1
                ORDER BY created_at DESC"#,
@@ -69,7 +69,7 @@ impl SqlxWorkspaceRepository {
         let row = sqlx::query(
             r#"SELECT id, workspace_id, email, role_kind, system_role, custom_role_id,
                       invited_by, token, expires_at, accepted_by, accepted_at, revoked_at,
-                      created_at
+                      created_at, encrypted_kek_for_invite, kek_version
                FROM workspace_invitations
                WHERE token = $1
                FOR UPDATE"#,
@@ -151,7 +151,7 @@ impl SqlxWorkspaceRepository {
                WHERE id = $1 AND workspace_id = $2 AND revoked_at IS NULL AND accepted_at IS NULL
                RETURNING id, workspace_id, email, role_kind, system_role, custom_role_id,
                          invited_by, token, expires_at, accepted_by, accepted_at, revoked_at,
-                         created_at"#,
+                         created_at, encrypted_kek_for_invite, kek_version"#,
         )
         .bind(invitation_id)
         .bind(workspace_id)
         .await?;
         row.map(|row| self.map_invitation_row(&row)).transpose()
     }
+
+    /// Update invitation with encrypted KEK
+    pub(super) async fn update_invitation_kek_impl(
+        &self,
+        workspace_id: Uuid,
+        invitation_id: Uuid,
+        encrypted_kek_for_invite: &str,
+        kek_version: i32,
+    ) -> anyhow::Result<Option<WorkspaceInvitation>> {
+        let row = sqlx::query(
+            r#"UPDATE workspace_invitations
+               SET encrypted_kek_for_invite = $3, kek_version = $4
+               WHERE id = $1 AND workspace_id = $2 AND revoked_at IS NULL AND accepted_at IS NULL
+               RETURNING id, workspace_id, email, role_kind, system_role, custom_role_id,
+                         invited_by, token, expires_at, accepted_by, accepted_at, revoked_at,
+                         created_at, encrypted_kek_for_invite, kek_version"#,
+        )
+        .bind(invitation_id)
+        .bind(workspace_id)
+        .bind(encrypted_kek_for_invite)
+        .bind(kek_version)
+        .fetch_optional(&self.pool)
+        .await?;
+        row.map(|row| self.map_invitation_row(&row)).transpose()
+    }
+}
diff --git a/api/crates/presentation/Cargo.toml b/api/crates/presentation/Cargo.toml
index 91d59542..11c47e45 100644
--- a/api/crates/presentation/Cargo.toml
+++ b/api/crates/presentation/Cargo.toml
@@ -9,6 +9,8 @@ domain = { path = "../domain" }
 contracts = { path = "../contracts", features = ["openapi"] }
 
 anyhow = "1"
+base64 = 
"0.22" +bytes = "1" axum = { version = "0.7", features = ["macros", "json", "multipart", "ws"] } http = "1" serde = { version = "1.0", features = ["derive"] } @@ -19,6 +21,8 @@ uuid = { version = "1", features = ["v4", "serde"] } chrono = { version = "0.4", features = ["serde", "clock"] } futures-util = { version = "0.3", features = ["sink"] } rand = "0.8" +reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] } +ssh2 = "0.9" utoipa = { version = "4", features = ["axum_extras", "chrono", "uuid"] } yrs = { version = "0.24", features = ["sync"] } yrs-warp = "0.9" diff --git a/api/crates/presentation/src/context.rs b/api/crates/presentation/src/context.rs index e48799bd..ccebdc90 100644 --- a/api/crates/presentation/src/context.rs +++ b/api/crates/presentation/src/context.rs @@ -15,6 +15,7 @@ pub use application::documents::ports::realtime::realtime_types::{ }; use application::documents::services::DocumentServiceFacade; use application::documents::services::files::FileServiceFacade; +use application::documents::services::keys::DocumentKeysServiceFacade; use application::documents::services::publishing::PublicServiceFacade; use application::documents::services::sharing::ShareServiceFacade; use application::documents::services::tagging::TagServiceFacade; @@ -24,6 +25,8 @@ use application::identity::services::auth::account::AccountServiceFacade; use application::identity::services::auth::auth_service::AuthServiceFacade; use application::identity::services::auth::external::ExternalAuthRegistryFacade; use application::identity::services::auth::user_sessions::UserSessionServiceFacade; +use application::identity::services::migration::MigrationServiceFacade; +use application::identity::services::user_keys::UserKeysServiceFacade; use application::identity::services::user_shortcuts::UserShortcutServiceFacade; use application::plugins::ports::plugin_event_publisher::PluginScopedEvent; use application::plugins::ports::plugin_event_subscriber::PluginEventSubscriber; @@ -32,6 +35,7 @@ use application::plugins::services::execution::PluginExecutionServiceFacade; use application::plugins::services::management::PluginManagementServiceFacade; use application::plugins::services::permissions::PluginPermissionServiceFacade; use application::workspaces::services::WorkspaceServiceFacade; +use application::workspaces::services::workspace_keys::WorkspaceKeysServiceFacade; mod traits; pub use traits::{HasAuthServices, HasAuthorizationService, HasShareService, HasWorkspaceService}; @@ -83,6 +87,7 @@ pub struct CoreServicesDeps { #[derive(Clone)] pub struct DocumentServicesDeps { pub document_service: Arc, + pub document_keys_service: Arc, pub share_service: Arc, pub file_service: Arc, pub public_service: Arc, @@ -99,6 +104,8 @@ pub struct GitServicesDeps { pub struct IdentityServicesDeps { pub api_token_service: Arc, pub user_shortcut_service: Arc, + pub user_keys_service: Arc, + pub migration_service: Arc, pub account_service: Arc, pub auth_service: Arc, pub session_service: Arc, @@ -117,6 +124,7 @@ pub struct PluginServicesDeps { #[derive(Clone)] pub struct WorkspaceServicesDeps { pub workspace_service: Arc, + pub workspace_keys_service: Arc, } #[derive(Clone)] @@ -131,6 +139,7 @@ struct CoreServices { #[derive(Clone)] struct DocumentServices { document_service: Arc, + document_keys_service: Arc, share_service: Arc, file_service: Arc, public_service: Arc, @@ -147,6 +156,8 @@ struct GitServices { struct IdentityServices { api_token_service: Arc, user_shortcut_service: Arc, + 
user_keys_service: Arc, + migration_service: Arc, account_service: Arc, auth_service: Arc, session_service: Arc, @@ -165,6 +176,7 @@ struct PluginServices { #[derive(Clone)] struct WorkspaceServices { workspace_service: Arc, + workspace_keys_service: Arc, } mod subcontexts; @@ -185,6 +197,7 @@ impl AppServices { }, documents: DocumentServices { document_service: deps.documents.document_service, + document_keys_service: deps.documents.document_keys_service, share_service: deps.documents.share_service, file_service: deps.documents.file_service, public_service: deps.documents.public_service, @@ -197,6 +210,8 @@ impl AppServices { identity: IdentityServices { api_token_service: deps.identity.api_token_service, user_shortcut_service: deps.identity.user_shortcut_service, + user_keys_service: deps.identity.user_keys_service, + migration_service: deps.identity.migration_service, account_service: deps.identity.account_service, auth_service: deps.identity.auth_service, session_service: deps.identity.session_service, @@ -211,6 +226,7 @@ impl AppServices { }, workspaces: WorkspaceServices { workspace_service: deps.workspaces.workspace_service, + workspace_keys_service: deps.workspaces.workspace_keys_service, }, } } @@ -237,6 +253,10 @@ impl AppContext { self.services.documents.document_service.clone() } + pub fn document_keys_service(&self) -> Arc { + self.services.documents.document_keys_service.clone() + } + pub fn share_service(&self) -> Arc { self.services.documents.share_service.clone() } @@ -269,6 +289,10 @@ impl AppContext { self.services.workspaces.workspace_service.clone() } + pub fn workspace_keys_service(&self) -> Arc { + self.services.workspaces.workspace_keys_service.clone() + } + pub fn storage_ingest_queue(&self) -> Arc { self.services.core.storage_ingest_queue.clone() } @@ -336,6 +360,14 @@ impl AppContext { self.services.identity.api_token_service.clone() } + pub fn user_keys_service(&self) -> Arc { + self.services.identity.user_keys_service.clone() + } + + pub fn migration_service(&self) -> Arc { + self.services.identity.migration_service.clone() + } + pub fn realtime_engine(&self) -> Arc { self.services.documents.realtime_engine.clone() } diff --git a/api/crates/presentation/src/context/subcontexts.rs b/api/crates/presentation/src/context/subcontexts.rs index 242b6266..925efd3f 100644 --- a/api/crates/presentation/src/context/subcontexts.rs +++ b/api/crates/presentation/src/context/subcontexts.rs @@ -1,4 +1,5 @@ use super::*; +use application::documents::services::keys::DocumentKeysServiceFacade; #[derive(Clone)] pub struct CoreContext { @@ -90,6 +91,7 @@ pub struct DocumentsContext { pub cfg: PresentationConfig, authorization: Arc, document_service: Arc, + document_keys_service: Arc, file_service: Arc, public_service: Arc, share_service: Arc, @@ -124,6 +126,10 @@ impl DocumentsContext { self.document_service.clone() } + pub fn document_keys_service(&self) -> Arc { + self.document_keys_service.clone() + } + pub fn file_service(&self) -> Arc { self.file_service.clone() } @@ -171,6 +177,7 @@ impl FromRef for DocumentsContext { cfg: ctx.cfg.clone(), authorization: ctx.authorization(), document_service: ctx.document_service(), + document_keys_service: ctx.document_keys_service(), file_service: ctx.file_service(), public_service: ctx.public_service(), share_service: ctx.share_service(), @@ -237,11 +244,15 @@ impl FromRef for GitContext { } } +use application::identity::services::migration::MigrationServiceFacade; + #[derive(Clone)] pub struct IdentityContext { pub cfg: 
PresentationConfig, api_token_service: Arc, user_shortcut_service: Arc, + user_keys_service: Arc, + migration_service: Arc, account_service: Arc, auth_service: Arc, session_service: Arc, @@ -258,6 +269,14 @@ impl IdentityContext { self.user_shortcut_service.clone() } + pub fn user_keys_service(&self) -> Arc { + self.user_keys_service.clone() + } + + pub fn migration_service(&self) -> Arc { + self.migration_service.clone() + } + pub fn account_service(&self) -> Arc { self.account_service.clone() } @@ -301,6 +320,8 @@ impl FromRef for IdentityContext { cfg: ctx.cfg.clone(), api_token_service: ctx.api_token_service(), user_shortcut_service: ctx.user_shortcut_service(), + user_keys_service: ctx.user_keys_service(), + migration_service: ctx.migration_service(), account_service: ctx.account_service(), auth_service: ctx.auth_service(), session_service: ctx.session_service(), @@ -422,6 +443,7 @@ impl FromRef for PluginsContext { pub struct WorkspacesContext { pub cfg: PresentationConfig, workspace_service: Arc, + workspace_keys_service: Arc, account_service: Arc, document_service: Arc, auth_service: Arc, @@ -433,6 +455,10 @@ impl WorkspacesContext { self.workspace_service.clone() } + pub fn workspace_keys_service(&self) -> Arc { + self.workspace_keys_service.clone() + } + pub fn account_service(&self) -> Arc { self.account_service.clone() } @@ -471,6 +497,7 @@ impl FromRef for WorkspacesContext { Self { cfg: ctx.cfg.clone(), workspace_service: ctx.workspace_service(), + workspace_keys_service: ctx.workspace_keys_service(), account_service: ctx.account_service(), document_service: ctx.document_service(), auth_service: ctx.auth_service(), diff --git a/api/crates/presentation/src/http/core/markdown/handlers/mod.rs b/api/crates/presentation/src/http/core/markdown/handlers/mod.rs deleted file mode 100644 index 6a2e3596..00000000 --- a/api/crates/presentation/src/http/core/markdown/handlers/mod.rs +++ /dev/null @@ -1,120 +0,0 @@ -use std::collections::HashMap; - -use axum::{Json, extract::State, http::StatusCode}; -use uuid::Uuid; - -use crate::context::CoreContext; -use crate::http::error::ApiError; -use crate::http::identity::auth::Bearer; -use application::core::dtos::markdown::RenderOptions; -use application::core::services::errors::ServiceError; -use application::core::services::markdown_render::MarkdownRenderTask; - -use super::types::{RenderManyRequest, RenderManyResponse, RenderRequest, RenderResponseBody}; -use super::user_scope::resolve_user_scope_from_inputs; - -#[utoipa::path(post, path = "/api/markdown/render", tag = "Markdown", - request_body = RenderRequest, - responses((status = 200, body = RenderResponseBody)))] -pub async fn render_markdown( - State(ctx): State, - bearer: Option, - Json(req): Json, -) -> Result, ApiError> { - // Per-item size guard (2MB) - if req.text.len() > 2 * 1024 * 1024 { - return Err(ApiError::new( - StatusCode::PAYLOAD_TOO_LARGE, - "payload_too_large", - )); - } - let RenderRequest { text, options } = req; - let options: RenderOptions = options.into(); - - let bearer_token = bearer.as_ref().map(|b| b.0.as_str()); - let user_scope = - resolve_user_scope_from_inputs(&ctx, bearer_token, options.token.as_deref()).await; - - let renderer = ctx.markdown_renderer(); - let resp = renderer - .render_single(text, options, user_scope) - .await - .map_err(map_markdown_error)?; - Ok(Json(RenderResponseBody::from(resp))) -} - -#[utoipa::path(post, path = "/api/markdown/render-many", tag = "Markdown", - request_body = RenderManyRequest, - responses((status = 200, body = 
RenderManyResponse)))] -pub async fn render_markdown_many( - State(ctx): State, - bearer: Option, - Json(req): Json, -) -> Result, ApiError> { - // Guard: item count and total size - const MAX_ITEMS: usize = 128; - const MAX_TOTAL_BYTES: usize = 5 * 1024 * 1024; // 5MB - let items = req.items; - if items.len() > MAX_ITEMS { - return Err(ApiError::new( - StatusCode::PAYLOAD_TOO_LARGE, - "payload_too_large", - )); - } - let total: usize = items.iter().map(|i| i.text.len()).sum(); - if total > MAX_TOTAL_BYTES { - return Err(ApiError::new( - StatusCode::PAYLOAD_TOO_LARGE, - "payload_too_large", - )); - } - - let bearer_token = bearer.as_ref().map(|b| b.0.clone()); - let bearer_scope = resolve_user_scope_from_inputs(&ctx, bearer_token.as_deref(), None).await; - let mut share_scope_cache: HashMap> = HashMap::new(); - let mut tasks = Vec::with_capacity(items.len()); - - for item in items { - if item.text.len() > 2 * 1024 * 1024 { - return Err(ApiError::new( - StatusCode::PAYLOAD_TOO_LARGE, - "payload_too_large", - )); - } - let RenderRequest { text, options } = item; - let options: RenderOptions = options.into(); - let user_scope = if bearer_scope.is_some() { - bearer_scope - } else if let Some(token) = options.token.as_deref() { - if let Some(scope) = share_scope_cache.get(token) { - *scope - } else { - let scope = resolve_user_scope_from_inputs(&ctx, None, Some(token)).await; - share_scope_cache.insert(token.to_string(), scope); - scope - } - } else { - None - }; - tasks.push(MarkdownRenderTask { - text, - options, - user_scope, - }); - } - - let renderer = ctx.markdown_renderer(); - let responses = renderer - .render_many(tasks) - .await - .map_err(map_markdown_error)?; - let items = responses - .into_iter() - .map(RenderResponseBody::from) - .collect(); - Ok(Json(RenderManyResponse { items })) -} - -fn map_markdown_error(err: ServiceError) -> crate::http::error::ApiError { - crate::http::error::map_service_error_no_log(err) -} diff --git a/api/crates/presentation/src/http/core/markdown/mod.rs b/api/crates/presentation/src/http/core/markdown/mod.rs deleted file mode 100644 index 2b015277..00000000 --- a/api/crates/presentation/src/http/core/markdown/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -mod handlers; -pub mod types; -mod user_scope; - -use axum::{Router, routing::post}; - -use crate::context::AppContext; - -pub use handlers::{render_markdown, render_markdown_many}; -pub use types::*; - -pub mod openapi { - pub use super::handlers::*; -} - -pub fn routes(ctx: AppContext) -> Router { - Router::new() - .route("/markdown/render", post(render_markdown)) - .route("/markdown/render-many", post(render_markdown_many)) - .with_state(ctx) -} diff --git a/api/crates/presentation/src/http/core/markdown/types.rs b/api/crates/presentation/src/http/core/markdown/types.rs deleted file mode 100644 index 2fc3aff8..00000000 --- a/api/crates/presentation/src/http/core/markdown/types.rs +++ /dev/null @@ -1,106 +0,0 @@ -use serde::{Deserialize, Serialize}; -use utoipa::ToSchema; - -use application::core::dtos::markdown::{PlaceholderItem, RenderOptions, RenderResponse}; - -#[derive(Debug, Clone, Deserialize, Serialize, ToSchema, Default)] -#[serde(default)] -pub struct RenderOptionsPayload { - pub flavor: Option, - pub theme: Option, - pub features: Option>, - pub sanitize: Option, - pub hardbreaks: Option, - pub doc_id: Option, - pub base_origin: Option, - pub absolute_attachments: Option, - pub token: Option, -} - -impl From for RenderOptions { - fn from(value: RenderOptionsPayload) -> Self { - RenderOptions { - 
flavor: value.flavor, - theme: value.theme, - features: value.features, - sanitize: value.sanitize, - hardbreaks: value.hardbreaks, - doc_id: value.doc_id, - base_origin: value.base_origin, - absolute_attachments: value.absolute_attachments, - token: value.token, - } - } -} - -impl From for RenderOptionsPayload { - fn from(value: RenderOptions) -> Self { - Self { - flavor: value.flavor, - theme: value.theme, - features: value.features, - sanitize: value.sanitize, - hardbreaks: value.hardbreaks, - doc_id: value.doc_id, - base_origin: value.base_origin, - absolute_attachments: value.absolute_attachments, - token: value.token, - } - } -} - -#[derive(Debug, Clone, Serialize, ToSchema)] -pub struct PlaceholderItemPayload { - pub kind: String, - pub id: String, - pub code: String, -} - -impl From for PlaceholderItemPayload { - fn from(value: PlaceholderItem) -> Self { - Self { - kind: value.kind, - id: value.id, - code: value.code, - } - } -} - -#[derive(Debug, Clone, Serialize, ToSchema)] -pub struct RenderResponseBody { - pub html: String, - #[serde(skip_serializing_if = "Vec::is_empty")] - pub placeholders: Vec, - pub hash: String, -} - -impl From for RenderResponseBody { - fn from(value: RenderResponse) -> Self { - Self { - html: value.html, - placeholders: value - .placeholders - .into_iter() - .map(PlaceholderItemPayload::from) - .collect(), - hash: value.hash, - } - } -} - -#[derive(Debug, Deserialize, ToSchema)] -pub struct RenderRequest { - pub text: String, - #[serde(default)] - pub options: RenderOptionsPayload, -} - -#[derive(Debug, Deserialize, ToSchema)] -pub struct RenderManyRequest { - pub items: Vec, -} - -#[derive(Debug, Serialize, ToSchema)] -pub struct RenderManyResponse { - pub items: Vec, -} diff --git a/api/crates/presentation/src/http/core/markdown/user_scope.rs b/api/crates/presentation/src/http/core/markdown/user_scope.rs deleted file mode 100644 index 6ff0115b..00000000 --- a/api/crates/presentation/src/http/core/markdown/user_scope.rs +++ /dev/null @@ -1,79 +0,0 @@ -use uuid::Uuid; - -use crate::context::{HasAuthServices, HasShareService, HasWorkspaceService}; -use crate::security::{request_status, token}; -use application::core::services::access; -use domain::documents::share; - -pub(super) async fn resolve_user_scope_from_inputs( - ctx: &(impl HasAuthServices + HasWorkspaceService + HasShareService), - bearer_token: Option<&str>, - share_token: Option<&str>, -) -> Option { - if let Some(token) = bearer_token { - if let Some(workspace_id) = ctx.auth_service().workspace_from_token_claim(token) { - return Some(workspace_id); - } - if let Ok(Some(workspace_id)) = ctx.auth_service().workspace_from_token_async(token).await { - return Some(workspace_id); - } - match token::resolve_actor_from_token_str(ctx, token).await { - Ok(access::Actor::User(uid)) => { - if let Ok(workspaces) = ctx.workspace_service().list_for_user(uid).await { - if workspaces.is_empty() { - return None; - } - if let Some(default_ws) = workspaces.iter().find(|ws| ws.is_default) { - return Some(default_ws.id); - } - return Some(workspaces[0].id); - } - } - Ok(access::Actor::ShareToken(t)) => { - if let Ok(Some(ctx_share)) = ctx.share_service().resolve_share_context(&t).await { - if share::is_expired(ctx_share.expires_at.as_ref(), chrono::Utc::now()) { - return None; - } - return Some(ctx_share.workspace_id); - } - } - Ok(_) => {} - Err(token::ActorResolveError::TokenExpired) => { - request_status::mark_token_expired(); - return None; - } - Err(token::ActorResolveError::Unauthorized) => {} - } - } - if 
let Some(token) = share_token {
-        // Share token: resolve its workspace for renderer so plugin manifests can be loaded.
-        match token::resolve_actor_from_token_str(ctx, token).await {
-            Ok(access::Actor::User(uid)) => {
-                if let Ok(workspaces) = ctx.workspace_service().list_for_user(uid).await {
-                    if workspaces.is_empty() {
-                        return None;
-                    }
-                    if let Some(default_ws) = workspaces.iter().find(|ws| ws.is_default) {
-                        return Some(default_ws.id);
-                    }
-                    return Some(workspaces[0].id);
-                }
-            }
-            Ok(access::Actor::ShareToken(t)) => {
-                if let Ok(Some(ctx_share)) = ctx.share_service().resolve_share_context(&t).await {
-                    if share::is_expired(ctx_share.expires_at.as_ref(), chrono::Utc::now()) {
-                        return None;
-                    }
-                    return Some(ctx_share.workspace_id);
-                }
-            }
-            Ok(_) => {}
-            Err(token::ActorResolveError::TokenExpired) => {
-                request_status::mark_token_expired();
-                return None;
-            }
-            Err(token::ActorResolveError::Unauthorized) => {}
-        }
-    }
-    None
-}
diff --git a/api/crates/presentation/src/http/core/mod.rs b/api/crates/presentation/src/http/core/mod.rs
index 4f37f2a6..570ce3a8 100644
--- a/api/crates/presentation/src/http/core/mod.rs
+++ b/api/crates/presentation/src/http/core/mod.rs
@@ -1,4 +1,3 @@
 pub mod health;
-pub mod markdown;
 pub mod metrics;
 pub mod storage_ingest;
diff --git a/api/crates/presentation/src/http/documents/files/download.rs b/api/crates/presentation/src/http/documents/files/download.rs
index c1aea111..bddfbe6a 100644
--- a/api/crates/presentation/src/http/documents/files/download.rs
+++ b/api/crates/presentation/src/http/documents/files/download.rs
@@ -1,59 +1,53 @@
 use axum::{
     extract::{Path as AxumPath, Query, State},
+    http::HeaderMap,
     response::Response,
 };
 use uuid::Uuid;
 
 use crate::context::DocumentsContext;
 use crate::http::error::ApiError;
-use crate::http::extractors::WorkspaceAuth;
-use application::core::services::access;
-use domain::access::permissions::PERM_DOC_VIEW;
+use crate::security::token;
 
-use super::types::{FileByNameQuery, file_payload_response, map_file_error};
+use super::types::{file_payload_response, map_file_error};
 
+#[derive(Debug, serde::Deserialize)]
+pub struct GetFileQuery {
+    pub token: Option<String>,
+}
+
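A client call against the handler below might look like this (a sketch using the `reqwest` dependency this PR adds to the presentation crate; the URL and token are placeholders, and the `x-encrypted-*` headers are the ones set by `file_payload_response` in types.rs):

```rust
use reqwest::Client;

// Illustrative only: fetch an encrypted file via share-token auth and read the
// E2EE metadata headers needed for client-side decryption.
async fn fetch_encrypted_file(file_id: &str, share_token: &str) -> anyhow::Result<Vec<u8>> {
    let resp = Client::new()
        .get(format!("https://example.invalid/api/files/{file_id}"))
        .query(&[("token", share_token)])
        .send()
        .await?
        .error_for_status()?;

    if let Some(meta) = resp.headers().get("x-encrypted-metadata") {
        // Base64 ciphertext; decrypt with the document DEK on the client.
        println!("encrypted metadata: {}", meta.to_str()?);
    }
    Ok(resp.bytes().await?.to_vec())
}
```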
+/// Download a file by ID.
+/// Returns the encrypted file with E2EE metadata in headers for client-side decryption.
+/// Supports authentication via bearer token or share token query parameter.
 #[utoipa::path(
     get,
     path = "/api/files/{id}",
     tag = "Files",
-    params(("id" = Uuid, Path, description = "File ID")),
+    params(
+        ("id" = Uuid, Path, description = "File ID"),
+        ("token" = Option<String>, Query, description = "Share token for authentication")
+    ),
     responses((status = 200, description = "OK", body = Vec<u8>, content_type = "application/octet-stream"))
 )]
 pub async fn get_file(
     State(ctx): State<DocumentsContext>,
-    auth: WorkspaceAuth,
+    headers: HeaderMap,
+    Query(query): Query<GetFileQuery>,
     AxumPath(id): AxumPath<Uuid>,
 ) -> Result<Response, ApiError> {
-    auth.ensure_permission(PERM_DOC_VIEW)?;
-    let actor = access::Actor::User(auth.user_id);
-    let payload = ctx
-        .file_service()
-        .download_owned_file(&actor, auth.workspace_id, id)
-        .await
-        .map_err(map_file_error)?;
-    Ok(file_payload_response(payload))
-}
+    let share_token = query.token.as_deref();
+    let bearer = token::bearer_from_headers(&headers);
 
-#[utoipa::path(
-    get,
-    path = "/api/files/documents/{filename}",
-    tag = "Files",
-    params(("filename" = String, Path, description = "File name"), ("document_id" = Uuid, Query, description = "Document ID")),
-    responses((status = 200, description = "OK", body = Vec<u8>, content_type = "application/octet-stream"))
-)]
-pub async fn get_file_by_name(
-    State(ctx): State<DocumentsContext>,
-    auth: WorkspaceAuth,
-    AxumPath(filename): AxumPath<String>,
-    Query(q): Query<FileByNameQuery>,
-) -> Result<Response, ApiError> {
-    auth.ensure_permission(PERM_DOC_VIEW)?;
+    let actor = token::resolve_actor_from_parts(&ctx, bearer, share_token)
+        .await
+        .map_err(token::map_actor_error)?
+        .ok_or(ApiError::unauthorized("unauthorized"))?;
 
-    let actor = access::Actor::User(auth.user_id);
     let payload = ctx
         .file_service()
-        .get_file_by_name(&actor, q.document_id, &filename)
+        .download_file_for_actor(&actor, id)
         .await
         .map_err(map_file_error)?;
+
     Ok(file_payload_response(payload))
 }
diff --git a/api/crates/presentation/src/http/documents/files/list.rs b/api/crates/presentation/src/http/documents/files/list.rs
new file mode 100644
index 00000000..4ca995b4
--- /dev/null
+++ b/api/crates/presentation/src/http/documents/files/list.rs
@@ -0,0 +1,69 @@
+use axum::{
+    Json,
+    extract::{Path as AxumPath, Query, State},
+    http::HeaderMap,
+};
+use base64::Engine;
+use uuid::Uuid;
+
+use crate::context::DocumentsContext;
+use crate::http::error::ApiError;
+use crate::security::token;
+
+use super::types::{ListFileResponse, map_file_error};
+
+#[derive(Debug, serde::Deserialize)]
+pub struct ListFilesQuery {
+    pub token: Option<String>,
+}
+
+/// List files for a document.
+/// Returns encrypted metadata for client-side decryption to build the file map.
+/// Supports authentication via bearer token or share token query parameter.
+#[utoipa::path(
+    get,
+    path = "/api/documents/{docId}/files",
+    tag = "Files",
+    params(
+        ("docId" = Uuid, Path, description = "Document ID"),
+        ("token" = Option<String>, Query, description = "Share token for authentication")
+    ),
+    responses((status = 200, description = "OK", body = Vec<ListFileResponse>))
+)]
+pub async fn list_files(
+    State(ctx): State<DocumentsContext>,
+    headers: HeaderMap,
+    Query(query): Query<ListFilesQuery>,
+    AxumPath(doc_id): AxumPath<Uuid>,
+) -> Result<Json<Vec<ListFileResponse>>, ApiError> {
+    let share_token = query.token.as_deref();
+    let bearer = token::bearer_from_headers(&headers);
+
+    let actor = token::resolve_actor_from_parts(&ctx, bearer, share_token)
+        .await
+        .map_err(token::map_actor_error)?
+        .ok_or(ApiError::unauthorized("unauthorized"))?;
+
+    let files = ctx
+        .file_service()
+        .list_files_for_actor(&actor, doc_id)
+        .await
+        .map_err(map_file_error)?;
+
+    let response: Vec<ListFileResponse> = files
+        .into_iter()
+        .map(|f| ListFileResponse {
+            id: f.id,
+            encrypted_metadata: f
+                .encrypted_metadata
+                .map(|m| base64::engine::general_purpose::STANDARD.encode(m)),
+            encrypted_metadata_nonce: f
+                .encrypted_metadata_nonce
+                .map(|n| base64::engine::general_purpose::STANDARD.encode(n)),
+            encrypted_hash: f.encrypted_hash,
+            size: f.size,
+        })
+        .collect();
+
+    Ok(Json(response))
+}
diff --git a/api/crates/presentation/src/http/documents/files/mod.rs b/api/crates/presentation/src/http/documents/files/mod.rs
index 7fc23e50..f9a5c5fa 100644
--- a/api/crates/presentation/src/http/documents/files/mod.rs
+++ b/api/crates/presentation/src/http/documents/files/mod.rs
@@ -1,4 +1,5 @@
 mod download;
+mod list;
 mod serve;
 pub mod types;
 mod upload;
@@ -10,20 +11,21 @@ use axum::{
 
 use crate::context::AppContext;
 
-pub use download::{get_file, get_file_by_name};
+pub use download::get_file;
+pub use list::list_files;
 pub use serve::serve_upload;
 pub use types::*;
 pub use upload::upload_file;
 
 pub mod openapi {
-    pub use super::download::*;
-    pub use super::upload::*;
+    pub use super::download::__path_get_file;
+    pub use super::list::__path_list_files;
+    pub use super::upload::__path_upload_file;
 }
 
 pub fn routes(ctx: AppContext) -> Router {
     Router::new()
-        .route("/files", post(upload_file))
+        .route("/documents/:doc_id/files", post(upload_file).get(list_files))
         .route("/files/:id", get(get_file))
-        .route("/files/documents/:filename", get(get_file_by_name))
         .with_state(ctx)
 }
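A matching client upload for the re-nested route might look like the following (a sketch; it assumes reqwest's `multipart` feature is enabled, and the `file`/`metadata` part names come from `upload_file` in upload.rs below):

```rust
use reqwest::multipart::{Form, Part};

// Illustrative only: POST an encrypted blob plus its metadata envelope to
// /api/documents/{docId}/files. The JSON keys are camelCase, matching
// FileUploadMetadata's serde rename.
async fn upload_encrypted_file(
    doc_id: &str,
    bearer: &str,
    encrypted_bytes: Vec<u8>,
    metadata_json: String, // {"encryptedMetadata":"...","encryptedMetadataNonce":"...","encryptedHash":"..."}
) -> anyhow::Result<()> {
    let form = Form::new()
        .part("file", Part::bytes(encrypted_bytes).file_name("file.rme"))
        .text("metadata", metadata_json);

    reqwest::Client::new()
        .post(format!("https://example.invalid/api/documents/{doc_id}/files"))
        .bearer_auth(bearer)
        .multipart(form)
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
```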
diff --git a/api/crates/presentation/src/http/documents/files/types.rs b/api/crates/presentation/src/http/documents/files/types.rs
index 0742e1e3..f5ec249b 100644
--- a/api/crates/presentation/src/http/documents/files/types.rs
+++ b/api/crates/presentation/src/http/documents/files/types.rs
@@ -9,12 +9,17 @@ use uuid::Uuid;
 use application::core::services::errors::ServiceError;
 use application::documents::services::files::FilePayload;
 
+/// Response for file upload (E2EE format per design)
 #[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
 pub struct UploadFileResponse {
     pub id: Uuid,
+    /// URL to access the file (relative or absolute)
     pub url: String,
+    /// Storage filename (UUID, for building relative paths)
     pub filename: String,
-    pub content_type: Option<String>,
+    /// SHA256 hash of encrypted file content
+    pub encrypted_hash: String,
     pub size: i64,
 }
 
@@ -22,32 +27,91 @@ pub fn map_file_error(err: ServiceError) -> crate::http::error::ApiError {
     crate::http::error::map_service_error(err, "file_service_error")
 }
 
+/// File payload response with optional E2EE metadata in headers.
+/// For E2EE files, returns encrypted content with metadata headers for client-side decryption.
+/// For legacy files, returns raw bytes without E2EE headers.
 pub fn file_payload_response(payload: FilePayload) -> axum::response::Response {
+    use base64::Engine;
+
     let mut headers = HeaderMap::new();
-    if let Some(ct) = payload.content_type {
-        headers.insert(
-            axum::http::header::CONTENT_TYPE,
-            HeaderValue::from_str(&ct)
-                .unwrap_or(HeaderValue::from_static("application/octet-stream")),
-        );
-    }
+    // Use octet-stream for all files (client determines type from metadata or content)
+    headers.insert(
+        axum::http::header::CONTENT_TYPE,
+        HeaderValue::from_static("application/octet-stream"),
+    );
     headers.insert(
         axum::http::header::HeaderName::from_static("x-content-type-options"),
         HeaderValue::from_static("nosniff"),
     );
+
+    // Add E2EE metadata headers only if present
+    if let Some(ref encrypted_metadata) = payload.encrypted_metadata {
+        let encoded_metadata = base64::engine::general_purpose::STANDARD.encode(encrypted_metadata);
+        if let Ok(val) = HeaderValue::from_str(&encoded_metadata) {
+            headers.insert(
+                axum::http::header::HeaderName::from_static("x-encrypted-metadata"),
+                val,
+            );
+        }
+    }
+    if let Some(ref encrypted_metadata_nonce) = payload.encrypted_metadata_nonce {
+        let encoded_nonce = base64::engine::general_purpose::STANDARD.encode(encrypted_metadata_nonce);
+        if let Ok(val) = HeaderValue::from_str(&encoded_nonce) {
+            headers.insert(
+                axum::http::header::HeaderName::from_static("x-encrypted-metadata-nonce"),
+                val,
+            );
+        }
+    }
+    if let Some(ref encrypted_hash) = payload.encrypted_hash {
+        if let Ok(val) = HeaderValue::from_str(encrypted_hash) {
+            headers.insert(
+                axum::http::header::HeaderName::from_static("x-encrypted-hash"),
+                val,
+            );
+        }
+    }
+
     (headers, payload.bytes).into_response()
 }
 
+/// Multipart upload schema for OpenAPI
 #[derive(ToSchema)]
 #[allow(dead_code)]
 pub struct UploadFileMultipart {
+    /// Encrypted file binary (.rme format)
     #[schema(value_type = String, format = Binary)]
     pub file: String,
-    #[schema(value_type = String, format = Uuid)]
-    pub document_id: String,
+    /// JSON metadata containing encrypted file metadata
+    #[schema(value_type = Option<String>)]
+    pub metadata: Option<String>,
 }
 
+/// Metadata JSON structure for file upload
 #[derive(Debug, Deserialize)]
-pub struct FileByNameQuery {
-    pub document_id: Uuid,
+#[serde(rename_all = "camelCase")]
+pub struct FileUploadMetadata {
+    /// Base64 encoded encrypted metadata
+    pub encrypted_metadata: Option<String>,
+    /// Base64 encoded nonce for encrypted metadata
+    pub encrypted_metadata_nonce: Option<String>,
+    /// Client-computed hash of encrypted file content (SHA256)
+    pub encrypted_hash: Option<String>,
 }
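The doc comments above name the fields carried inside `encrypted_metadata` once decrypted (filename, logicalPath, mimeType). As a sketch, the client-side plaintext envelope would deserialize to something like this (the exact schema is client-defined and only inferred from those comments, not from this PR):

```rust
use serde::{Deserialize, Serialize};

// Illustrative only: plaintext form of `encrypted_metadata` after decryption
// with the document DEK. Field set inferred from the doc comments above.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DecryptedFileMetadata {
    filename: String,
    logical_path: String,
    mime_type: String,
}
```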
+
+/// Response for listing files in a document.
+/// Returns encrypted metadata for client-side decryption to build the file map.
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ListFileResponse {
+    pub id: Uuid,
+    /// Base64 encoded encrypted metadata (contains filename, logicalPath, mimeType)
+    pub encrypted_metadata: Option<String>,
+    /// Base64 encoded nonce for encrypted metadata
+    pub encrypted_metadata_nonce: Option<String>,
+    /// SHA256 hash of encrypted file content
+    pub encrypted_hash: Option<String>,
+    /// File size in bytes
+    pub size: i64,
+}
diff --git a/api/crates/presentation/src/http/documents/files/upload.rs b/api/crates/presentation/src/http/documents/files/upload.rs
index 4d609a32..959ad8bf 100644
--- a/api/crates/presentation/src/http/documents/files/upload.rs
+++ b/api/crates/presentation/src/http/documents/files/upload.rs
@@ -1,21 +1,24 @@
 use axum::{
     Json,
-    extract::{Multipart, State},
+    extract::{Multipart, Path, State},
     http::StatusCode,
 };
+use base64::Engine;
 use uuid::Uuid;
 
 use crate::context::DocumentsContext;
 use crate::http::error::ApiError;
 use crate::http::extractors::WorkspaceAuth;
+use application::documents::use_cases::files::upload_file::FileUploadInput;
 use domain::access::permissions::PERM_FILE_UPLOAD;
 
-use super::types::{UploadFileResponse, map_file_error};
+use super::types::{FileUploadMetadata, UploadFileResponse, map_file_error};
 
 #[utoipa::path(
     post,
-    path = "/api/files",
+    path = "/api/documents/{docId}/files",
     tag = "Files",
+    params(("docId" = Uuid, Path, description = "Document ID")),
     request_body(
         content = UploadFileMultipart,
         content_type = "multipart/form-data",
@@ -27,14 +30,13 @@ use super::types::{UploadFileResponse, map_file_error};
 pub async fn upload_file(
     State(ctx): State<DocumentsContext>,
     auth: WorkspaceAuth,
+    Path(doc_id): Path<Uuid>,
     mut multipart: Multipart,
 ) -> Result<(StatusCode, Json<UploadFileResponse>), ApiError> {
     auth.ensure_permission(PERM_FILE_UPLOAD)?;
 
-    let mut document_id: Option<Uuid> = None;
     let mut file_bytes: Option<Vec<u8>> = None;
-    let mut orig_filename: Option<String> = None;
-    let mut content_type: Option<String> = None;
+    let mut metadata: Option<FileUploadMetadata> = None;
 
     while let Some(field) = multipart
         .next_field()
         .await
         .map_err(|_| ApiError::bad_request("invalid_multipart"))?
{ let name = field.name().map(|s| s.to_string()); - let file_name = field.file_name().map(|s| s.to_string()); - let ct = field.content_type().map(|s| s.to_string()); match name.as_deref() { - Some("document_id") => { - let t = field - .text() - .await - .map_err(|_| ApiError::bad_request("invalid_document_id"))?; - document_id = Some( - Uuid::parse_str(t.trim()) - .map_err(|_| ApiError::bad_request("invalid_document_id"))?, - ); - } Some("file") => { - orig_filename = file_name.clone(); - content_type = ct.clone(); let data = field .bytes() .await @@ -70,34 +58,72 @@ pub async fn upload_file( } file_bytes = Some(data.to_vec()); } + Some("metadata") => { + let text = field + .text() + .await + .map_err(|_| ApiError::bad_request("invalid_metadata"))?; + metadata = serde_json::from_str(&text) + .map_err(|_| ApiError::bad_request("invalid_metadata_json"))?; + } _ => {} } } - let doc_id = document_id.ok_or(ApiError::bad_request("missing_document_id"))?; let bytes = file_bytes.ok_or(ApiError::bad_request("missing_file"))?; + let metadata = metadata.ok_or(ApiError::bad_request("missing_metadata"))?; + + // Extract E2EE fields from metadata (all required) + let encrypted_metadata = metadata + .encrypted_metadata + .ok_or(ApiError::bad_request("missing_encrypted_metadata")) + .and_then(|s| { + base64::engine::general_purpose::STANDARD + .decode(&s) + .map_err(|_| ApiError::bad_request("invalid_encrypted_metadata")) + })?; + let encrypted_metadata_nonce = metadata + .encrypted_metadata_nonce + .ok_or(ApiError::bad_request("missing_encrypted_metadata_nonce")) + .and_then(|s| { + base64::engine::general_purpose::STANDARD + .decode(&s) + .map_err(|_| ApiError::bad_request("invalid_encrypted_metadata_nonce")) + })?; + let encrypted_hash = metadata + .encrypted_hash + .ok_or(ApiError::bad_request("missing_encrypted_hash"))?; let public_base_url = ctx.cfg.public_base_url.clone(); let file_service = ctx.file_service(); + + // Upload encrypted file + let input = FileUploadInput { + bytes, + encrypted_metadata, + encrypted_metadata_nonce, + encrypted_hash, + }; let f = file_service - .upload_file( - auth.workspace_id, - auth.user_id, - doc_id, - bytes, - orig_filename, - content_type.clone(), - public_base_url, - ) + .upload_file(auth.workspace_id, auth.user_id, doc_id, input, public_base_url) .await .map_err(map_file_error)?; + + // Extract filename from storage_path (last segment after /) + let filename = f + .storage_path + .rsplit('/') + .next() + .unwrap_or(&f.id.to_string()) + .to_string(); + Ok(( StatusCode::CREATED, Json(UploadFileResponse { id: f.id, url: f.url, - filename: f.filename, - content_type: f.content_type, + filename, + encrypted_hash: f.encrypted_hash, size: f.size, }), )) diff --git a/api/crates/presentation/src/http/documents/handlers/content.rs b/api/crates/presentation/src/http/documents/handlers/content.rs index 9a0b15c0..266a5677 100644 --- a/api/crates/presentation/src/http/documents/handlers/content.rs +++ b/api/crates/presentation/src/http/documents/handlers/content.rs @@ -1,10 +1,7 @@ use axum::{ Json, extract::{Path, Query, State}, - http::{HeaderMap, HeaderValue, StatusCode}, - response::{IntoResponse, Response}, }; -use serde_json::{Value, json}; use uuid::Uuid; use crate::context::DocumentsContext; @@ -12,29 +9,64 @@ use crate::http::error::ApiError; use crate::http::extractors::AuthedUser; use crate::security::token::{self, Bearer}; use application::core::services::access; -use application::core::services::errors::ServiceError; use 
application::documents::services::DocumentPatchOperation; -#[allow(unused_imports)] use crate::http::documents::types::{ - Document, DocumentArchiveBinary, DocumentDownloadBinary, DownloadDocumentQuery, DownloadFormat, + Document, DocumentPatchOperationRequest, EncryptedUpdateEntry, GetContentResponse, PatchDocumentContentRequest, SnapshotTokenQuery, UpdateDocumentContentRequest, map_service_error, to_http_document, }; -#[utoipa::path(get, path = "/api/documents/{id}/content", tag = "Documents", params(("id" = Uuid, Path, description = "Document ID"),), responses((status = 200)))] +#[utoipa::path( + get, + path = "/api/documents/{id}/content", + tag = "Documents", + params(("id" = Uuid, Path, description = "Document ID")), + responses((status = 200, body = GetContentResponse)) +)] pub async fn get_document_content( State(ctx): State, auth: AuthedUser, Path(id): Path, -) -> Result, ApiError> { +) -> Result, ApiError> { + use base64::Engine; + let actor = access::Actor::User(auth.user_id); let service = ctx.document_service(); + + // Return Yjs snapshot bytes as Base64 let content = service .get_content(&actor, id) .await .map_err(map_service_error)?; - Ok(Json(json!({"content": content}))) + + let updates = content.updates.map(|updates| { + updates + .into_iter() + .map(|u| EncryptedUpdateEntry { + seq: u.seq, + data: base64::engine::general_purpose::STANDARD.encode(&u.data), + nonce: u + .nonce + .map(|n| base64::engine::general_purpose::STANDARD.encode(&n)), + signature: u + .signature + .map(|s| base64::engine::general_purpose::STANDARD.encode(&s)), + public_key: u + .public_key + .map(|p| base64::engine::general_purpose::STANDARD.encode(&p)), + }) + .collect() + }); + + Ok(Json(GetContentResponse { + content: base64::engine::general_purpose::STANDARD.encode(&content.content), + nonce: content + .nonce + .map(|n| base64::engine::general_purpose::STANDARD.encode(&n)), + seq_at_snapshot: content.seq_at_snapshot, + updates, + })) } #[utoipa::path( @@ -55,6 +87,8 @@ pub async fn update_document_content( q: Option>, Json(body): Json, ) -> Result, ApiError> { + use base64::Engine; + let params = q.map(|Query(v)| v).unwrap_or_default(); let token = params.token.as_deref(); let actor = token::resolve_actor_from_parts(&ctx, bearer, token) @@ -62,10 +96,44 @@ pub async fn update_document_content( .map_err(token::map_actor_error)? .ok_or(ApiError::unauthorized("unauthorized"))?; let service = ctx.document_service(); - let updated = service - .update_content(&actor, id, &body.content) - .await - .map_err(map_service_error)?; + + // Check if this is an E2EE update (nonce provided) + let updated = if body.nonce.is_some() { + // E2EE mode: content is Base64 encoded encrypted Yjs state + let content_bytes = base64::engine::general_purpose::STANDARD + .decode(&body.content) + .map_err(|_| ApiError::bad_request("invalid_content_base64"))?; + let nonce_bytes = body + .nonce + .as_ref() + .map(|s| { + base64::engine::general_purpose::STANDARD + .decode(s) + .map_err(|_| ApiError::bad_request("invalid_nonce_base64")) + }) + .transpose()?; + let signature_bytes = body + .signature + .as_ref() + .map(|s| { + base64::engine::general_purpose::STANDARD + .decode(s) + .map_err(|_| ApiError::bad_request("invalid_signature_base64")) + }) + .transpose()?; + + service + .update_content(&actor, id, &content_bytes, nonce_bytes.as_deref(), signature_bytes.as_deref()) + .await + .map_err(map_service_error)? 
+ } else { + // Plaintext mode: content is markdown + service + .update_content_from_markdown(&actor, id, &body.content) + .await + .map_err(map_service_error)? + }; + Ok(Json(to_http_document(updated))) } @@ -87,9 +155,9 @@ pub async fn patch_document_content( q: Option>, Json(body): Json, ) -> Result, ApiError> { - if body.operations.is_empty() { - return Err(ApiError::bad_request("missing_operations")); - } + use application::documents::ports::realtime::realtime_port::EncryptedUpdate; + use base64::Engine; + let params = q.map(|Query(v)| v).unwrap_or_default(); let token = params.token.as_deref(); let actor = token::resolve_actor_from_parts(&ctx, bearer, token) @@ -97,104 +165,85 @@ pub async fn patch_document_content( .map_err(token::map_actor_error)? .ok_or(ApiError::unauthorized("unauthorized"))?; let service = ctx.document_service(); - let operations: Vec = body - .operations - .into_iter() - .map(DocumentPatchOperation::from) - .collect(); - let updated = service - .patch_content(&actor, id, &operations) - .await - .map_err(map_service_error)?; - Ok(Json(to_http_document(updated))) -} -#[utoipa::path( - get, - path = "/api/documents/{id}/download", - tag = "Documents", - operation_id = "download_document", - params( - ("id" = Uuid, Path, description = "Document ID"), - ("token" = Option, Query, description = "Share token (optional)"), - ("format" = Option, Query, description = "Download format (see schema for supported values)") - ), - responses( - (status = 200, description = "Document download", body = DocumentDownloadBinary, content_type = "application/octet-stream"), - (status = 401, description = "Unauthorized"), - (status = 404, description = "Document not found") - ) -)] -pub async fn download_document( - State(ctx): State, - bearer: Option, - Query(params): Query, - Path(id): Path, -) -> Result { - let token = params.token.as_deref(); - let format = params.format; + if body.operations.is_empty() { + return Err(ApiError::bad_request("missing_operations")); + } - let actor = match token::resolve_actor_from_parts(&ctx, bearer, token).await { - Ok(Some(actor)) => actor, - Ok(None) => return Err(ApiError::unauthorized("unauthorized")), - Err(err) => return Err(token::map_actor_error(err)), - }; + // Check if any operation has encrypted_data (E2EE mode) + let has_encrypted = body.operations.iter().any(|op| op.is_encrypted()); - let service = ctx.document_service(); - let download = match service.download_document(&actor, id, format.into()).await { - Ok(payload) => payload, - Err(ServiceError::Unauthorized) - | Err(ServiceError::TokenExpired) - | Err(ServiceError::Forbidden) - | Err(ServiceError::NotFound) => { - return Err(ApiError::not_found("not_found")); - } - Err(ServiceError::Conflict) => { - return Err(ApiError::conflict("conflict")); - } - Err(ServiceError::BadRequest(_)) => { - return Err(ApiError::bad_request("bad_request")); - } - Err(ServiceError::Unexpected(error)) => { - tracing::error!( - document_id = %id, - ?format, - error = ?error, - "document_download_failed" - ); - return Err(ApiError::new( - StatusCode::INTERNAL_SERVER_ERROR, - "internal_error", - )); - } - }; + let updated = if has_encrypted { + // E2EE mode: convert operations with encrypted_data to EncryptedUpdate + let encrypted_updates: Vec = body + .operations + .iter() + .filter_map(|op| { + match op { + DocumentPatchOperationRequest::Insert { + encrypted_data: Some(encrypted_data), + nonce, + signature, + public_key, + .. 
+ } + | DocumentPatchOperationRequest::Replace { + encrypted_data: Some(encrypted_data), + nonce, + signature, + public_key, + .. + } => { + let data = base64::engine::general_purpose::STANDARD + .decode(encrypted_data) + .ok()?; + let nonce_bytes = nonce.as_ref().and_then(|n| { + base64::engine::general_purpose::STANDARD.decode(n).ok() + }); + let signature_bytes = signature.as_ref().and_then(|s| { + base64::engine::general_purpose::STANDARD.decode(s).ok() + }); + let public_key_bytes = public_key.as_ref().and_then(|p| { + base64::engine::general_purpose::STANDARD.decode(p).ok() + }); + Some(EncryptedUpdate { + data, + nonce: nonce_bytes, + signature: signature_bytes, + public_key: public_key_bytes, + }) + } + _ => None, + } + }) + .collect(); - let mut headers = HeaderMap::new(); - let content_type = match HeaderValue::from_str(&download.content_type) { - Ok(value) => value, - Err(_) => { - return Err(ApiError::new( - StatusCode::INTERNAL_SERVER_ERROR, - "internal_error", - )); + if encrypted_updates.is_empty() { + return Err(ApiError::bad_request("no_encrypted_data_in_operations")); } - }; - headers.insert(axum::http::header::CONTENT_TYPE, content_type); - headers.insert( - axum::http::header::HeaderName::from_static("x-content-type-options"), - HeaderValue::from_static("nosniff"), - ); - let disposition = format!("attachment; filename=\"{}\"", download.filename); - let content_disposition = match HeaderValue::from_str(&disposition) { - Ok(value) => value, - Err(_) => { - return Err(ApiError::new( - StatusCode::INTERNAL_SERVER_ERROR, - "internal_error", - )); + + service + .patch_content(&actor, id, None, Some(&encrypted_updates)) + .await + .map_err(map_service_error)? + } else { + // Plaintext mode: convert operations with text to DocumentPatchOperation + let plaintext_operations: Vec = body + .operations + .iter() + .filter_map(|op| op.to_plaintext_operation()) + .collect(); + + if plaintext_operations.is_empty() { + return Err(ApiError::bad_request("no_text_in_operations")); } + + service + .patch_content(&actor, id, Some(&plaintext_operations), None) + .await + .map_err(map_service_error)? 
 };
-    headers.insert(axum::http::header::CONTENT_DISPOSITION, content_disposition);
-    Ok((headers, download.bytes).into_response())
+    Ok(Json(to_http_document(updated)))
 }
+
diff --git a/api/crates/presentation/src/http/documents/handlers/crud.rs b/api/crates/presentation/src/http/documents/handlers/crud.rs
index b89ef78f..57f942db 100644
--- a/api/crates/presentation/src/http/documents/handlers/crud.rs
+++ b/api/crates/presentation/src/http/documents/handlers/crud.rs
@@ -20,8 +20,7 @@ use crate::http::documents::types::{
 #[utoipa::path(get, path = "/api/documents", tag = "Documents",
     params(
-        ("query" = Option<String>, Query, description = "Search query"),
-        ("tag" = Option<String>, Query, description = "Filter by tag"),
+        ("tag" = Option<String>, Query, description = "Filter by encrypted tag (Base64 encoded)"),
         ("state" = Option<String>, Query, description = "Filter by document state (active|archived|all)")
     ),
     responses((status = 200, body = DocumentListResponse)))]
@@ -30,21 +29,52 @@ pub async fn list_documents(
     State(ctx): State<DocumentsContext>,
     auth: WorkspaceAuth,
     q: Option>,
 ) -> Result<Json<DocumentListResponse>, ApiError> {
+    use base64::Engine;
+    use std::collections::HashSet;
+
     auth.ensure_permission(PERM_DOC_VIEW)?;
-    let (qstr, tag, state_param) = q
-        .map(|Query(v)| (v.query, v.tag, v.state))
-        .unwrap_or((None, None, None));
+    let (tag, state_param) = q
+        .map(|Query(v)| (v.tag, v.state))
+        .unwrap_or((None, None));
     let state = state_param
         .map(DocumentStateFilter::into)
         .unwrap_or_default();
     let service = ctx.document_service();
+
+    // If tag filter is provided, decode as encrypted tag and find matching documents
+    let tag_filter_ids: Option<HashSet<Uuid>> = if let Some(ref encrypted_tag_b64) = tag {
+        let encrypted_tag = base64::engine::general_purpose::STANDARD
+            .decode(encrypted_tag_b64)
+            .map_err(|_| ApiError::bad_request("invalid_encrypted_tag_base64"))?;
+
+        let tag_service = ctx.tag_service();
+        let doc_ids = tag_service
+            .find_documents_by_encrypted_tag(auth.workspace_id, encrypted_tag)
+            .await
+            .map_err(map_service_error)?;
+
+        Some(doc_ids.into_iter().collect())
+    } else {
+        None
+    };
+
+    // Get all documents (without plaintext tag filter)
     let docs = service
-        .list_for_user(auth.workspace_id, qstr, tag, state)
+        .list_for_user(auth.workspace_id, None, state)
        .await
        .map_err(map_service_error)?;
-    let items: Vec<Document> = docs.into_iter().map(to_http_document).collect();
+    // Filter by encrypted tag if provided
+    let filtered_docs = if let Some(ref filter_ids) = tag_filter_ids {
+        docs.into_iter()
+            .filter(|doc| filter_ids.contains(&doc.id()))
+            .collect()
+    } else {
+        docs
+    };
+
+    let items: Vec<Document> = filtered_docs.into_iter().map(to_http_document).collect();
     Ok(Json(DocumentListResponse { items }))
 }
@@ -54,12 +84,35 @@ pub async fn create_document(
     State(ctx): State<DocumentsContext>,
     auth: WorkspaceAuth,
     Json(req): Json<CreateDocumentRequest>,
 ) -> Result<Json<Document>, ApiError> {
+    use base64::Engine;
+
     let title = req.title.unwrap_or_else(|| "Untitled".into());
     let dtype = req
         .r#type
         .unwrap_or_else(|| DocumentType::Document.as_str().to_string());
     let doc_type = DocumentType::try_from(dtype.as_str())
         .map_err(|_| ApiError::bad_request("invalid_document_type"))?;
+
+    // Decode E2EE fields if provided
+    let encrypted_title = req
+        .encrypted_title
+        .as_ref()
+        .map(|s| {
+            base64::engine::general_purpose::STANDARD
+                .decode(s)
+                .map_err(|_| ApiError::bad_request("invalid_encrypted_title_base64"))
+        })
+        .transpose()?;
+    let encrypted_title_nonce = req
+        .encrypted_title_nonce
+        .as_ref()
+        .map(|s| {
+            base64::engine::general_purpose::STANDARD
+                .decode(s)
+                .map_err(|_| ApiError::bad_request("invalid_encrypted_title_nonce_base64"))
+        })
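+        // `transpose` turns Option<Result<T, E>> into Result<Option<T>, E>: an absent
+        // field stays None, while malformed base64 fails the whole request.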
+ .transpose()?; + let service = ctx.document_service(); let doc = service .create_for_user( @@ -74,6 +127,30 @@ pub async fn create_document( .await .map_err(map_service_error)?; + // Store DEK if provided (E2EE mode) + if let Some(dek_payload) = req.dek { + let (encrypted_dek, nonce, key_version) = dek_payload + .decode() + .map_err(|e| ApiError::bad_request(e))?; + let keys_service = ctx.document_keys_service(); + keys_service + .store_document_key(doc.id(), encrypted_dek, nonce, key_version) + .await + .map_err(|e| { + tracing::error!(error = ?e, "failed_to_store_document_key"); + ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "failed_to_store_document_key") + })?; + } + + // Store encrypted title if provided (E2EE mode) + // TODO: In the future, combine this with document creation in a single transaction + if let (Some(enc_title), Some(enc_nonce)) = (encrypted_title, encrypted_title_nonce) { + service + .update_encrypted_title(doc.id(), enc_title, enc_nonce) + .await + .map_err(map_service_error)?; + } + Ok(Json(to_http_document(doc))) } diff --git a/api/crates/presentation/src/http/documents/handlers/mod.rs b/api/crates/presentation/src/http/documents/handlers/mod.rs index 32bd27a8..e4e2fd97 100644 --- a/api/crates/presentation/src/http/documents/handlers/mod.rs +++ b/api/crates/presentation/src/http/documents/handlers/mod.rs @@ -1,11 +1,9 @@ pub mod content; pub mod crud; pub mod links; -pub mod search; pub mod snapshots; pub use content::*; pub use crud::*; pub use links::*; -pub use search::*; pub use snapshots::*; diff --git a/api/crates/presentation/src/http/documents/handlers/search.rs b/api/crates/presentation/src/http/documents/handlers/search.rs deleted file mode 100644 index a140d5a0..00000000 --- a/api/crates/presentation/src/http/documents/handlers/search.rs +++ /dev/null @@ -1,40 +0,0 @@ -use axum::{ - Json, - extract::{Query, State}, -}; - -use crate::context::DocumentsContext; -use crate::http::error::ApiError; -use crate::http::extractors::WorkspaceAuth; -use domain::access::permissions::PERM_DOC_VIEW; - -use crate::http::documents::types::{SearchQuery, SearchResult, map_service_error}; - -#[utoipa::path(get, path = "/api/documents/search", tag = "Documents", - params(("q" = Option, Query, description = "Query")), - responses((status = 200, body = [SearchResult])))] -pub async fn search_documents( - State(ctx): State, - auth: WorkspaceAuth, - q: Option>, -) -> Result>, ApiError> { - auth.ensure_permission(PERM_DOC_VIEW)?; - let query_text = q.and_then(|Query(v)| v.q); - - let service = ctx.document_service(); - let hits = service - .search_for_user(auth.workspace_id, query_text, 20) - .await - .map_err(map_service_error)?; - let items = hits - .into_iter() - .map(|h| SearchResult { - id: h.id, - title: h.title.into_string(), - document_type: h.doc_type.to_string(), - path: h.path, - updated_at: h.updated_at, - }) - .collect(); - Ok(Json(items)) -} diff --git a/api/crates/presentation/src/http/documents/handlers/snapshots.rs b/api/crates/presentation/src/http/documents/handlers/snapshots.rs index 4966c011..8e8624d3 100644 --- a/api/crates/presentation/src/http/documents/handlers/snapshots.rs +++ b/api/crates/presentation/src/http/documents/handlers/snapshots.rs @@ -1,8 +1,6 @@ use axum::{ Json, extract::{Path, Query, State}, - http::{HeaderMap, HeaderValue, StatusCode}, - response::{IntoResponse, Response}, }; use uuid::Uuid; @@ -10,9 +8,8 @@ use crate::context::DocumentsContext; use crate::http::error::ApiError; use crate::security::token::{self, Bearer}; 
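+// Snapshot content is served base64 encoded; E2EE snapshots additionally carry
+// the nonce they were sealed with (see SnapshotDetailResponse).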
-#[allow(unused_imports)] use crate::http::documents::types::{ - DocumentArchiveBinary, SnapshotDiffBaseParam, SnapshotDiffQuery, SnapshotDiffResponse, + SnapshotDetailResponse, SnapshotDiffBaseParam, SnapshotDiffQuery, SnapshotDiffResponse, SnapshotListResponse, SnapshotRestoreResponse, SnapshotTokenQuery, map_service_error, snapshot_diff_side_response_from, snapshot_summary_from, }; @@ -55,6 +52,48 @@ pub async fn list_document_snapshots( Ok(Json(SnapshotListResponse { items })) } +#[utoipa::path( + get, + path = "/api/documents/{id}/snapshots/{snapshot_id}", + tag = "Documents", + params( + ("id" = Uuid, Path, description = "Document ID"), + ("snapshot_id" = Uuid, Path, description = "Snapshot ID"), + ("token" = Option, Query, description = "Share token (optional)") + ), + responses((status = 200, body = SnapshotDetailResponse)) +)] +pub async fn get_document_snapshot( + State(ctx): State, + bearer: Option, + Path((id, snapshot_id)): Path<(Uuid, Uuid)>, + q: Option>, +) -> Result, ApiError> { + use base64::Engine; + + let params = q.map(|Query(v)| v).unwrap_or_default(); + let token = params.token.as_deref(); + let actor = token::resolve_actor_from_parts(&ctx, bearer, token) + .await + .map_err(token::map_actor_error)? + .ok_or(ApiError::unauthorized("unauthorized"))?; + + let service = ctx.document_service(); + let detail = service + .get_snapshot(&actor, id, snapshot_id) + .await + .map_err(map_service_error)?; + + Ok(Json(SnapshotDetailResponse { + id: detail.id, + content: base64::engine::general_purpose::STANDARD.encode(&detail.content), + nonce: detail + .nonce + .map(|n| base64::engine::general_purpose::STANDARD.encode(&n)), + created_at: detail.created_at, + })) +} + #[utoipa::path( get, path = "/api/documents/{id}/snapshots/{snapshot_id}/diff", @@ -134,49 +173,3 @@ pub async fn restore_document_snapshot( })) } -#[utoipa::path( - get, - path = "/api/documents/{id}/snapshots/{snapshot_id}/download", - tag = "Documents", - params( - ("id" = Uuid, Path, description = "Document ID"), - ("snapshot_id" = Uuid, Path, description = "Snapshot ID"), - ("token" = Option, Query, description = "Share token (optional)") - ), - responses( - (status = 200, description = "Snapshot archive", body = DocumentArchiveBinary, content_type = "application/zip"), - (status = 401, description = "Unauthorized"), - (status = 404, description = "Snapshot not found") - ) -)] -pub async fn download_document_snapshot( - State(ctx): State, - bearer: Option, - Path((id, snapshot_id)): Path<(Uuid, Uuid)>, - q: Option>, -) -> Result { - let params = q.map(|Query(v)| v).unwrap_or_default(); - let token = params.token.as_deref(); - let actor = token::resolve_actor_from_parts(&ctx, bearer, token) - .await - .map_err(token::map_actor_error)? 
- .ok_or(ApiError::unauthorized("unauthorized"))?; - - let service = ctx.document_service(); - let download = service - .download_snapshot(&actor, id, snapshot_id) - .await - .map_err(map_service_error)?; - - let mut headers = HeaderMap::new(); - headers.insert( - axum::http::header::CONTENT_TYPE, - HeaderValue::from_static("application/zip"), - ); - let disposition = format!("attachment; filename=\"{}\"", download.filename); - let content_disposition = HeaderValue::from_str(&disposition) - .map_err(|_| ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "internal_error"))?; - headers.insert(axum::http::header::CONTENT_DISPOSITION, content_disposition); - - Ok((headers, download.bytes).into_response()) -} diff --git a/api/crates/presentation/src/http/documents/keys/handlers/mod.rs b/api/crates/presentation/src/http/documents/keys/handlers/mod.rs new file mode 100644 index 00000000..1c35c371 --- /dev/null +++ b/api/crates/presentation/src/http/documents/keys/handlers/mod.rs @@ -0,0 +1,282 @@ +use axum::{ + extract::{Query, State}, + Json, +}; +use uuid::Uuid; + +use crate::context::DocumentsContext; +use crate::http::error::ApiError; +use crate::http::extractors::WorkspaceAuth; +use application::core::services::errors::ServiceError; + +/// Query parameters for share key access +#[derive(Debug, serde::Deserialize, utoipa::IntoParams)] +pub struct ShareKeyQuery { + /// Share token for authentication + pub token: String, +} + +use super::types::{ + DocumentKeyResponse, RotateDocumentKeyRequest, RotateDocumentKeyResponse, ShareKeyResponse, + ShareSaltResponse, StoreDocumentKeyRequest, StorePasswordProtectedShareKeyRequest, + StoreShareKeyRequest, +}; + +fn map_keys_error(err: ServiceError) -> ApiError { + crate::http::error::map_service_error(err, "document_keys_service_error") +} + +// ============================================================================ +// Document Key Endpoints +// ============================================================================ + +#[utoipa::path( + get, + path = "/api/documents/{id}/keys", + tag = "E2EE", + params(("id" = Uuid, Path, description = "Document ID")), + responses( + (status = 200, body = DocumentKeyResponse), + (status = 404, description = "Document key not found") + ) +)] +pub async fn get_document_key( + State(ctx): State, + _auth: WorkspaceAuth, + axum::extract::Path(document_id): axum::extract::Path, +) -> Result, ApiError> { + let service = ctx.document_keys_service(); + let dto = service + .get_document_key(document_id) + .await + .map_err(map_keys_error)? 
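+        // Ok(None) here means no key row exists for this document; map it to 404.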
+        .ok_or_else(|| ApiError::not_found("document_key_not_found"))?;
+
+    Ok(Json(DocumentKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    post,
+    path = "/api/documents/{id}/keys",
+    tag = "E2EE",
+    params(("id" = Uuid, Path, description = "Document ID")),
+    request_body = StoreDocumentKeyRequest,
+    responses((status = 200, body = DocumentKeyResponse))
+)]
+pub async fn store_document_key(
+    State(ctx): State<DocumentsContext>,
+    _auth: WorkspaceAuth,
+    axum::extract::Path(document_id): axum::extract::Path<Uuid>,
+    Json(payload): Json<StoreDocumentKeyRequest>,
+) -> Result<Json<DocumentKeyResponse>, ApiError> {
+    let (encrypted_dek, nonce) = payload
+        .decode()
+        .map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.document_keys_service();
+    let dto = service
+        .store_document_key(document_id, encrypted_dek, nonce, payload.key_version)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(DocumentKeyResponse::from(dto)))
+}
+
+// ============================================================================
+// Share Key Endpoints
+// ============================================================================
+
+#[utoipa::path(
+    get,
+    path = "/api/shares/{id}/keys",
+    tag = "E2EE",
+    params(
+        ("id" = Uuid, Path, description = "Share ID"),
+        ("token" = String, Query, description = "Share token for authentication")
+    ),
+    responses(
+        (status = 200, body = ShareKeyResponse),
+        (status = 401, description = "Invalid or missing share token"),
+        (status = 404, description = "Share key not found")
+    )
+)]
+pub async fn get_share_key(
+    State(ctx): State<DocumentsContext>,
+    axum::extract::Path(share_id): axum::extract::Path<Uuid>,
+    Query(query): Query<ShareKeyQuery>,
+) -> Result<Json<ShareKeyResponse>, ApiError> {
+    // Validate share token and ensure it maps to the requested share_id
+    let share_service = ctx.share_service();
+    let share_ctx = share_service
+        .resolve_share_context(&query.token)
+        .await
+        .map_err(|e| {
+            tracing::warn!(error = ?e, share_id = %share_id, "share_token_validation_failed");
+            ApiError::unauthorized("invalid_share_token")
+        })?
+        .ok_or_else(|| ApiError::unauthorized("invalid_share_token"))?;
+
+    // Ensure the token's share_id matches the requested share_id
+    if share_ctx.share_id != share_id {
+        tracing::warn!(
+            requested_share_id = %share_id,
+            token_share_id = %share_ctx.share_id,
+            "share_id_mismatch"
+        );
+        return Err(ApiError::unauthorized("share_id_mismatch"));
+    }
+
+    let service = ctx.document_keys_service();
+    let dto = service
+        .get_share_key(share_id)
+        .await
+        .map_err(map_keys_error)?
+        .ok_or_else(|| ApiError::not_found("share_key_not_found"))?;
+
+    Ok(Json(ShareKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/shares/{id}/salt",
+    tag = "E2EE",
+    params(
+        ("id" = Uuid, Path, description = "Share ID"),
+        ("token" = String, Query, description = "Share token for authentication")
+    ),
+    responses(
+        (status = 200, body = ShareSaltResponse),
+        (status = 401, description = "Invalid or missing share token")
+    )
+)]
+pub async fn get_share_salt(
+    State(ctx): State<DocumentsContext>,
+    axum::extract::Path(share_id): axum::extract::Path<Uuid>,
+    Query(query): Query<ShareKeyQuery>,
+) -> Result<Json<ShareSaltResponse>, ApiError> {
+    use base64::Engine;
+
+    // Validate share token and ensure it maps to the requested share_id
+    let share_service = ctx.share_service();
+    let share_ctx = share_service
+        .resolve_share_context(&query.token)
+        .await
+        .map_err(|e| {
+            tracing::warn!(error = ?e, share_id = %share_id, "share_token_validation_failed");
+            ApiError::unauthorized("invalid_share_token")
+        })?
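+        // A token that resolves to no share is reported exactly like a bad token,
+        // so callers cannot probe for share existence.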
+ .ok_or_else(|| ApiError::unauthorized("invalid_share_token"))?; + + // Ensure the token's share_id matches the requested share_id + if share_ctx.share_id != share_id { + tracing::warn!( + requested_share_id = %share_id, + token_share_id = %share_ctx.share_id, + "share_id_mismatch" + ); + return Err(ApiError::unauthorized("share_id_mismatch")); + } + + let service = ctx.document_keys_service(); + let salt = service + .get_share_salt(share_id) + .await + .map_err(map_keys_error)?; + + Ok(Json(ShareSaltResponse { + share_id, + salt: salt.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)), + })) +} + +#[utoipa::path( + post, + path = "/api/shares/{id}/keys", + tag = "E2EE", + params(("id" = Uuid, Path, description = "Share ID")), + request_body = StoreShareKeyRequest, + responses((status = 200, body = ShareKeyResponse)) +)] +pub async fn store_share_key( + State(ctx): State, + _auth: WorkspaceAuth, + axum::extract::Path(share_id): axum::extract::Path, + Json(payload): Json, +) -> Result, ApiError> { + let encrypted_dek = payload + .decode() + .map_err(|e| ApiError::bad_request(e))?; + + let service = ctx.document_keys_service(); + let dto = service + .store_share_key(share_id, encrypted_dek, None, None) + .await + .map_err(map_keys_error)?; + + Ok(Json(ShareKeyResponse::from(dto))) +} + +#[utoipa::path( + post, + path = "/api/shares/{id}/keys/password-protected", + tag = "E2EE", + params(("id" = Uuid, Path, description = "Share ID")), + request_body = StorePasswordProtectedShareKeyRequest, + responses((status = 200, body = ShareKeyResponse)) +)] +pub async fn store_password_protected_share_key( + State(ctx): State, + _auth: WorkspaceAuth, + axum::extract::Path(share_id): axum::extract::Path, + Json(payload): Json, +) -> Result, ApiError> { + let (encrypted_dek, salt, kdf_params) = payload + .decode() + .map_err(|e| ApiError::bad_request(e))?; + + let service = ctx.document_keys_service(); + let dto = service + .store_password_protected_share_key(share_id, encrypted_dek, salt, kdf_params, None, None) + .await + .map_err(map_keys_error)?; + + Ok(Json(ShareKeyResponse::from(dto))) +} + +// ============================================================================ +// Document Key Rotation +// ============================================================================ + +#[utoipa::path( + post, + path = "/api/documents/{id}/keys/rotate", + tag = "E2EE", + params(("id" = Uuid, Path, description = "Document ID")), + request_body = RotateDocumentKeyRequest, + responses( + (status = 200, body = RotateDocumentKeyResponse), + (status = 400, description = "Invalid request"), + (status = 403, description = "Permission denied") + ) +)] +pub async fn rotate_document_key( + State(ctx): State, + _auth: WorkspaceAuth, + axum::extract::Path(document_id): axum::extract::Path, + Json(payload): Json, +) -> Result, ApiError> { + let (encrypted_dek, nonce) = payload + .decode() + .map_err(|e| ApiError::bad_request(e))?; + + let service = ctx.document_keys_service(); + let new_version = service + .rotate_document_key(document_id, encrypted_dek, nonce) + .await + .map_err(map_keys_error)?; + + Ok(Json(RotateDocumentKeyResponse { + document_id, + new_key_version: new_version, + })) +} diff --git a/api/crates/presentation/src/http/documents/keys/mod.rs b/api/crates/presentation/src/http/documents/keys/mod.rs new file mode 100644 index 00000000..33672927 --- /dev/null +++ b/api/crates/presentation/src/http/documents/keys/mod.rs @@ -0,0 +1,38 @@ +mod handlers; +pub mod types; + +use axum::routing::{get, 
post}; +use axum::Router; + +use crate::context::AppContext; + +pub use handlers::{ + get_document_key, get_share_key, get_share_salt, rotate_document_key, store_document_key, + store_password_protected_share_key, store_share_key, +}; +pub use types::*; + +pub mod openapi { + pub use super::handlers::*; +} + +pub fn routes(ctx: AppContext) -> Router { + Router::new() + // Document key endpoints + .route( + "/documents/:id/keys", + get(get_document_key).post(store_document_key), + ) + .route("/documents/:id/keys/rotate", post(rotate_document_key)) + // Share key endpoints + .route( + "/shares/:id/keys", + get(get_share_key).post(store_share_key), + ) + .route( + "/shares/:id/keys/password-protected", + post(store_password_protected_share_key), + ) + .route("/shares/:id/salt", get(get_share_salt)) + .with_state(ctx) +} diff --git a/api/crates/presentation/src/http/documents/keys/types.rs b/api/crates/presentation/src/http/documents/keys/types.rs new file mode 100644 index 00000000..1ea37b31 --- /dev/null +++ b/api/crates/presentation/src/http/documents/keys/types.rs @@ -0,0 +1,218 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +use uuid::Uuid; + +use application::documents::dtos::{DocumentEncryptedKeyDto, ShareEncryptedKeyDto}; +use domain::identity::keys::KdfParams; + +// ============================================================================ +// Document Key Types +// ============================================================================ + +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DocumentKeyResponse { + pub document_id: Uuid, + #[schema(value_type = String, format = "byte")] + pub encrypted_dek: String, // base64 encoded + #[schema(value_type = String, format = "byte")] + pub nonce: String, // base64 encoded + pub key_version: i32, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +impl From for DocumentKeyResponse { + fn from(dto: DocumentEncryptedKeyDto) -> Self { + use base64::Engine; + Self { + document_id: dto.document_id, + encrypted_dek: base64::engine::general_purpose::STANDARD.encode(&dto.encrypted_dek), + nonce: base64::engine::general_purpose::STANDARD.encode(&dto.nonce), + key_version: dto.key_version, + created_at: dto.created_at, + updated_at: dto.updated_at, + } + } +} + +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct StoreDocumentKeyRequest { + /// Base64 encoded encrypted DEK + #[schema(value_type = String, format = "byte")] + pub encrypted_dek: String, + /// Base64 encoded nonce + #[schema(value_type = String, format = "byte")] + pub nonce: String, + /// Key version + pub key_version: i32, +} + +impl StoreDocumentKeyRequest { + pub fn decode(&self) -> Result<(Vec, Vec), &'static str> { + use base64::Engine; + let encrypted_dek = base64::engine::general_purpose::STANDARD + .decode(&self.encrypted_dek) + .map_err(|_| "invalid_encrypted_dek_base64")?; + let nonce = base64::engine::general_purpose::STANDARD + .decode(&self.nonce) + .map_err(|_| "invalid_nonce_base64")?; + Ok((encrypted_dek, nonce)) + } +} + +// ============================================================================ +// Share Key Types +// ============================================================================ + +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ShareKeyResponse { + pub share_id: Uuid, + #[schema(value_type = String, format = "byte")] + pub encrypted_dek: String, // base64 encoded + 
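+    // Salt and KDF parameters below are present only for password-protected shares: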
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub salt: Option<String>, // base64 encoded, for password-protected shares
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub kdf_params: Option<KdfParamsResponse>,
+    pub is_password_protected: bool,
+    pub created_at: DateTime<Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct KdfParamsResponse {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub memory: Option,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub iterations: Option,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub parallelism: Option,
+}
+
+impl From<&KdfParams> for KdfParamsResponse {
+    fn from(params: &KdfParams) -> Self {
+        Self {
+            memory: params.memory,
+            iterations: params.iterations,
+            parallelism: params.parallelism,
+        }
+    }
+}
+
+impl From<KdfParamsResponse> for KdfParams {
+    fn from(resp: KdfParamsResponse) -> Self {
+        Self {
+            memory: resp.memory,
+            iterations: resp.iterations,
+            parallelism: resp.parallelism,
+        }
+    }
+}
+
+impl From<ShareEncryptedKeyDto> for ShareKeyResponse {
+    fn from(dto: ShareEncryptedKeyDto) -> Self {
+        use base64::Engine;
+        let is_password_protected = dto.is_password_protected();
+        Self {
+            share_id: dto.share_id,
+            encrypted_dek: base64::engine::general_purpose::STANDARD.encode(&dto.encrypted_dek),
+            salt: dto.salt.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)),
+            kdf_params: dto.kdf_params.as_ref().map(KdfParamsResponse::from),
+            is_password_protected,
+            created_at: dto.created_at,
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct StoreShareKeyRequest {
+    /// Base64 encoded encrypted DEK
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_dek: String,
+}
+
+impl StoreShareKeyRequest {
+    pub fn decode(&self) -> Result<Vec<u8>, &'static str> {
+        use base64::Engine;
+        base64::engine::general_purpose::STANDARD
+            .decode(&self.encrypted_dek)
+            .map_err(|_| "invalid_encrypted_dek_base64")
+    }
+}
+
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct StorePasswordProtectedShareKeyRequest {
+    /// Base64 encoded encrypted DEK
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_dek: String,
+    /// Base64 encoded salt
+    #[schema(value_type = String, format = "byte")]
+    pub salt: String,
+    /// KDF parameters
+    pub kdf_params: KdfParamsResponse,
+}
+
+impl StorePasswordProtectedShareKeyRequest {
+    pub fn decode(&self) -> Result<(Vec<u8>, Vec<u8>, KdfParams), &'static str> {
+        use base64::Engine;
+        let encrypted_dek = base64::engine::general_purpose::STANDARD
+            .decode(&self.encrypted_dek)
+            .map_err(|_| "invalid_encrypted_dek_base64")?;
+        let salt = base64::engine::general_purpose::STANDARD
+            .decode(&self.salt)
+            .map_err(|_| "invalid_salt_base64")?;
+        let kdf_params = KdfParams::from(self.kdf_params.clone());
+        Ok((encrypted_dek, salt, kdf_params))
+    }
+}
+
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ShareSaltResponse {
+    pub share_id: Uuid,
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub salt: Option<String>, // base64 encoded
+}
+
+// ============================================================================
+// Document Key Rotation Types
+// ============================================================================
+
+/// Request body for document DEK rotation
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct RotateDocumentKeyRequest {
+    /// Base64 encoded new encrypted DEK
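+    /// (wrapped client side; the server stores it opaquely and bumps the key version)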
+ #[schema(value_type = String, format = "byte")] + pub encrypted_dek: String, + /// Base64 encoded nonce + #[schema(value_type = String, format = "byte")] + pub nonce: String, +} + +impl RotateDocumentKeyRequest { + pub fn decode(&self) -> Result<(Vec, Vec), &'static str> { + use base64::Engine; + let encrypted_dek = base64::engine::general_purpose::STANDARD + .decode(&self.encrypted_dek) + .map_err(|_| "invalid_encrypted_dek_base64")?; + let nonce = base64::engine::general_purpose::STANDARD + .decode(&self.nonce) + .map_err(|_| "invalid_nonce_base64")?; + Ok((encrypted_dek, nonce)) + } +} + +/// Response for document DEK rotation +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct RotateDocumentKeyResponse { + pub document_id: Uuid, + pub new_key_version: i32, +} diff --git a/api/crates/presentation/src/http/documents/mod.rs b/api/crates/presentation/src/http/documents/mod.rs index e465b03c..4670acd1 100644 --- a/api/crates/presentation/src/http/documents/mod.rs +++ b/api/crates/presentation/src/http/documents/mod.rs @@ -1,5 +1,6 @@ pub mod files; mod handlers; +pub mod keys; pub mod publishing; pub mod sharing; pub mod tagging; @@ -13,11 +14,10 @@ use axum::{ use crate::context::AppContext; pub use handlers::{ - archive_document, create_document, delete_document, download_document, - download_document_snapshot, duplicate_document, get_backlinks, get_document, - get_document_content, get_document_snapshot_diff, get_outgoing_links, list_document_snapshots, - list_documents, patch_document_content, restore_document_snapshot, search_documents, - unarchive_document, update_document, update_document_content, + archive_document, create_document, delete_document, duplicate_document, get_backlinks, + get_document, get_document_content, get_document_snapshot, get_document_snapshot_diff, + get_outgoing_links, list_document_snapshots, list_documents, patch_document_content, + restore_document_snapshot, unarchive_document, update_document, update_document_content, }; pub use types::*; @@ -44,6 +44,10 @@ pub fn routes(ctx: AppContext) -> Router { .route("/documents/:id/archive", post(archive_document)) .route("/documents/:id/unarchive", post(unarchive_document)) .route("/documents/:id/snapshots", get(list_document_snapshots)) + .route( + "/documents/:id/snapshots/:snapshot_id", + get(get_document_snapshot), + ) .route( "/documents/:id/snapshots/:snapshot_id/diff", get(get_document_snapshot_diff), @@ -52,13 +56,7 @@ pub fn routes(ctx: AppContext) -> Router { "/documents/:id/snapshots/:snapshot_id/restore", post(restore_document_snapshot), ) - .route( - "/documents/:id/snapshots/:snapshot_id/download", - get(download_document_snapshot), - ) - .route("/documents/:id/download", get(download_document)) .route("/documents/:id/backlinks", get(get_backlinks)) .route("/documents/:id/links", get(get_outgoing_links)) - .route("/documents/search", get(search_documents)) .with_state(ctx) } diff --git a/api/crates/presentation/src/http/documents/publishing/handlers/mod.rs b/api/crates/presentation/src/http/documents/publishing/handlers/mod.rs index 1624d4ca..2beafa6e 100644 --- a/api/crates/presentation/src/http/documents/publishing/handlers/mod.rs +++ b/api/crates/presentation/src/http/documents/publishing/handlers/mod.rs @@ -11,7 +11,7 @@ use crate::http::error::ApiError; use crate::http::extractors::WorkspaceAuth; use application::core::services::errors::ServiceError; -use super::types::{PublicDocumentSummary, PublishResponse}; +use super::types::{PublicDocumentSummary, 
PublicFile, PublishRequest, PublishResponse, UpdatePublishSettingsRequest, UploadPublicFileRequest}; fn map_public_error(err: ServiceError) -> crate::http::error::ApiError { crate::http::error::map_service_error(err, "public_service_error") @@ -22,21 +22,36 @@ fn map_public_error(err: ServiceError) -> crate::http::error::ApiError { path = "/api/public/documents/{id}", tag = "Public Documents", params(("id" = Uuid, Path, description = "Document ID")), + request_body(content = Option, description = "Optional plaintext content for E2EE workspaces"), responses((status = 200, description = "Published", body = PublishResponse)) )] pub async fn publish_document( State(ctx): State, auth: WorkspaceAuth, Path(id): Path, + body: Option>, ) -> Result, ApiError> { - let service = ctx.public_service(); - let out = service - .publish_document(auth.workspace_id, &auth.permissions, id) + let (plaintext_title, plaintext_content, noindex) = body + .map(|Json(req)| (req.plaintext_title, req.plaintext_content, req.noindex.unwrap_or(true))) + .unwrap_or((None, None, true)); + + let out = ctx + .public_service() + .publish_document( + auth.workspace_id, + &auth.permissions, + id, + plaintext_title.as_deref(), + plaintext_content.as_deref(), + noindex, + ) .await .map_err(map_public_error)?; + Ok(Json(PublishResponse { slug: out.slug, public_url: out.public_url, + noindex: out.noindex, })) } @@ -84,9 +99,36 @@ pub async fn get_publish_status( Ok(Json(PublishResponse { slug: out.slug, public_url: out.public_url, + noindex: out.noindex, })) } +#[utoipa::path( + patch, + path = "/api/public/documents/{id}", + tag = "Public Documents", + params(("id" = Uuid, Path, description = "Document ID")), + request_body(content = UpdatePublishSettingsRequest, description = "Settings to update"), + responses((status = 204, description = "Settings updated")) +)] +pub async fn update_publish_settings( + State(ctx): State, + auth: WorkspaceAuth, + Path(id): Path, + Json(req): Json, +) -> Result { + let updated = ctx + .public_service() + .update_noindex(auth.workspace_id, &auth.permissions, id, req.noindex) + .await + .map_err(map_public_error)?; + if updated { + Ok(StatusCode::NO_CONTENT) + } else { + Err(ApiError::not_found("not_found")) + } +} + // Slug-based endpoints are intentionally omitted to simplify routing and match legacy pattern strictly. 
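+// Publish flow for E2EE workspaces, as a client-side sketch (illustrative only;
+// `decrypt_document` and the reqwest-style client are hypothetical helpers):
+//
+//     let (title, markdown) = decrypt_document(&dek, &doc)?;
+//     client
+//         .post(format!("{base}/api/public/documents/{id}"))
+//         .json(&serde_json::json!({
+//             "plaintextTitle": title,
+//             "plaintextContent": markdown,
+//             "noindex": true,
+//         }))
+//         .send()
+//         .await?;
+//
+// The decrypted copy exists only for public rendering; the canonical document
+// stays encrypted at rest.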
 #[utoipa::path(
@@ -140,10 +182,126 @@ pub async fn get_public_content_by_workspace_and_id(
     State(ctx): State<DocumentsContext>,
     Path((slug, id)): Path<(String, Uuid)>,
 ) -> Result<Json<serde_json::Value>, ApiError> {
-    let content = ctx
+    let (content, noindex) = ctx
         .public_service()
         .get_public_content_by_workspace_and_id(&slug, id)
         .await
         .map_err(map_public_error)?;
-    Ok(Json(serde_json::json!({"content": content, "id": id})))
+    Ok(Json(serde_json::json!({"content": content, "id": id, "noindex": noindex})))
 }
+
+// --- Public file endpoints ---
+
+#[utoipa::path(
+    post,
+    path = "/api/public/documents/{id}/files/{file_id}",
+    tag = "Public Documents",
+    params(
+        ("id" = Uuid, Path, description = "Document ID"),
+        ("file_id" = Uuid, Path, description = "File ID (original encrypted file ID)")
+    ),
+    request_body(content = UploadPublicFileRequest, description = "Decrypted file data"),
+    responses((status = 204, description = "File uploaded"))
+)]
+pub async fn upload_public_file(
+    State(ctx): State<DocumentsContext>,
+    auth: WorkspaceAuth,
+    Path((doc_id, file_id)): Path<(Uuid, Uuid)>,
+    Json(req): Json<UploadPublicFileRequest>,
+) -> Result<StatusCode, ApiError> {
+    use base64::{Engine, engine::general_purpose::STANDARD};
+
+    let bytes = STANDARD
+        .decode(&req.content)
+        .map_err(|_| ApiError::bad_request("invalid_base64"))?;
+
+    ctx.public_service()
+        .store_public_file(
+            auth.workspace_id,
+            &auth.permissions,
+            doc_id,
+            file_id,
+            &req.original_filename,
+            &req.logical_filename,
+            &req.mime_type,
+            &bytes,
+        )
+        .await
+        .map_err(map_public_error)?;
+
+    Ok(StatusCode::NO_CONTENT)
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/public/workspaces/{slug}/{id}/files",
+    tag = "Public Documents",
+    params(
+        ("slug" = String, Path, description = "Workspace slug"),
+        ("id" = Uuid, Path, description = "Document ID")
+    ),
+    responses((status = 200, description = "List of public files", body = [PublicFile]))
+)]
+pub async fn list_public_files(
+    State(ctx): State<DocumentsContext>,
+    Path((slug, doc_id)): Path<(String, Uuid)>,
+) -> Result<Json<Vec<PublicFile>>, ApiError> {
+    let files = ctx
+        .public_service()
+        .get_public_files(&slug, doc_id)
+        .await
+        .map_err(map_public_error)?;
+
+    Ok(Json(
+        files
+            .into_iter()
+            .map(|f| PublicFile {
+                id: f.id,
+                file_id: f.file_id,
+                original_filename: f.original_filename,
+                logical_filename: f.logical_filename,
+                mime_type: f.mime_type,
+                size: f.size,
+                created_at: f.created_at,
+            })
+            .collect(),
+    ))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/public/workspaces/{slug}/{id}/files/{filename}",
+    tag = "Public Documents",
+    params(
+        ("slug" = String, Path, description = "Workspace slug"),
+        ("id" = Uuid, Path, description = "Document ID"),
+        ("filename" = String, Path, description = "Logical filename as it appears in markdown")
+    ),
+    responses(
+        (status = 200, description = "File content", content_type = "application/octet-stream")
+    )
+)]
+pub async fn get_public_file(
+    State(ctx): State<DocumentsContext>,
+    Path((slug, doc_id, filename)): Path<(String, Uuid, String)>,
+) -> Result<impl axum::response::IntoResponse, ApiError> {
+    use axum::http::header;
+
+    let (bytes, meta) = ctx
+        .public_service()
+        .read_public_file_by_logical_filename(&slug, doc_id, &filename)
+        .await
+        .map_err(map_public_error)?;
+
+    // Use inline disposition for images and other displayable content
+    // so browsers can render them in <img> tags
+    let content_disposition = format!("inline; filename=\"{}\"", meta.original_filename);
+
+    Ok((
+        [
+            (header::CONTENT_TYPE, meta.mime_type),
+            (header::CONTENT_DISPOSITION, content_disposition),
+        ],
+        bytes,
+    ))
+}
diff --git a/api/crates/presentation/src/http/documents/publishing/mod.rs
b/api/crates/presentation/src/http/documents/publishing/mod.rs index 9feac42c..9bc26599 100644 --- a/api/crates/presentation/src/http/documents/publishing/mod.rs +++ b/api/crates/presentation/src/http/documents/publishing/mod.rs @@ -7,8 +7,9 @@ use axum::routing::{get, post}; use crate::context::AppContext; pub use handlers::{ - get_public_by_workspace_and_id, get_public_content_by_workspace_and_id, get_publish_status, - list_workspace_public_documents, publish_document, unpublish_document, + get_public_by_workspace_and_id, get_public_content_by_workspace_and_id, get_public_file, + get_publish_status, list_public_files, list_workspace_public_documents, publish_document, + unpublish_document, update_publish_settings, upload_public_file, }; pub use types::*; @@ -22,14 +23,18 @@ pub fn routes(ctx: AppContext) -> Router { "/documents/:id", post(publish_document) .delete(unpublish_document) - .get(get_publish_status), + .get(get_publish_status) + .patch(update_publish_settings), ) + .route("/documents/:id/files/:file_id", post(upload_public_file)) .route("/workspaces/:slug", get(list_workspace_public_documents)) .route("/workspaces/:slug/:id", get(get_public_by_workspace_and_id)) .route( "/workspaces/:slug/:id/content", get(get_public_content_by_workspace_and_id), ) + .route("/workspaces/:slug/:id/files", get(list_public_files)) + .route("/workspaces/:slug/:id/files/:file_id", get(get_public_file)) // legacy aliases .route("/users/:slug", get(list_workspace_public_documents)) .route("/users/:slug/:id", get(get_public_by_workspace_and_id)) diff --git a/api/crates/presentation/src/http/documents/publishing/types.rs b/api/crates/presentation/src/http/documents/publishing/types.rs index 690fac73..4f405174 100644 --- a/api/crates/presentation/src/http/documents/publishing/types.rs +++ b/api/crates/presentation/src/http/documents/publishing/types.rs @@ -1,13 +1,39 @@ -use serde::Serialize; +use serde::{Deserialize, Serialize}; use utoipa::ToSchema; use uuid::Uuid; use application::documents::dtos::PublicDocumentSummaryDto; +/// Request to publish a document. For E2EE workspaces, plaintext title and content +/// must be provided so public pages can be rendered without decryption. 
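+/// For plaintext workspaces the body may be omitted entirely; the server then
+/// publishes from the stored document content.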
+#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct PublishRequest { + /// Plaintext title (required for E2EE mode) + #[serde(default)] + pub plaintext_title: Option, + /// Plaintext content (required for E2EE mode) + #[serde(default)] + pub plaintext_content: Option, + /// If true, adds noindex meta tag to prevent search engine indexing (default: true) + #[serde(default)] + pub noindex: Option, +} + +/// Request to update noindex setting for a published document +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct UpdatePublishSettingsRequest { + /// If true, adds noindex meta tag to prevent search engine indexing + pub noindex: bool, +} + #[derive(Debug, Serialize, ToSchema)] pub struct PublishResponse { pub slug: String, pub public_url: String, + /// If true, noindex meta tag is added to prevent search engine indexing + pub noindex: bool, } #[derive(Debug, Serialize, ToSchema)] @@ -28,3 +54,30 @@ impl From for PublicDocumentSummary { } } } + +/// Request to upload a public file (decrypted attachment for E2EE documents) +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct UploadPublicFileRequest { + /// Original filename (decrypted) + pub original_filename: String, + /// Logical filename as it appears in markdown (e.g., "image.png" from "./attachments/image.png") + pub logical_filename: String, + /// MIME type of the file + pub mime_type: String, + /// Base64 encoded file content + pub content: String, +} + +/// Public file metadata +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct PublicFile { + pub id: Uuid, + pub file_id: Uuid, + pub original_filename: String, + pub logical_filename: String, + pub mime_type: String, + pub size: i64, + pub created_at: chrono::DateTime, +} diff --git a/api/crates/presentation/src/http/documents/sharing/mod.rs b/api/crates/presentation/src/http/documents/sharing/mod.rs index 85543b7e..aa1b40fc 100644 --- a/api/crates/presentation/src/http/documents/sharing/mod.rs +++ b/api/crates/presentation/src/http/documents/sharing/mod.rs @@ -19,7 +19,7 @@ pub use mounts::{ }; pub use shares::{create_share, delete_share, list_document_shares}; pub use types::*; -pub use validation::{browse_share, validate_share_token}; +pub use validation::{browse_share, get_share_salt, validate_share_token}; pub mod openapi { pub use super::active::*; @@ -38,6 +38,7 @@ pub fn routes(ctx: AppContext) -> Router { ) .route("/shares/browse", get(browse_share)) .route("/shares/validate", get(validate_share_token)) + .route("/shares/salt", get(get_share_salt)) .route("/shares/documents/:id", get(list_document_shares)) .route("/shares/applicable", get(list_applicable_shares)) .route( diff --git a/api/crates/presentation/src/http/documents/sharing/shares.rs b/api/crates/presentation/src/http/documents/sharing/shares.rs index 476bba3e..597c7140 100644 --- a/api/crates/presentation/src/http/documents/sharing/shares.rs +++ b/api/crates/presentation/src/http/documents/sharing/shares.rs @@ -3,6 +3,7 @@ use axum::{ extract::{Path, State}, http::StatusCode, }; +use base64::Engine; use uuid::Uuid; use crate::context::DocumentsContext; @@ -10,6 +11,7 @@ use crate::http::error::ApiError; use crate::http::extractors::WorkspaceAuth; use application::core::services::access; use domain::documents::share::SHARE_PERMISSION_VIEW; +use domain::identity::keys::KdfParams; use application::documents::dtos::ShareItemDto; @@ -48,6 +50,96 @@ pub async fn create_share( ) 
.await .map_err(map_share_error)?; + + // Decode E2EE fields + let encrypted_dek = req + .encrypted_dek + .as_ref() + .map(|s| base64::engine::general_purpose::STANDARD.decode(s)) + .transpose() + .map_err(|_| ApiError::bad_request("invalid_encrypted_dek_base64"))?; + let creator_encrypted_share_key = req + .creator_encrypted_share_key + .as_ref() + .map(|s| base64::engine::general_purpose::STANDARD.decode(s)) + .transpose() + .map_err(|_| ApiError::bad_request("invalid_creator_encrypted_share_key_base64"))?; + let creator_share_key_nonce = req + .creator_share_key_nonce + .as_ref() + .map(|s| base64::engine::general_purpose::STANDARD.decode(s)) + .transpose() + .map_err(|_| ApiError::bad_request("invalid_creator_share_key_nonce_base64"))?; + + // Store share key if we have DEK (document) or creator keys (folder for URL recovery) + // Documents: encrypted_dek is Some, folders: encrypted_dek is None but creator keys are Some + let has_creator_keys = creator_encrypted_share_key.is_some() && creator_share_key_nonce.is_some(); + if encrypted_dek.is_some() || has_creator_keys { + let dek = encrypted_dek.unwrap_or_default(); // Empty for folders (no content to encrypt) + let keys_service = ctx.document_keys_service(); + + if let (Some(salt_b64), Some(kdf_params_json)) = (req.salt.clone(), req.kdf_params.clone()) { + // Password-protected share + let salt = base64::engine::general_purpose::STANDARD + .decode(&salt_b64) + .map_err(|_| ApiError::bad_request("invalid_salt_base64"))?; + let kdf_params: KdfParams = serde_json::from_value(kdf_params_json) + .map_err(|_| ApiError::bad_request("invalid_kdf_params"))?; + + keys_service + .store_password_protected_share_key( + res.share_id, + dek, + salt, + kdf_params, + creator_encrypted_share_key, + creator_share_key_nonce, + ) + .await + .map_err(|e| { + tracing::error!(error = ?e, "failed_to_store_share_key"); + ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "failed_to_store_share_key") + })?; + } else { + // URL fragment based share (no password) + keys_service + .store_share_key(res.share_id, dek, creator_encrypted_share_key, creator_share_key_nonce) + .await + .map_err(|e| { + tracing::error!(error = ?e, "failed_to_store_share_key"); + ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "failed_to_store_share_key") + })?; + } + } + + // For folder shares: store child document DEKs + if res.document_type == "folder" { + if let Some(doc_encrypted_deks) = req.document_encrypted_deks { + let keys_service = ctx.document_keys_service(); + let child_shares = service + .list_child_share_info(res.share_id) + .await + .map_err(map_share_error)?; + + for child in child_shares { + let doc_id_str = child.document_id.to_string(); + if let Some(encrypted_dek_b64) = doc_encrypted_deks.get(&doc_id_str) { + let child_dek = base64::engine::general_purpose::STANDARD + .decode(encrypted_dek_b64) + .map_err(|_| ApiError::bad_request("invalid_document_encrypted_dek_base64"))?; + + keys_service + .store_share_key(child.share_id, child_dek, None, None) + .await + .map_err(|e| { + tracing::error!(error = ?e, document_id = %child.document_id, "failed_to_store_child_share_key"); + ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "failed_to_store_child_share_key") + })?; + } + } + } + } + let base = frontend_base(&ctx.cfg); let url = build_share_url(&base, &res.document_type, res.document_id, &res.token); Ok(Json(CreateShareResponse { diff --git a/api/crates/presentation/src/http/documents/sharing/types.rs b/api/crates/presentation/src/http/documents/sharing/types.rs index 
caf43507..af8c300f 100644 --- a/api/crates/presentation/src/http/documents/sharing/types.rs +++ b/api/crates/presentation/src/http/documents/sharing/types.rs @@ -1,3 +1,4 @@ +use base64::Engine; use serde::{Deserialize, Serialize}; use utoipa::ToSchema; use uuid::Uuid; @@ -38,10 +39,36 @@ pub fn map_share_error(err: ServiceError) -> crate::http::error::ApiError { } #[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] pub struct CreateShareRequest { pub document_id: Uuid, pub permission: Option, pub expires_at: Option>, + // E2EE fields - encrypted DEK for share access + /// Base64 encoded encrypted DEK (encrypted with share key derived from password) + #[serde(default)] + #[schema(value_type = Option, format = "byte")] + pub encrypted_dek: Option, + /// Base64 encoded salt for key derivation + #[serde(default)] + #[schema(value_type = Option, format = "byte")] + pub salt: Option, + /// KDF parameters (e.g., Argon2id settings) + #[serde(default)] + pub kdf_params: Option, + /// Base64 encoded share key encrypted with creator's KEK (for URL recovery) + #[serde(default)] + #[schema(value_type = Option, format = "byte")] + pub creator_encrypted_share_key: Option, + /// Base64 encoded nonce for creator_encrypted_share_key + #[serde(default)] + #[schema(value_type = Option, format = "byte")] + pub creator_share_key_nonce: Option, + /// For folder shares: encrypted DEKs for each document in the folder + /// Map of document_id (as string) -> base64 encoded encrypted DEK (nonce prepended) + #[serde(default)] + #[schema(value_type = Option>)] + pub document_encrypted_deks: Option>, } #[derive(Debug, Serialize, ToSchema)] @@ -51,6 +78,7 @@ pub struct CreateShareResponse { } #[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] pub struct ShareItem { pub id: Uuid, pub token: String, @@ -59,6 +87,14 @@ pub struct ShareItem { pub url: String, pub scope: String, pub parent_share_id: Option, + /// Base64 encoded share key encrypted with creator's KEK (for URL recovery) + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(value_type = Option, format = "byte")] + pub creator_encrypted_share_key: Option, + /// Base64 encoded nonce for creator_encrypted_share_key + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(value_type = Option, format = "byte")] + pub creator_share_key_nonce: Option, } #[derive(Debug, Deserialize)] @@ -86,11 +122,24 @@ impl From for ApplicableShareItem { } #[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] pub struct ShareDocumentResponse { pub id: Uuid, pub title: String, pub permission: String, pub content: Option, + // E2EE fields + /// Base64 encoded encrypted DEK (encrypted with share key) + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(value_type = Option, format = "byte")] + pub encrypted_dek: Option, + /// Base64 encoded salt for password-protected shares + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(value_type = Option, format = "byte")] + pub salt: Option, + /// KDF parameters for password-protected shares + #[serde(skip_serializing_if = "Option::is_none")] + pub kdf_params: Option, } impl From for ShareDocumentResponse { @@ -100,6 +149,9 @@ impl From for ShareDocumentResponse { title: d.title, permission: d.permission, content: d.content, + encrypted_dek: None, + salt: None, + kdf_params: None, } } } @@ -109,6 +161,21 @@ pub struct ShareTokenQuery { pub token: String, } +/// Response for share salt challenge (for password-protected shares) +#[derive(Debug, 
Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ShareSaltResponse { + /// Whether this share is password-protected + pub password_protected: bool, + /// Base64 encoded salt for key derivation (only present if password-protected) + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(value_type = Option, format = "byte")] + pub salt: Option, + /// KDF parameters for key derivation (only present if password-protected) + #[serde(skip_serializing_if = "Option::is_none")] + pub kdf_params: Option, +} + #[derive(Debug, Serialize, ToSchema)] pub struct ActiveShareItem { pub id: Uuid, @@ -175,6 +242,7 @@ impl From for ShareMountItem { } #[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] pub struct ShareBrowseTreeItem { pub id: Uuid, pub title: String, @@ -183,6 +251,13 @@ pub struct ShareBrowseTreeItem { pub r#type: String, pub created_at: chrono::DateTime, pub updated_at: chrono::DateTime, + /// Child share token for documents within a folder share + #[serde(skip_serializing_if = "Option::is_none")] + pub share_token: Option, + /// Encrypted DEK for this document (base64, nonce prepended) + #[serde(skip_serializing_if = "Option::is_none")] + #[schema(value_type = Option, format = "byte")] + pub encrypted_dek: Option, } #[derive(Debug, Serialize, ToSchema)] @@ -199,6 +274,8 @@ impl From for ShareBrowseTreeItem { r#type: t.r#type, created_at: t.created_at, updated_at: t.updated_at, + share_token: t.share_token, + encrypted_dek: t.encrypted_dek, } } } @@ -226,7 +303,8 @@ impl ShareItem { document_id, document_type, parent_share_id, - .. + creator_encrypted_share_key, + creator_share_key_nonce, } = dto; let url = build_share_url(base, &document_type, document_id, &token); ShareItem { @@ -237,6 +315,10 @@ impl ShareItem { url, scope: share_scope(&document_type), parent_share_id, + creator_encrypted_share_key: creator_encrypted_share_key + .map(|v| base64::engine::general_purpose::STANDARD.encode(&v)), + creator_share_key_nonce: creator_share_key_nonce + .map(|v| base64::engine::general_purpose::STANDARD.encode(&v)), } } } diff --git a/api/crates/presentation/src/http/documents/sharing/validation.rs b/api/crates/presentation/src/http/documents/sharing/validation.rs index dcd98ef8..b8bf9efa 100644 --- a/api/crates/presentation/src/http/documents/sharing/validation.rs +++ b/api/crates/presentation/src/http/documents/sharing/validation.rs @@ -2,11 +2,12 @@ use axum::{ Json, extract::{Query, State}, }; +use base64::Engine; use crate::context::DocumentsContext; use crate::http::error::ApiError; -use super::types::{ShareBrowseResponse, ShareDocumentResponse, ShareTokenQuery, map_share_error}; +use super::types::{ShareBrowseResponse, ShareDocumentResponse, ShareSaltResponse, ShareTokenQuery, map_share_error}; #[utoipa::path( get, @@ -19,17 +20,87 @@ pub async fn validate_share_token( State(ctx): State, Query(query): Query, ) -> Result, ApiError> { - let service = ctx.share_service(); - let res = service + let share_service = ctx.share_service(); + + // Get basic share document info + let res = share_service .validate_token(&query.token) .await .map_err(map_share_error)?; - let out: ShareDocumentResponse = res + let mut out: ShareDocumentResponse = res .map(Into::into) .ok_or(ApiError::not_found("not_found"))?; + + // Get share context to obtain share_id for E2EE key lookup + if let Ok(Some(share_ctx)) = share_service.resolve_share_context(&query.token).await { + let keys_service = ctx.document_keys_service(); + if let Ok(Some(share_key)) = 
keys_service.get_share_key(share_ctx.share_id).await { + out.encrypted_dek = Some( + base64::engine::general_purpose::STANDARD.encode(&share_key.encrypted_dek), + ); + if let Some(salt) = share_key.salt { + out.salt = Some(base64::engine::general_purpose::STANDARD.encode(&salt)); + } + if let Some(kdf_params) = share_key.kdf_params { + out.kdf_params = serde_json::to_value(&kdf_params).ok(); + } + } + } + Ok(Json(out)) } +/// Get salt for password-protected share (for password challenge) +#[utoipa::path( + get, + path = "/api/shares/salt", + tag = "Sharing", + params(("token" = String, Query, description = "Share token")), + responses((status = 200, description = "Salt info for password-protected share", body = ShareSaltResponse)) +)] +pub async fn get_share_salt( + State(ctx): State, + Query(query): Query, +) -> Result, ApiError> { + let share_service = ctx.share_service(); + + // First validate that the share token exists + let share_ctx = share_service + .resolve_share_context(&query.token) + .await + .map_err(map_share_error)? + .ok_or(ApiError::not_found("not_found"))?; + + // Get share key info + let keys_service = ctx.document_keys_service(); + let share_key = keys_service + .get_share_key(share_ctx.share_id) + .await + .map_err(|e| { + tracing::error!(error = ?e, "failed_to_get_share_key"); + ApiError::not_found("not_found") + })?; + + match share_key { + Some(key) => { + let password_protected = key.salt.is_some(); + Ok(Json(ShareSaltResponse { + password_protected, + salt: key.salt.map(|s| base64::engine::general_purpose::STANDARD.encode(&s)), + kdf_params: key.kdf_params.and_then(|p| serde_json::to_value(&p).ok()), + })) + } + None => { + // No E2EE key stored, share is not encrypted + Ok(Json(ShareSaltResponse { + password_protected: false, + salt: None, + kdf_params: None, + })) + } + } +} + #[utoipa::path(get, path = "/api/shares/browse", tag = "Sharing", params(("token" = String, Query, description = "Share token")), responses((status = 200, description = "Share tree", body = ShareBrowseResponse)))] diff --git a/api/crates/presentation/src/http/documents/tagging/handlers/mod.rs b/api/crates/presentation/src/http/documents/tagging/handlers/mod.rs index 0ffbf918..d1d4dae8 100644 --- a/api/crates/presentation/src/http/documents/tagging/handlers/mod.rs +++ b/api/crates/presentation/src/http/documents/tagging/handlers/mod.rs @@ -1,35 +1,129 @@ use axum::{ Json, - extract::{Query, State}, + extract::{Path, Query, State}, }; +use base64::Engine; +use uuid::Uuid; use crate::context::DocumentsContext; use crate::http::error::ApiError; use crate::http::extractors::WorkspaceAuth; +use application::core::services::access; use application::core::services::errors::ServiceError; -use domain::access::permissions::PERM_DOC_VIEW; +use domain::access::permissions::{PERM_DOC_EDIT, PERM_DOC_VIEW}; -use super::types::TagItem; +use super::types::{ + DocumentTagEntry, DocumentTagsResponse, ListTagsResponse, TagEntry, TagSearchQuery, + UpdateDocumentTagsRequest, +}; fn map_tag_error(err: ServiceError) -> crate::http::error::ApiError { crate::http::error::map_service_error(err, "tag_service_error") } -#[utoipa::path(get, path = "/api/tags", tag = "Tags", - params(("q" = Option, Query, description = "Filter contains")), - responses((status = 200, body = [TagItem])))] +/// List all tags in the workspace (E2EE format) +#[utoipa::path( + get, + path = "/api/tags", + tag = "Tags", + params(("q" = Option, Query, description = "Base64 encoded encrypted tag for exact match filter")), + responses((status = 
200, body = ListTagsResponse)) +)] pub async fn list_tags( State(ctx): State, auth: WorkspaceAuth, - q: Option>>, -) -> Result>, ApiError> { + Query(query): Query, +) -> Result, ApiError> { auth.ensure_permission(PERM_DOC_VIEW)?; - let filter = q.and_then(|Query(m)| m.get("q").cloned()); + + let service = ctx.tag_service(); + + // If filter is provided, decode and use it for exact match + let items = if let Some(q) = query.q { + let encrypted_tag = base64::engine::general_purpose::STANDARD + .decode(&q) + .map_err(|_| ApiError::bad_request("invalid_encrypted_tag_base64"))?; + service + .find_encrypted_tag(auth.workspace_id, encrypted_tag) + .await + .map_err(map_tag_error)? + } else { + service + .list_encrypted_tags(auth.workspace_id) + .await + .map_err(map_tag_error)? + }; + + let tags: Vec = items.into_iter().map(Into::into).collect(); + Ok(Json(ListTagsResponse { tags })) +} + +/// Get tags for a specific document (E2EE format) +#[utoipa::path( + get, + path = "/api/documents/{id}/tags", + tag = "Tags", + params(("id" = Uuid, Path, description = "Document ID")), + responses((status = 200, body = DocumentTagsResponse)) +)] +pub async fn get_document_tags( + State(ctx): State, + auth: WorkspaceAuth, + Path(id): Path, +) -> Result, ApiError> { + let actor = access::Actor::User(auth.user_id); + ctx.authorization() + .require_view(&actor, id) + .await + .map_err(|err| crate::http::error::map_service_error(err, "authorization_error"))?; + + let service = ctx.tag_service(); + let items = service + .list_document_encrypted_tags(id) + .await + .map_err(map_tag_error)?; + let tags: Vec = items.into_iter().map(Into::into).collect(); + Ok(Json(DocumentTagsResponse { tags })) +} + +/// Replace tags for a document (E2EE format) +#[utoipa::path( + put, + path = "/api/documents/{id}/tags", + tag = "Tags", + params(("id" = Uuid, Path, description = "Document ID")), + request_body = UpdateDocumentTagsRequest, + responses((status = 200, body = DocumentTagsResponse)) +)] +pub async fn update_document_tags( + State(ctx): State, + auth: WorkspaceAuth, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + auth.ensure_permission(PERM_DOC_EDIT)?; + let actor = access::Actor::User(auth.user_id); + ctx.authorization() + .require_edit(&actor, id) + .await + .map_err(|err| crate::http::error::map_service_error(err, "authorization_error"))?; + + // Decode Base64 encoded tags + let encrypted_tags: Vec> = req + .encrypted_tags + .iter() + .map(|t| { + base64::engine::general_purpose::STANDARD + .decode(&t.encrypted_name) + .map_err(|_| ApiError::bad_request("invalid_encrypted_tag_base64")) + }) + .collect::, _>>()?; + let service = ctx.tag_service(); let items = service - .list(auth.workspace_id, filter) + .replace_document_encrypted_tags(auth.workspace_id, id, encrypted_tags) .await .map_err(map_tag_error)?; - let out: Vec = items.into_iter().map(Into::into).collect(); - Ok(Json(out)) + let tags: Vec = items.into_iter().map(Into::into).collect(); + Ok(Json(DocumentTagsResponse { tags })) } diff --git a/api/crates/presentation/src/http/documents/tagging/mod.rs b/api/crates/presentation/src/http/documents/tagging/mod.rs index 575f2868..0913847e 100644 --- a/api/crates/presentation/src/http/documents/tagging/mod.rs +++ b/api/crates/presentation/src/http/documents/tagging/mod.rs @@ -5,7 +5,7 @@ use axum::{Router, routing::get}; use crate::context::AppContext; -pub use handlers::list_tags; +pub use handlers::{get_document_tags, list_tags, update_document_tags}; pub use types::*; pub mod openapi { @@ -13,5 
 }
 
 pub fn routes(ctx: AppContext) -> Router {
-    Router::new().route("/tags", get(list_tags)).with_state(ctx)
+    Router::new()
+        .route("/tags", get(list_tags))
+        .route(
+            "/documents/:id/tags",
+            get(get_document_tags).put(update_document_tags),
+        )
+        .with_state(ctx)
 }
diff --git a/api/crates/presentation/src/http/documents/tagging/types.rs b/api/crates/presentation/src/http/documents/tagging/types.rs
index 69bf44a6..4d549998 100644
--- a/api/crates/presentation/src/http/documents/tagging/types.rs
+++ b/api/crates/presentation/src/http/documents/tagging/types.rs
@@ -1,19 +1,84 @@
-use serde::Serialize;
+use base64::Engine;
+use serde::{Deserialize, Serialize};
 use utoipa::ToSchema;
+use uuid::Uuid;
 
-use application::documents::dtos::TagItemDto;
+use application::documents::dtos::{EncryptedTagEntryDto, EncryptedTagItemDto};
 
-#[derive(Serialize, ToSchema)]
-pub struct TagItem {
-    pub name: String,
-    pub count: i64,
+/// Tag entry in list response (E2EE format)
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct TagEntry {
+    /// Base64 encoded deterministically encrypted tag name
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_name: String,
+    pub document_count: i64,
 }
 
-impl From<TagItemDto> for TagItem {
-    fn from(d: TagItemDto) -> Self {
-        TagItem {
-            name: d.name,
-            count: d.count,
+impl From<EncryptedTagItemDto> for TagEntry {
+    fn from(d: EncryptedTagItemDto) -> Self {
+        TagEntry {
+            encrypted_name: base64::engine::general_purpose::STANDARD.encode(&d.encrypted_tag),
+            document_count: d.count,
         }
     }
 }
+
+/// Response for GET /api/tags
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ListTagsResponse {
+    pub tags: Vec<TagEntry>,
+}
+
+/// Tag entry in document tags response
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentTagEntry {
+    pub id: Uuid,
+    /// Base64 encoded deterministically encrypted tag name
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_name: String,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
+impl From<EncryptedTagEntryDto> for DocumentTagEntry {
+    fn from(d: EncryptedTagEntryDto) -> Self {
+        DocumentTagEntry {
+            id: d.id,
+            encrypted_name: base64::engine::general_purpose::STANDARD.encode(&d.encrypted_tag),
+            created_at: d.created_at,
+        }
+    }
+}
+
+/// Response for GET /api/documents/{id}/tags
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentTagsResponse {
+    pub tags: Vec<DocumentTagEntry>,
+}
+
+/// Single encrypted tag in request
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct EncryptedTagInput {
+    /// Base64 encoded deterministically encrypted tag name
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_name: String,
+}
+
+/// Request for PUT /api/documents/{id}/tags
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct UpdateDocumentTagsRequest {
+    pub encrypted_tags: Vec<EncryptedTagInput>,
+}
+
+/// Query for tag search
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TagSearchQuery {
+    /// Optional filter query (Base64 encoded encrypted tag for exact match)
+    pub q: Option<String>,
+}
diff --git a/api/crates/presentation/src/http/documents/types.rs b/api/crates/presentation/src/http/documents/types.rs
index 9c1e5436..3ffd63a3 100644
--- a/api/crates/presentation/src/http/documents/types.rs
+++ b/api/crates/presentation/src/http/documents/types.rs
@@ -4,7 +4,6 @@ use uuid::Uuid;
 
 use crate::http::error::ApiError;
 use crate::http::error::ApiError;
 use application::core::services::errors::ServiceError;
-use application::documents::dtos::DocumentDownloadFormat;
 use application::documents::dtos::{
     DocumentListFilter, SnapshotDiffBaseMode, SnapshotDiffSideDto, SnapshotSummaryDto,
 };
@@ -32,9 +31,17 @@ pub struct Document {
     pub archived_at: Option<chrono::DateTime<chrono::Utc>>,
     pub archived_by: Option<Uuid>,
     pub archived_parent_id: Option<Uuid>,
+    // E2EE fields
+    #[serde(skip_serializing_if = "Option::is_none", rename = "encryptedTitle")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub encrypted_title: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none", rename = "encryptedTitleNonce")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub encrypted_title_nonce: Option<String>,
 }
 
 pub fn to_http_document(doc: domain::Document) -> Document {
+    use base64::Engine;
     Document {
         id: doc.id(),
         // NOTE: Older clients used `owner_id` to identify the workspace.
@@ -53,6 +60,12 @@ pub fn to_http_document(doc: domain::Document) -> Document {
         archived_at: doc.archived_at(),
         archived_by: doc.archived_by(),
         archived_parent_id: doc.archived_parent_id(),
+        encrypted_title: doc
+            .encrypted_title()
+            .map(|b| base64::engine::general_purpose::STANDARD.encode(b)),
+        encrypted_title_nonce: doc
+            .encrypted_title_nonce()
+            .map(|b| base64::engine::general_purpose::STANDARD.encode(b)),
     }
 }
 
@@ -76,6 +89,13 @@ pub struct SnapshotSummary {
     pub created_by: Option<Uuid>,
     pub byte_size: i64,
     pub content_hash: String,
+    // E2EE fields
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub nonce: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub signature: Option<String>,
 }
 
 #[derive(Debug, Serialize, ToSchema)]
@@ -128,7 +148,25 @@ pub struct SnapshotRestoreResponse {
     pub snapshot: SnapshotSummary,
 }
 
+/// Response for GET /api/documents/{id}/snapshots/{snapshotId}
+/// - For E2EE documents: content is encrypted, nonce is present
+/// - For non-E2EE documents: content is plaintext Yjs state, nonce is None
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct SnapshotDetailResponse {
+    pub id: Uuid,
+    /// Base64 encoded Yjs snapshot (encrypted for E2EE, plaintext for non-E2EE)
+    #[schema(value_type = String, format = "byte")]
+    pub content: String,
+    /// Base64 encoded nonce (present for E2EE documents)
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub nonce: Option<String>,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
 pub fn snapshot_summary_from(record: SnapshotSummaryDto) -> SnapshotSummary {
+    use base64::Engine;
     SnapshotSummary {
         id: record.id,
         document_id: record.document_id,
@@ -139,6 +177,12 @@ pub fn snapshot_summary_from(record: SnapshotSummaryDto) -> SnapshotSummary {
         created_by: record.created_by,
         byte_size: record.byte_size,
         content_hash: record.content_hash,
+        nonce: record
+            .nonce
+            .map(|b| base64::engine::general_purpose::STANDARD.encode(&b)),
+        signature: record
+            .signature
+            .map(|b| base64::engine::general_purpose::STANDARD.encode(&b)),
     }
 }
 
@@ -158,10 +202,55 @@ pub fn snapshot_diff_side_response_from(side: SnapshotDiffSideDto) -> SnapshotDi
 }
 
 #[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
 pub struct CreateDocumentRequest {
     pub title: Option<String>,
     pub parent_id: Option<Uuid>,
     pub r#type: Option<String>,
+    // E2EE fields
+    /// Base64 encoded encrypted title (for E2EE clients)
+    #[serde(default)]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub encrypted_title: Option<String>,
+    /// Base64 encoded nonce for encrypted title
+    #[serde(default)]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub encrypted_title_nonce: Option<String>,
+    /// Encrypted DEK for this document (optional, for E2EE clients)
+    #[serde(default)]
+    pub dek: Option<CreateDocumentDekPayload>,
+}
+
+/// DEK payload for document creation
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct CreateDocumentDekPayload {
+    /// Base64 encoded encrypted DEK
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_dek: String,
+    /// Base64 encoded nonce
+    #[schema(value_type = String, format = "byte")]
+    pub nonce: String,
+    /// Key version
+    #[serde(default = "default_key_version")]
+    pub key_version: i32,
+}
+
+fn default_key_version() -> i32 {
+    1
+}
+
+impl CreateDocumentDekPayload {
+    pub fn decode(&self) -> Result<(Vec<u8>, Vec<u8>, i32), &'static str> {
+        use base64::Engine;
+        let encrypted_dek = base64::engine::general_purpose::STANDARD
+            .decode(&self.encrypted_dek)
+            .map_err(|_| "invalid_encrypted_dek_base64")?;
+        let nonce = base64::engine::general_purpose::STANDARD
+            .decode(&self.nonce)
+            .map_err(|_| "invalid_nonce_base64")?;
+        Ok((encrypted_dek, nonce, self.key_version))
+    }
 }
 
 #[derive(Debug, Deserialize, ToSchema)]
@@ -219,7 +308,6 @@
 #[derive(Debug, Deserialize)]
 pub struct ListDocumentsQuery {
-    pub query: Option<String>,
     pub tag: Option<String>,
     #[serde(default)]
     pub state: Option<String>,
@@ -244,16 +332,47 @@ impl From<ListDocumentsQuery> for DocumentListFilter {
 }
 
 #[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
 pub struct UpdateDocumentContentRequest {
+    /// Document content (plaintext or Base64-encoded encrypted Yjs state for E2EE)
     pub content: String,
+    /// Base64 encoded nonce (required for E2EE content)
+    #[serde(default)]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub nonce: Option<String>,
+    /// Base64 encoded signature for integrity verification (optional for E2EE)
+    #[serde(default)]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub signature: Option<String>,
 }
 
+/// Patch operation for document content.
+/// For plaintext mode: use `text` field.
+/// For E2EE mode: use `encrypted_data` and `nonce` fields instead of `text`.
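+///
+/// Illustrative JSON bodies (placeholder values; the exact wire casing of the
+/// variant fields follows the serde attributes declared on this enum):
+///
+/// ```json
+/// { "op": "insert", "offset": 4, "text": "hello" }
+/// { "op": "insert", "offset": 4, "encrypted_data": "bTf4...", "nonce": "kJ2w..." }
+/// ```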
 #[derive(Debug, Deserialize, ToSchema)]
 #[serde(tag = "op", rename_all = "snake_case")]
 pub enum DocumentPatchOperationRequest {
     Insert {
         offset: usize,
-        text: String,
+        /// Plaintext to insert (for non-E2EE documents)
+        #[serde(default)]
+        text: Option<String>,
+        /// Base64 encoded encrypted data (for E2EE documents)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        encrypted_data: Option<String>,
+        /// Base64 encoded nonce (required when encrypted_data is provided)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        nonce: Option<String>,
+        /// Base64 encoded Ed25519 signature (for E2EE documents)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        signature: Option<String>,
+        /// Base64 encoded Ed25519 public key (for E2EE documents)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        public_key: Option<String>,
     },
     Delete {
         offset: usize,
@@ -262,158 +381,75 @@ pub enum DocumentPatchOperationRequest {
     Replace {
         offset: usize,
         length: usize,
-        text: String,
+        /// Plaintext replacement (for non-E2EE documents)
+        #[serde(default)]
+        text: Option<String>,
+        /// Base64 encoded encrypted data (for E2EE documents)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        encrypted_data: Option<String>,
+        /// Base64 encoded nonce (required when encrypted_data is provided)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        nonce: Option<String>,
+        /// Base64 encoded Ed25519 signature (for E2EE documents)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        signature: Option<String>,
+        /// Base64 encoded Ed25519 public key (for E2EE documents)
+        #[serde(default)]
+        #[schema(value_type = Option<String>, format = "byte")]
+        public_key: Option<String>,
     },
 }
 
-impl From<DocumentPatchOperationRequest> for DocumentPatchOperation {
-    fn from(value: DocumentPatchOperationRequest) -> Self {
-        match value {
-            DocumentPatchOperationRequest::Insert { offset, text } => {
-                DocumentPatchOperation::Insert { offset, text }
+impl DocumentPatchOperationRequest {
+    /// Check if this operation is for E2EE (has encrypted_data)
+    pub fn is_encrypted(&self) -> bool {
+        match self {
+            DocumentPatchOperationRequest::Insert { encrypted_data, .. } => encrypted_data.is_some(),
+            DocumentPatchOperationRequest::Delete { .. } => false,
+            DocumentPatchOperationRequest::Replace { encrypted_data, .. } => encrypted_data.is_some(),
+        }
+    }
+
+    /// Convert to plaintext DocumentPatchOperation (for non-E2EE mode)
+    pub fn to_plaintext_operation(&self) -> Option<DocumentPatchOperation> {
+        match self {
+            DocumentPatchOperationRequest::Insert { offset, text, .. } => {
+                text.as_ref().map(|t| DocumentPatchOperation::Insert {
+                    offset: *offset,
+                    text: t.clone(),
+                })
             }
             DocumentPatchOperationRequest::Delete { offset, length } => {
-                DocumentPatchOperation::Delete { offset, length }
+                Some(DocumentPatchOperation::Delete {
+                    offset: *offset,
+                    length: *length,
+                })
+            }
+            DocumentPatchOperationRequest::Replace { offset, length, text, .. } => {
+                text.as_ref().map(|t| DocumentPatchOperation::Replace {
+                    offset: *offset,
+                    length: *length,
+                    text: t.clone(),
+                })
             }
-            DocumentPatchOperationRequest::Replace {
-                offset,
-                length,
-                text,
-            } => DocumentPatchOperation::Replace {
-                offset,
-                length,
-                text,
-            },
         }
     }
 }
 
 #[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
 pub struct PatchDocumentContentRequest {
+    /// Patch operations. Each operation can be either plaintext (using `text` field)
+    /// or encrypted (using `encryptedData` and `nonce` fields).
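+    /// An illustrative E2EE batch (made-up Base64 values) could look like:
+    /// `{"operations":[{"op":"delete","offset":0,"length":3}],"signature":"mQ1c..."}`
+    /// (assumption: the request-level signature covers the whole batch; the
+    /// field doc below only states it is an optional integrity signature).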
+    #[serde(default)]
     pub operations: Vec<DocumentPatchOperationRequest>,
-}
-
-#[allow(dead_code)]
-#[derive(ToSchema)]
-pub struct DocumentDownloadBinary(#[schema(value_type = String, format = Binary)] pub Vec<u8>);
-
-#[allow(dead_code)]
-#[derive(ToSchema)]
-pub struct DocumentArchiveBinary(#[schema(value_type = String, format = Binary)] pub Vec<u8>);
-
-#[derive(Debug, Clone, Copy, Deserialize, ToSchema, Default)]
-#[serde(rename_all = "snake_case")]
-#[schema(rename_all = "snake_case")]
-pub enum DownloadFormat {
-    #[default]
-    Archive,
-    Markdown,
-    Html,
-    Html5,
-    Pdf,
-    Docx,
-    Latex,
-    Beamer,
-    Context,
-    Man,
-    Mediawiki,
-    Dokuwiki,
-    Textile,
-    Org,
-    Texinfo,
-    Opml,
-    Docbook,
-    Opendocument,
-    Odt,
-    Rtf,
-    Epub,
-    Epub3,
-    Fb2,
-    Asciidoc,
-    Icml,
-    Slidy,
-    Slideous,
-    Dzslides,
-    Revealjs,
-    S5,
-    Json,
-    Plain,
-    Commonmark,
-    CommonmarkX,
-    MarkdownStrict,
-    MarkdownPhpextra,
-    MarkdownGithub,
-    Rst,
-    Native,
-    Haddock,
-}
-
-impl From<DownloadFormat> for DocumentDownloadFormat {
-    fn from(value: DownloadFormat) -> Self {
-        match value {
-            DownloadFormat::Archive => DocumentDownloadFormat::Archive,
-            DownloadFormat::Markdown => DocumentDownloadFormat::Markdown,
-            DownloadFormat::Html => DocumentDownloadFormat::Html,
-            DownloadFormat::Html5 => DocumentDownloadFormat::Html5,
-            DownloadFormat::Pdf => DocumentDownloadFormat::Pdf,
-            DownloadFormat::Docx => DocumentDownloadFormat::Docx,
-            DownloadFormat::Latex => DocumentDownloadFormat::Latex,
-            DownloadFormat::Beamer => DocumentDownloadFormat::Beamer,
-            DownloadFormat::Context => DocumentDownloadFormat::Context,
-            DownloadFormat::Man => DocumentDownloadFormat::Man,
-            DownloadFormat::Mediawiki => DocumentDownloadFormat::MediaWiki,
-            DownloadFormat::Dokuwiki => DocumentDownloadFormat::Dokuwiki,
-            DownloadFormat::Textile => DocumentDownloadFormat::Textile,
-            DownloadFormat::Org => DocumentDownloadFormat::Org,
-            DownloadFormat::Texinfo => DocumentDownloadFormat::Texinfo,
-            DownloadFormat::Opml => DocumentDownloadFormat::Opml,
-            DownloadFormat::Docbook => DocumentDownloadFormat::Docbook,
-            DownloadFormat::Opendocument => DocumentDownloadFormat::OpenDocument,
-            DownloadFormat::Odt => DocumentDownloadFormat::Odt,
-            DownloadFormat::Rtf => DocumentDownloadFormat::Rtf,
-            DownloadFormat::Epub => DocumentDownloadFormat::Epub,
-            DownloadFormat::Epub3 => DocumentDownloadFormat::Epub3,
-            DownloadFormat::Fb2 => DocumentDownloadFormat::Fb2,
-            DownloadFormat::Asciidoc => DocumentDownloadFormat::Asciidoc,
-            DownloadFormat::Icml => DocumentDownloadFormat::Icml,
-            DownloadFormat::Slidy => DocumentDownloadFormat::Slidy,
-            DownloadFormat::Slideous => DocumentDownloadFormat::Slideous,
-            DownloadFormat::Dzslides => DocumentDownloadFormat::Dzslides,
-            DownloadFormat::Revealjs => DocumentDownloadFormat::Revealjs,
-            DownloadFormat::S5 => DocumentDownloadFormat::S5,
-            DownloadFormat::Json => DocumentDownloadFormat::Json,
-            DownloadFormat::Plain => DocumentDownloadFormat::Plain,
-            DownloadFormat::Commonmark => DocumentDownloadFormat::Commonmark,
-            DownloadFormat::CommonmarkX => DocumentDownloadFormat::CommonmarkX,
-            DownloadFormat::MarkdownStrict => DocumentDownloadFormat::MarkdownStrict,
-            DownloadFormat::MarkdownPhpextra => DocumentDownloadFormat::MarkdownPhpextra,
-            DownloadFormat::MarkdownGithub => DocumentDownloadFormat::MarkdownGithub,
-            DownloadFormat::Rst => DocumentDownloadFormat::Rst,
-            DownloadFormat::Native => DocumentDownloadFormat::Native,
-            DownloadFormat::Haddock => DocumentDownloadFormat::Haddock,
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, ToSchema, Default)]
-pub struct DownloadDocumentQuery {
-    pub token: Option<String>,
+    /// Base64 encoded signature for integrity verification (optional for E2EE)
     #[serde(default)]
-    pub format: DownloadFormat,
-}
-
-#[derive(Debug, Serialize, ToSchema)]
-pub struct SearchResult {
-    pub id: Uuid,
-    pub title: String,
-    pub document_type: String,
-    pub path: Option<String>,
-    pub updated_at: chrono::DateTime<chrono::Utc>,
-}
-
-#[derive(Debug, Deserialize)]
-pub struct SearchQuery {
-    pub q: Option<String>,
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub signature: Option<String>,
 }
 
 #[derive(Debug, Default, Deserialize)]
@@ -470,3 +506,45 @@ pub struct OutgoingLinksResponse {
     pub links: Vec<OutgoingLink>,
     pub total_count: usize,
 }
+
+/// Encrypted update entry for E2EE documents
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct EncryptedUpdateEntry {
+    /// Sequence number of the update
+    pub seq: i64,
+    /// Base64 encoded encrypted update data
+    #[schema(value_type = String, format = "byte")]
+    pub data: String,
+    /// Base64 encoded nonce for decryption
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub nonce: Option<String>,
+    /// Base64 encoded signature
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub signature: Option<String>,
+    /// Base64 encoded public key of the signer
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub public_key: Option<String>,
+}
+
+/// Response for GET /api/documents/{id}/content (E2EE encrypted)
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct GetContentResponse {
+    /// Base64 encoded encrypted Yjs snapshot bytes
+    #[schema(value_type = String, format = "byte")]
+    pub content: String,
+    /// Base64 encoded nonce for decryption
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[schema(value_type = Option<String>, format = "byte")]
+    pub nonce: Option<String>,
+    /// Sequence number at which the snapshot was taken
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub seq_at_snapshot: Option<i64>,
+    /// Pending encrypted updates since the snapshot
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub updates: Option<Vec<EncryptedUpdateEntry>>,
+}
diff --git a/api/crates/presentation/src/http/git/config.rs b/api/crates/presentation/src/http/git/config.rs
index 0d34963e..2a9f4587 100644
--- a/api/crates/presentation/src/http/git/config.rs
+++ b/api/crates/presentation/src/http/git/config.rs
@@ -1,4 +1,4 @@
-use axum::{Json, extract::State, http::StatusCode};
+use axum::{extract::State, http::StatusCode, Json};
 
 use crate::context::GitContext;
 use crate::http::error::ApiError;
@@ -8,9 +8,7 @@ use application::git::dtos::GitConfigDto;
 use application::git::dtos::UpsertGitConfigInput;
 use domain::access::permissions::{PERM_GIT_CONFIGURE, PERM_GIT_INIT, PERM_GIT_SYNC};
 
-use super::types::{
-    CreateGitConfigRequest, GitConfigResponse, GitRemoteCheckResponse, map_git_error,
-};
+use super::types::{CreateGitConfigRequest, GitConfigResponse, map_git_error};
 
 #[utoipa::path(get, path = "/api/git/config", tag = "Git", responses((status = 200, body = Option<GitConfigResponse>)))]
 pub async fn get_config(
@@ -25,16 +23,7 @@
         .get_config(auth.workspace_id)
         .await
         .map_err(map_git_error)?;
-    let mut out: Option<GitConfigResponse> = resp.map(Into::into);
-    if let Some(ref mut cfg) = out
-        && let Some(check) = service
-            .check_remote(auth.workspace_id)
-            .await
-            .map_err(map_git_error)?
- { - cfg.remote_check = Some(GitRemoteCheckResponse::from(check)); - } - Ok(Json(out)) + Ok(Json(resp.map(Into::into))) } #[utoipa::path(post, path = "/api/git/config", tag = "Git", request_body = CreateGitConfigRequest, responses((status = 200, body = GitConfigResponse)))] @@ -55,15 +44,7 @@ pub async fn create_or_update_config( ServiceError::BadRequest(code) => ApiError::bad_request(code).with_message(code), other => map_git_error(other), })?; - let mut out: GitConfigResponse = resp.into(); - if let Some(check) = service - .check_remote(auth.workspace_id) - .await - .map_err(map_git_error)? - { - out.remote_check = Some(check.into()); - } - Ok(Json(out)) + Ok(Json(resp.into())) } #[utoipa::path(delete, path = "/api/git/config", tag = "Git", responses((status = 204, description = "Deleted")))] diff --git a/api/crates/presentation/src/http/git/ignore.rs b/api/crates/presentation/src/http/git/ignore.rs deleted file mode 100644 index 2a6ddd37..00000000 --- a/api/crates/presentation/src/http/git/ignore.rs +++ /dev/null @@ -1,90 +0,0 @@ -use axum::{Json, extract::State}; -use uuid::Uuid; - -use crate::context::GitContext; -use crate::http::error::ApiError; -use crate::http::extractors::WorkspaceUser; -use application::core::services::errors::ServiceError; - -use super::types::{ - AddPatternsRequest, CheckIgnoredRequest, GitignoreUpdateResponse, map_git_error, -}; - -#[utoipa::path(post, path = "/api/git/ignore/doc/{id}", params(("id" = String, Path, description = "Document ID")), tag = "Git", responses((status = 200, description = "OK")))] -pub async fn ignore_document( - State(ctx): State, - auth: WorkspaceUser, - axum::extract::Path(id): axum::extract::Path, -) -> Result, ApiError> { - let doc_id = Uuid::parse_str(&id).map_err(|_| ApiError::bad_request("invalid_document_id"))?; - let service = ctx.git_service(); - let res = service - .ignore_document(auth.workspace_id, doc_id) - .await - .map_err(|err| match err { - ServiceError::NotFound => ApiError::not_found("not_found"), - other => map_git_error(other), - })?; - Ok(Json(res.into())) -} - -#[utoipa::path(post, path = "/api/git/ignore/folder/{id}", params(("id" = String, Path, description = "Folder ID")), tag = "Git", responses((status = 200, description = "OK")))] -pub async fn ignore_folder( - State(ctx): State, - auth: WorkspaceUser, - axum::extract::Path(id): axum::extract::Path, -) -> Result, ApiError> { - let folder_id = Uuid::parse_str(&id).map_err(|_| ApiError::bad_request("invalid_folder_id"))?; - let service = ctx.git_service(); - let res = service - .ignore_folder(auth.workspace_id, folder_id) - .await - .map_err(|err| match err { - ServiceError::NotFound => ApiError::not_found("not_found"), - other => map_git_error(other), - })?; - Ok(Json(res.into())) -} - -#[utoipa::path(post, path = "/api/git/gitignore/patterns", tag = "Git", request_body = AddPatternsRequest, responses((status = 200, description = "OK")))] -pub async fn add_gitignore_patterns( - State(ctx): State, - auth: WorkspaceUser, - Json(req): Json, -) -> Result, ApiError> { - let service = ctx.git_service(); - let added = service - .add_gitignore_patterns(auth.workspace_id, req.patterns) - .await - .map_err(map_git_error)?; - Ok(Json(serde_json::json!({"added": added}))) -} - -#[utoipa::path(get, path = "/api/git/gitignore/patterns", tag = "Git", responses((status = 200, description = "OK")))] -pub async fn get_gitignore_patterns( - State(ctx): State, - auth: WorkspaceUser, -) -> Result, ApiError> { - let service = ctx.git_service(); - let patterns = service - 
.get_gitignore_patterns(auth.workspace_id) - .await - .map_err(map_git_error)?; - Ok(Json(serde_json::json!({"patterns": patterns}))) -} - -#[utoipa::path(post, path = "/api/git/gitignore/check", tag = "Git", request_body = CheckIgnoredRequest, responses((status = 200, description = "OK")))] -pub async fn check_path_ignored( - State(ctx): State, - auth: WorkspaceUser, - Json(req): Json, -) -> Result, ApiError> { - let service = ctx.git_service(); - let is_ignored = service - .check_path_ignored(auth.workspace_id, &req.path) - .await - .map_err(map_git_error)?; - Ok(Json( - serde_json::json!({"path": req.path, "is_ignored": is_ignored}), - )) -} diff --git a/api/crates/presentation/src/http/git/mod.rs b/api/crates/presentation/src/http/git/mod.rs index 836a129e..31436e5f 100644 --- a/api/crates/presentation/src/http/git/mod.rs +++ b/api/crates/presentation/src/http/git/mod.rs @@ -1,36 +1,20 @@ mod config; -mod ignore; -mod pull; -mod status; -mod sync; +mod proxy; +mod ssh_tunnel; pub mod types; use axum::{ Router, - routing::{get, post}, + routing::{get, options, post}, }; use crate::context::AppContext; pub use config::{create_or_update_config, delete_config, get_config}; -pub use ignore::{ - add_gitignore_patterns, check_path_ignored, get_gitignore_patterns, ignore_document, - ignore_folder, -}; -pub use pull::{ - finalize_pull_session, get_pull_session, pull_repository, resolve_pull_session, - start_pull_session, -}; -pub use status::{get_changes, get_commit_diff, get_history, get_status, get_working_diff}; -pub use sync::{deinit_repository, import_repository, init_repository, sync_now}; pub use types::*; pub mod openapi { pub use super::config::*; - pub use super::ignore::*; - pub use super::pull::*; - pub use super::status::*; - pub use super::sync::*; } pub fn routes(ctx: AppContext) -> Router { @@ -41,29 +25,14 @@ pub fn routes(ctx: AppContext) -> Router { .post(create_or_update_config) .delete(delete_config), ) - .route("/git/status", get(get_status)) - .route("/git/changes", get(get_changes)) - .route("/git/history", get(get_history)) - .route("/git/diff/working", get(get_working_diff)) - .route("/git/diff/commits/:from/:to", get(get_commit_diff)) - .route("/git/sync", post(sync_now)) - .route("/git/import", post(import_repository)) - .route("/git/pull", post(pull_repository)) - .route("/git/pull/start", post(start_pull_session)) - .route("/git/pull/session/:id", get(get_pull_session)) - .route("/git/pull/session/:id/resolve", post(resolve_pull_session)) - .route( - "/git/pull/session/:id/finalize", - post(finalize_pull_session), - ) - .route("/git/init", post(init_repository)) - .route("/git/deinit", post(deinit_repository)) - .route("/git/ignore/doc/:id", post(ignore_document)) - .route("/git/ignore/folder/:id", post(ignore_folder)) + // Git HTTPS proxy for isomorphic-git .route( - "/git/gitignore/patterns", - get(get_gitignore_patterns).post(add_gitignore_patterns), + "/git/proxy/https/*path", + options(proxy::proxy_git_https_options) + .get(proxy::proxy_git_https) + .post(proxy::proxy_git_https), ) - .route("/git/gitignore/check", post(check_path_ignored)) + // Git SSH tunnel for isomorphic-git + .route("/git/proxy/ssh", post(ssh_tunnel::tunnel_git_ssh)) .with_state(ctx) } diff --git a/api/crates/presentation/src/http/git/proxy.rs b/api/crates/presentation/src/http/git/proxy.rs new file mode 100644 index 00000000..407a502b --- /dev/null +++ b/api/crates/presentation/src/http/git/proxy.rs @@ -0,0 +1,156 @@ +use axum::{ + extract::{Path, Query}, + http::{HeaderMap, HeaderValue, 
Method, StatusCode},
+    response::{IntoResponse, Response},
+};
+use bytes::Bytes;
+use std::collections::HashMap;
+
+/// Git HTTP protocol CORS proxy
+/// Forwards requests from isomorphic-git to remote Git servers
+/// Backend does not process Git operations, only relays network requests
+
+/// HTTPS Git proxy - forwards GET/POST requests to remote
+pub async fn proxy_git_https(
+    method: Method,
+    Path(remote_path): Path<String>,
+    Query(query): Query<HashMap<String, String>>,
+    headers: HeaderMap,
+    body: Bytes,
+) -> Result<Response, ProxyError> {
+    // Build remote URL with query string
+    let remote_url = if query.is_empty() {
+        format!("https://{}", remote_path)
+    } else {
+        let query_string: String = query
+            .iter()
+            .map(|(k, v)| format!("{}={}", k, v))
+            .collect::<Vec<_>>()
+            .join("&");
+        format!("https://{}?{}", remote_path, query_string)
+    };
+
+    // Security: only allow known Git hosting providers
+    let allowed_hosts = ["github.com", "gitlab.com", "bitbucket.org"];
+    let host = remote_path
+        .split('/')
+        .next()
+        .ok_or(ProxyError::InvalidUrl)?;
+
+    if !allowed_hosts
+        .iter()
+        .any(|h| host == *h || host.ends_with(&format!(".{}", h)))
+    {
+        return Err(ProxyError::HostNotAllowed);
+    }
+
+    let client = reqwest::Client::builder()
+        .user_agent("RefMD-Git-Proxy/1.0")
+        .build()
+        .map_err(|_| ProxyError::ClientError)?;
+
+    let mut req_builder = match method {
+        Method::GET => client.get(&remote_url),
+        Method::POST => client.post(&remote_url),
+        _ => return Err(ProxyError::MethodNotAllowed),
+    };
+
+    // Forward necessary headers
+    if let Some(auth) = headers.get("authorization") {
+        req_builder = req_builder.header("Authorization", auth.to_str().unwrap_or(""));
+    }
+    if let Some(ct) = headers.get("content-type") {
+        req_builder = req_builder.header("Content-Type", ct.to_str().unwrap_or(""));
+    }
+    if let Some(git_protocol) = headers.get("git-protocol") {
+        req_builder = req_builder.header("Git-Protocol", git_protocol.to_str().unwrap_or(""));
+    }
+
+    if !body.is_empty() {
+        req_builder = req_builder.body(body.to_vec());
+    }
+
+    let response = req_builder
+        .send()
+        .await
+        .map_err(|_| ProxyError::UpstreamError)?;
+
+    let status = StatusCode::from_u16(response.status().as_u16())
+        .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
+
+    let mut response_headers = HeaderMap::new();
+
+    if let Some(ct) = response.headers().get("content-type") {
+        response_headers.insert("content-type", ct.clone());
+    }
+    if let Some(cl) = response.headers().get("content-length") {
+        response_headers.insert("content-length", cl.clone());
+    }
+    if let Some(cache) = response.headers().get("cache-control") {
+        response_headers.insert("cache-control", cache.clone());
+    }
+
+    // Add CORS headers
+    response_headers.insert(
+        "access-control-allow-origin",
+        HeaderValue::from_static("*"),
+    );
+    response_headers.insert(
+        "access-control-allow-methods",
+        HeaderValue::from_static("GET, POST, OPTIONS"),
+    );
+    response_headers.insert(
"access-control-allow-headers", + HeaderValue::from_static("Authorization, Content-Type, Git-Protocol"), + ); + headers.insert("access-control-max-age", HeaderValue::from_static("86400")); + + (StatusCode::NO_CONTENT, headers) +} + +#[derive(Debug)] +pub enum ProxyError { + InvalidUrl, + HostNotAllowed, + MethodNotAllowed, + ClientError, + UpstreamError, +} + +impl IntoResponse for ProxyError { + fn into_response(self) -> Response { + let (status, message) = match self { + ProxyError::InvalidUrl => (StatusCode::BAD_REQUEST, "Invalid URL"), + ProxyError::HostNotAllowed => (StatusCode::FORBIDDEN, "Host not allowed"), + ProxyError::MethodNotAllowed => (StatusCode::METHOD_NOT_ALLOWED, "Method not allowed"), + ProxyError::ClientError => (StatusCode::INTERNAL_SERVER_ERROR, "Client error"), + ProxyError::UpstreamError => (StatusCode::BAD_GATEWAY, "Upstream error"), + }; + + (status, message).into_response() + } +} diff --git a/api/crates/presentation/src/http/git/pull.rs b/api/crates/presentation/src/http/git/pull.rs deleted file mode 100644 index 9074c302..00000000 --- a/api/crates/presentation/src/http/git/pull.rs +++ /dev/null @@ -1,415 +0,0 @@ -use axum::{Json, extract::State, http::StatusCode}; -use uuid::Uuid; - -use crate::context::GitContext; -use crate::http::error::ApiError; -use crate::http::extractors::WorkspaceAuth; -use application::core::services::errors::ServiceError; -use application::git::dtos::{GitPullRequestDto, GitPullResolutionDto}; -use application::git::services::FinalizePullSessionResult; -use domain::access::permissions::PERM_GIT_SYNC; -use domain::git::pull_session::GitPullSessionStatus; - -use super::types::{ - GitPullConflictItem, GitPullRequest, GitPullResolution, GitPullResponse, - GitPullSessionResponse, map_git_error, -}; - -#[utoipa::path( - post, - path = "/api/git/pull", - tag = "Git", - request_body = GitPullRequest, - responses( - (status = 200, body = GitPullResponse), - (status = 409, body = GitPullResponse, description = "Conflicts detected") - ) -)] -pub async fn pull_repository( - State(ctx): State, - auth: WorkspaceAuth, - Json(req): Json, -) -> Result<(StatusCode, Json), ApiError> { - auth.ensure_permission(PERM_GIT_SYNC)?; - let service = ctx.git_service(); - let dto = service - .pull_repository( - auth.workspace_id, - auth.user_id, - GitPullRequestDto { - resolutions: req - .resolutions - .unwrap_or_default() - .into_iter() - .map(|r| GitPullResolutionDto { - path: r.path, - choice: r.choice, - content: r.content, - }) - .collect(), - }, - ) - .await - .map_err(|err| { - let message = match &err { - ServiceError::BadRequest("workspace_has_pending_changes") => { - "Workspace has pending changes. Commit, sync, or discard them before pulling." 
- .to_string() - } - _ => err.to_string(), - }; - let status = map_git_error(err).status(); - let body = GitPullResponse { - success: false, - message, - files_changed: 0, - commit_hash: None, - conflicts: None, - git_status: None, - }; - (status, body) - }); - let dto = match dto { - Ok(v) => v, - Err((status, body)) => return Ok((status, Json(body))), - }; - let conflicts = dto - .conflicts - .map(|items| items.into_iter().map(Into::into).collect::>()) - .unwrap_or_default(); - let has_conflicts = !conflicts.is_empty(); - let status = if has_conflicts { - StatusCode::CONFLICT - } else { - StatusCode::OK - }; - Ok(( - status, - Json(GitPullResponse { - success: dto.success, - message: dto.message, - files_changed: dto.files_changed as i32, - commit_hash: dto.commit_hash, - conflicts: if has_conflicts { Some(conflicts) } else { None }, - git_status: None, - }), - )) -} - -#[utoipa::path( - post, - path = "/api/git/pull/start", - tag = "Git", - responses( - (status = 200, body = GitPullSessionResponse), - (status = 400, body = GitPullSessionResponse), - (status = 409, body = GitPullSessionResponse, description = "Conflicts detected") - ) -)] -pub async fn start_pull_session( - State(ctx): State, - auth: WorkspaceAuth, -) -> Result<(StatusCode, Json), ApiError> { - auth.ensure_permission(PERM_GIT_SYNC)?; - - let service = ctx.git_service(); - let session = match service - .start_pull_session_flow(auth.workspace_id, auth.user_id) - .await - { - Ok(v) => v, - Err(err) => { - let message = match &err { - ServiceError::BadRequest("workspace_has_pending_changes") => { - "Workspace has pending changes. Commit, sync, or discard them before pulling." - .to_string() - } - other => other.to_string(), - }; - let status = map_git_error(err).status(); - return Ok(( - status, - Json(GitPullSessionResponse { - session_id: Uuid::nil(), - status: "error".to_string(), - conflicts: Vec::new(), - resolutions: Vec::new(), - message: Some(message), - }), - )); - } - }; - if session.status == GitPullSessionStatus::Error { - return Ok(( - StatusCode::BAD_REQUEST, - Json(GitPullSessionResponse { - session_id: session.id, - status: session.status.as_str().to_string(), - conflicts: Vec::new(), - resolutions: Vec::new(), - message: session.message, - }), - )); - } - let conflicts = session - .conflicts - .clone() - .into_iter() - .map(Into::into) - .collect::>(); - let has_conflicts = !conflicts.is_empty(); - let status = if has_conflicts { - StatusCode::CONFLICT - } else { - StatusCode::OK - }; - Ok(( - status, - Json(GitPullSessionResponse { - session_id: session.id, - status: session.status.as_str().to_string(), - conflicts, - resolutions: Vec::new(), - message: session.message, - }), - )) -} - -#[utoipa::path( - get, - path = "/api/git/pull/session/{id}", - tag = "Git", - responses((status = 200, body = GitPullSessionResponse)) -)] -pub async fn get_pull_session( - State(ctx): State, - auth: WorkspaceAuth, - axum::extract::Path(id): axum::extract::Path, -) -> Result, ApiError> { - auth.ensure_permission(PERM_GIT_SYNC)?; - - let service = ctx.git_service(); - let state = service - .load_pull_session_with_stale_check(auth.workspace_id, id) - .await - .map_err(map_git_error)? 
- .ok_or(ApiError::not_found("not_found"))?; - Ok(Json(GitPullSessionResponse { - session_id: state.id, - status: state.status.as_str().to_string(), - conflicts: state.conflicts.into_iter().map(Into::into).collect(), - resolutions: state - .resolutions - .into_iter() - .map(|r| GitPullResolution { - path: r.path, - choice: r.choice, - content: r.content, - }) - .collect(), - message: state.message, - })) -} - -#[utoipa::path( - post, - path = "/api/git/pull/session/{id}/resolve", - tag = "Git", - request_body = GitPullRequest, - responses( - (status = 200, body = GitPullSessionResponse), - (status = 400, body = GitPullSessionResponse), - (status = 409, body = GitPullSessionResponse) - ) -)] -pub async fn resolve_pull_session( - State(ctx): State, - auth: WorkspaceAuth, - axum::extract::Path(id): axum::extract::Path, - Json(req): Json, -) -> Result<(StatusCode, Json), ApiError> { - auth.ensure_permission(PERM_GIT_SYNC)?; - - let service = ctx.git_service(); - let existing_session = service - .load_pull_session_with_stale_check(auth.workspace_id, id) - .await - .map_err(map_git_error)? - .ok_or(ApiError::not_found("not_found"))?; - let resolutions = req.resolutions.unwrap_or_default(); - let session = match service - .resolve_pull_session_flow( - auth.workspace_id, - auth.user_id, - id, - resolutions - .iter() - .cloned() - .map(|r| GitPullResolutionDto { - path: r.path, - choice: r.choice, - content: r.content, - }) - .collect(), - ) - .await - { - Ok(v) => v, - Err(err) => { - let message = match &err { - ServiceError::BadRequest("workspace_has_pending_changes") => { - "Workspace has pending changes. Commit, sync, or discard them before pulling." - .to_string() - } - other => other.to_string(), - }; - let status = map_git_error(err).status(); - return Ok(( - status, - Json(GitPullSessionResponse { - session_id: id, - status: "error".to_string(), - conflicts: existing_session - .conflicts - .into_iter() - .map(Into::into) - .collect(), - resolutions: existing_session - .resolutions - .into_iter() - .map(|r| GitPullResolution { - path: r.path, - choice: r.choice, - content: r.content, - }) - .collect(), - message: Some(message), - }), - )); - } - }; - - let mut status_code = StatusCode::OK; - - let conflicts: Vec = session - .conflicts - .clone() - .into_iter() - .map(Into::into) - .collect(); - if !conflicts.is_empty() { - status_code = StatusCode::CONFLICT; - } - if session.status == GitPullSessionStatus::Stale { - status_code = StatusCode::CONFLICT; - } - if session.status == GitPullSessionStatus::Error { - status_code = StatusCode::BAD_REQUEST; - } - let session_status = session.status; - - Ok(( - status_code, - Json(GitPullSessionResponse { - session_id: id, - status: session_status.as_str().to_string(), - conflicts, - resolutions, - message: if session_status == GitPullSessionStatus::Error { - session.message - } else if status_code == StatusCode::CONFLICT - && session_status == GitPullSessionStatus::Stale - { - Some("Pull session is stale. 
Please start a new pull.".to_string())
-            } else {
-                session.message
-            },
-        }),
-    ))
-}
-
-#[utoipa::path(
-    post,
-    path = "/api/git/pull/session/{id}/finalize",
-    tag = "Git",
-    responses(
-        (status = 200, body = GitPullResponse),
-        (status = 400, body = GitPullResponse),
-        (status = 409, body = GitPullResponse)
-    )
-)]
-pub async fn finalize_pull_session(
-    State(ctx): State<GitContext>,
-    auth: WorkspaceAuth,
-    axum::extract::Path(id): axum::extract::Path<Uuid>,
-) -> Result<(StatusCode, Json<GitPullResponse>), ApiError> {
-    auth.ensure_permission(PERM_GIT_SYNC)?;
-
-    let service = ctx.git_service();
-    let FinalizePullSessionResult {
-        session,
-        git_status,
-    } = service
-        .finalize_pull_session_flow(auth.workspace_id, id)
-        .await
-        .map_err(map_git_error)?;
-    if session.status == GitPullSessionStatus::Error {
-        return Ok((
-            StatusCode::BAD_REQUEST,
-            Json(GitPullResponse {
-                success: false,
-                message: session
-                    .message
-                    .clone()
-                    .unwrap_or_else(|| "pull failed".to_string()),
-                files_changed: 0,
-                commit_hash: None,
-                conflicts: Some(session.conflicts.into_iter().map(Into::into).collect()),
-                git_status: None,
-            }),
-        ));
-    }
-    if session.status == GitPullSessionStatus::Stale {
-        return Ok((
-            StatusCode::CONFLICT,
-            Json(GitPullResponse {
-                success: false,
-                message: session
-                    .message
-                    .clone()
-                    .unwrap_or_else(|| "pull session stale".to_string()),
-                files_changed: 0,
-                commit_hash: None,
-                conflicts: Some(session.conflicts.into_iter().map(Into::into).collect()),
-                git_status: None,
-            }),
-        ));
-    }
-    if !session.conflicts.is_empty() {
-        return Ok((
-            StatusCode::CONFLICT,
-            Json(GitPullResponse {
-                success: false,
-                message: "conflicts remaining".to_string(),
-                files_changed: 0,
-                commit_hash: None,
-                conflicts: Some(session.conflicts.into_iter().map(Into::into).collect()),
-                git_status: None,
-            }),
-        ));
-    }
-    Ok((
-        StatusCode::OK,
-        Json(GitPullResponse {
-            success: true,
-            message: session
-                .message
-                .clone()
-                .unwrap_or_else(|| "merge completed".to_string()),
-            files_changed: 0,
-            commit_hash: None,
-            conflicts: None,
-            git_status: git_status.map(Into::into),
-        }),
-    ))
-}
diff --git a/api/crates/presentation/src/http/git/ssh_tunnel.rs b/api/crates/presentation/src/http/git/ssh_tunnel.rs
new file mode 100644
index 00000000..dff829d4
--- /dev/null
+++ b/api/crates/presentation/src/http/git/ssh_tunnel.rs
@@ -0,0 +1,158 @@
+use axum::{
+    extract::State,
+    http::StatusCode,
+    response::{IntoResponse, Response},
+    Json,
+};
+use serde::{Deserialize, Serialize};
+use ssh2::Session;
+use std::io::{Read, Write};
+use std::net::TcpStream;
+
+use crate::context::AppContext;
+
+/// SSH tunnel for Git protocol
+/// Relays Git protocol data over SSH to remote servers
+/// Used for isomorphic-git SSH support
+
+#[derive(Debug, Deserialize)]
+pub struct SshTunnelRequest {
+    /// Git host (e.g., "github.com")
+    pub host: String,
+    /// Repository path (e.g., "user/repo.git")
+    pub repo: String,
+    /// Git service: "git-upload-pack" (fetch/clone) or "git-receive-pack" (push)
+    pub service: String,
+    /// SSH private key (PEM format)
+    pub private_key: String,
+    /// Optional passphrase for encrypted private key
+    pub passphrase: Option<String>,
+    /// Git protocol data to send
+    pub data: Vec<u8>,
+}
+
+#[derive(Debug, Serialize)]
+pub struct SshTunnelResponse {
+    /// Response data from Git server
+    pub data: Vec<u8>,
+}
+
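+/// Illustrative request body for this endpoint (all values are placeholders,
+/// not part of the original change; `data` carries the raw Git protocol bytes
+/// the client-side isomorphic-git produced, serialized as a JSON byte array,
+/// per the serde defaults on `SshTunnelRequest`):
+///
+/// ```json
+/// {
+///   "host": "github.com",
+///   "repo": "user/repo.git",
+///   "service": "git-upload-pack",
+///   "private_key": "-----BEGIN OPENSSH PRIVATE KEY-----\n...",
+///   "data": [48, 48, 49, 101]
+/// }
+/// ```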
+pub async fn tunnel_git_ssh(
+    State(_ctx): State<AppContext>,
+    Json(req): Json<SshTunnelRequest>,
+) -> Result<Json<SshTunnelResponse>, SshTunnelError> {
+    // Validate service
+    if req.service != "git-upload-pack" && req.service != "git-receive-pack" {
+        return Err(SshTunnelError::InvalidService);
+    }
+
+    // Security: only allow known Git hosting providers
+    let allowed_hosts = ["github.com", "gitlab.com", "bitbucket.org"];
+    if !allowed_hosts.contains(&req.host.as_str()) {
+        return Err(SshTunnelError::HostNotAllowed);
+    }
+
+    // Run SSH operation in blocking task
+    let result = tokio::task::spawn_blocking(move || {
+        execute_ssh_tunnel(req)
+    })
+    .await
+    .map_err(|_| SshTunnelError::InternalError)??;
+
+    Ok(Json(result))
+}
+
+fn execute_ssh_tunnel(req: SshTunnelRequest) -> Result<SshTunnelResponse, SshTunnelError> {
+    // Connect to SSH server
+    let tcp = TcpStream::connect(format!("{}:22", req.host))
+        .map_err(|_| SshTunnelError::ConnectionFailed)?;
+
+    let mut session = Session::new().map_err(|_| SshTunnelError::SessionError)?;
+    session.set_tcp_stream(tcp);
+    session
+        .handshake()
+        .map_err(|_| SshTunnelError::HandshakeFailed)?;
+
+    // Authenticate with private key
+    let passphrase = req.passphrase.as_deref();
+    session
+        .userauth_pubkey_memory("git", None, &req.private_key, passphrase)
+        .map_err(|_| SshTunnelError::AuthFailed)?;
+
+    if !session.authenticated() {
+        return Err(SshTunnelError::AuthFailed);
+    }
+
+    // Execute Git command
+    let command = format!("{} '{}'", req.service, req.repo);
+    let mut channel = session
+        .channel_session()
+        .map_err(|_| SshTunnelError::ChannelError)?;
+
+    channel
+        .exec(&command)
+        .map_err(|_| SshTunnelError::ExecFailed)?;
+
+    // Send Git protocol data
+    if !req.data.is_empty() {
+        channel
+            .write_all(&req.data)
+            .map_err(|_| SshTunnelError::WriteError)?;
+        channel.flush().map_err(|_| SshTunnelError::WriteError)?;
+    }
+
+    // Signal end of input
+    channel
+        .send_eof()
+        .map_err(|_| SshTunnelError::WriteError)?;
+
+    // Read response
+    let mut response_data = Vec::new();
+    channel
+        .read_to_end(&mut response_data)
+        .map_err(|_| SshTunnelError::ReadError)?;
+
+    // Wait for channel to close
+    channel
+        .wait_close()
+        .map_err(|_| SshTunnelError::ChannelError)?;
+
+    Ok(SshTunnelResponse {
+        data: response_data,
+    })
+}
+
+#[derive(Debug)]
+pub enum SshTunnelError {
+    InvalidService,
+    HostNotAllowed,
+    ConnectionFailed,
+    SessionError,
+    HandshakeFailed,
+    AuthFailed,
+    ChannelError,
+    ExecFailed,
+    WriteError,
+    ReadError,
+    InternalError,
+}
+
+impl IntoResponse for SshTunnelError {
+    fn into_response(self) -> Response {
+        let (status, message) = match self {
+            SshTunnelError::InvalidService => (StatusCode::BAD_REQUEST, "Invalid Git service"),
+            SshTunnelError::HostNotAllowed => (StatusCode::FORBIDDEN, "Host not allowed"),
+            SshTunnelError::ConnectionFailed => (StatusCode::BAD_GATEWAY, "Connection failed"),
+            SshTunnelError::SessionError => (StatusCode::INTERNAL_SERVER_ERROR, "Session error"),
+            SshTunnelError::HandshakeFailed => (StatusCode::BAD_GATEWAY, "SSH handshake failed"),
+            SshTunnelError::AuthFailed => (StatusCode::UNAUTHORIZED, "SSH authentication failed"),
+            SshTunnelError::ChannelError => (StatusCode::INTERNAL_SERVER_ERROR, "Channel error"),
+            SshTunnelError::ExecFailed => (StatusCode::BAD_GATEWAY, "Command execution failed"),
+            SshTunnelError::WriteError => (StatusCode::BAD_GATEWAY, "Write error"),
+            SshTunnelError::ReadError => (StatusCode::BAD_GATEWAY, "Read error"),
+            SshTunnelError::InternalError => (StatusCode::INTERNAL_SERVER_ERROR, "Internal error"),
+        };
+
+        (status, message).into_response()
+    }
+}
diff --git a/api/crates/presentation/src/http/git/status.rs b/api/crates/presentation/src/http/git/status.rs
deleted file mode 100644
index 9babbe87..00000000
--- a/api/crates/presentation/src/http/git/status.rs
+++ /dev/null @@ -1,89 +0,0 @@ -use axum::{Json, extract::State}; - -use crate::context::GitContext; -use crate::http::error::ApiError; -use crate::http::extractors::WorkspaceUser; -use application::git::dtos::{GitCommitInfo, GitStatusDto}; -use contracts::core::dtos::TextDiffResult; - -use super::types::{GitChangesResponse, GitHistoryResponse, GitStatus, map_git_error}; - -#[utoipa::path(get, path = "/api/git/status", tag = "Git", responses((status = 200, body = GitStatus)))] -pub async fn get_status( - State(ctx): State, - auth: WorkspaceUser, -) -> Result, ApiError> { - let service = ctx.git_service(); - let dto: GitStatusDto = service - .get_status(auth.workspace_id) - .await - .map_err(map_git_error)?; - let out: GitStatus = dto.into(); - Ok(Json(out)) -} - -#[utoipa::path(get, path = "/api/git/changes", tag = "Git", responses((status = 200, body = GitChangesResponse)))] -pub async fn get_changes( - State(ctx): State, - auth: WorkspaceUser, -) -> Result, ApiError> { - let service = ctx.git_service(); - let files = service - .get_changes(auth.workspace_id) - .await - .map_err(map_git_error)?; - let items = files.into_iter().map(Into::into).collect(); - Ok(Json(GitChangesResponse { files: items })) -} - -#[utoipa::path(get, path = "/api/git/history", tag = "Git", responses((status = 200, body = GitHistoryResponse)))] -pub async fn get_history( - State(ctx): State, - auth: WorkspaceUser, -) -> Result, ApiError> { - let service = ctx.git_service(); - let commits: Vec = service - .get_history(auth.workspace_id) - .await - .map_err(map_git_error)?; - let out = commits.into_iter().map(Into::into).collect(); - Ok(Json(GitHistoryResponse { commits: out })) -} - -#[utoipa::path( - get, - path = "/api/git/diff/working", - tag = "Git", - responses((status = 200, body = [TextDiffResult])) -)] -pub async fn get_working_diff( - State(ctx): State, - auth: WorkspaceUser, -) -> Result>, ApiError> { - let service = ctx.git_service(); - let diffs = service - .get_working_diff(auth.workspace_id) - .await - .map_err(map_git_error)?; - Ok(Json(diffs)) -} - -#[utoipa::path( - get, - path = "/api/git/diff/commits/{from}/{to}", - params(("from" = String, Path, description = "From"), ("to" = String, Path, description = "To")), - tag = "Git", - responses((status = 200, body = [TextDiffResult])) -)] -pub async fn get_commit_diff( - State(ctx): State, - auth: WorkspaceUser, - axum::extract::Path((from, to)): axum::extract::Path<(String, String)>, -) -> Result>, ApiError> { - let service = ctx.git_service(); - let diffs = service - .get_commit_diff(auth.workspace_id, &from, &to) - .await - .map_err(map_git_error)?; - Ok(Json(diffs)) -} diff --git a/api/crates/presentation/src/http/git/sync.rs b/api/crates/presentation/src/http/git/sync.rs deleted file mode 100644 index 9d0b2857..00000000 --- a/api/crates/presentation/src/http/git/sync.rs +++ /dev/null @@ -1,100 +0,0 @@ -use axum::{Json, extract::State}; - -use crate::context::GitContext; -use crate::http::error::ApiError; -use crate::http::extractors::{WorkspaceAuth, WorkspaceUser}; -use application::git::dtos::{GitSyncRequestDto, UpsertGitConfigInput}; -use domain::access::permissions::PERM_GIT_INIT; - -use super::types::{ - CreateGitConfigRequest, GitImportResponse, GitSyncRequest, GitSyncResponse, map_git_error, -}; - -#[utoipa::path(post, path = "/api/git/sync", tag = "Git", request_body = GitSyncRequest, responses((status = 200, body = GitSyncResponse), (status = 409, description = "Conflicts during rebase/pull")))] -pub async fn sync_now( - State(ctx): State, 
- auth: WorkspaceUser, - Json(req): Json, -) -> Result, ApiError> { - let service = ctx.git_service(); - let out = service - .sync_now( - auth.workspace_id, - GitSyncRequestDto { - message: req.message.clone(), - force: req.force, - full_scan: req.full_scan, - skip_push: req.skip_push, - }, - ) - .await - .map_err(map_git_error)?; - Ok(Json(GitSyncResponse { - success: out.success, - message: out.message, - commit_hash: out.commit_hash, - files_changed: out.files_changed, - })) -} - -#[utoipa::path( - post, - path = "/api/git/import", - tag = "Git", - request_body = CreateGitConfigRequest, - responses((status = 200, body = GitImportResponse)) -)] -pub async fn import_repository( - State(ctx): State, - auth: WorkspaceAuth, - Json(req): Json, -) -> Result, ApiError> { - if req.repository_url.trim().is_empty() { - return Err(ApiError::bad_request("invalid_repository_url")); - } - auth.ensure_permission(PERM_GIT_INIT)?; - - let service = ctx.git_service(); - let dto = service - .import_repository( - auth.workspace_id, - auth.user_id, - &UpsertGitConfigInput::from(req), - ) - .await - .map_err(map_git_error)?; - Ok(Json(GitImportResponse { - success: true, - message: dto.message, - files_changed: dto.files_changed as i32, - commit_hash: dto.commit_hash, - docs_created: dto.docs_created as i32, - attachments_created: dto.attachments_created as i32, - })) -} - -#[utoipa::path(post, path = "/api/git/init", tag = "Git", responses((status = 200, description = "OK")))] -pub async fn init_repository( - State(ctx): State, - auth: WorkspaceUser, -) -> Result, ApiError> { - let service = ctx.git_service(); - service - .init_repository(auth.workspace_id) - .await - .map_err(map_git_error)?; - Ok(Json(serde_json::json!({"success":true}))) -} - -#[utoipa::path(post, path = "/api/git/deinit", tag = "Git", responses((status = 200, description = "OK")))] -pub async fn deinit_repository( - State(ctx): State, - auth: WorkspaceUser, -) -> Result, ApiError> { - let service = ctx.git_service(); - service - .deinit_repository(auth.workspace_id) - .await - .map_err(map_git_error)?; - Ok(Json(serde_json::json!({"success":true}))) -} diff --git a/api/crates/presentation/src/http/git/types.rs b/api/crates/presentation/src/http/git/types.rs index d79e1513..b5f8ab2d 100644 --- a/api/crates/presentation/src/http/git/types.rs +++ b/api/crates/presentation/src/http/git/types.rs @@ -1,9 +1,5 @@ use application::core::services::errors::ServiceError; -use application::git::dtos::UpsertGitConfigInput; -use application::git::dtos::{ - GitChangeItem as GitChangeDto, GitCommitInfo, GitConfigDto, GitPullConflictItemDto, - GitPullResolutionDto, GitPullSessionDto, GitStatusDto, GitignoreUpdateDto, -}; +use application::git::dtos::{GitConfigDto, UpsertGitConfigInput}; use serde::{Deserialize, Serialize}; use utoipa::ToSchema; @@ -11,21 +7,6 @@ pub fn map_git_error(err: ServiceError) -> crate::http::error::ApiError { crate::http::error::map_service_error(err, "git_service_error") } -#[derive(Debug, Serialize, ToSchema)] -pub struct GitignoreUpdateResponse { - pub added: usize, - pub patterns: Vec, -} - -impl From for GitignoreUpdateResponse { - fn from(value: GitignoreUpdateDto) -> Self { - Self { - added: value.added, - patterns: value.patterns, - } - } -} - #[derive(Debug, Serialize, Deserialize, ToSchema, Clone)] pub struct GitConfigResponse { pub id: uuid::Uuid, @@ -36,6 +17,9 @@ pub struct GitConfigResponse { pub created_at: chrono::DateTime, pub updated_at: chrono::DateTime, pub remote_check: Option, + /// E2EE encrypted auth data 
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub encrypted_auth_data: Option<String>,
 }
 
 #[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
@@ -66,6 +50,7 @@ impl From<GitConfigDto> for GitConfigResponse {
             created_at: d.created_at,
             updated_at: d.updated_at,
             remote_check: None,
+            encrypted_auth_data: d.encrypted_auth_data,
         }
     }
 }
@@ -90,209 +75,3 @@ impl From<CreateGitConfigRequest> for UpsertGitConfigInput {
         }
     }
 }
-
-#[derive(Debug, Serialize, Deserialize, ToSchema)]
-pub struct UpdateGitConfigRequest {
-    pub repository_url: Option<String>,
-    pub branch_name: Option<String>,
-    pub auth_type: Option<String>,
-    pub auth_data: Option<String>,
-    pub auto_sync: Option<bool>,
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
-pub struct GitPullResolution {
-    pub path: String,
-    pub choice: String,
-    pub content: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema)]
-pub struct GitPullRequest {
-    pub resolutions: Option<Vec<GitPullResolution>>,
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
-pub struct GitPullConflictItem {
-    pub path: String,
-    pub is_binary: bool,
-    pub ours: Option<String>,
-    pub theirs: Option<String>,
-    pub base: Option<String>,
-    pub document_id: Option<uuid::Uuid>,
-}
-
-impl From<GitPullConflictItemDto> for GitPullConflictItem {
-    fn from(value: GitPullConflictItemDto) -> Self {
-        Self {
-            path: value.path,
-            is_binary: value.is_binary,
-            ours: value.ours,
-            theirs: value.theirs,
-            base: value.base,
-            document_id: value.document_id,
-        }
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
-pub struct GitPullResponse {
-    pub success: bool,
-    pub message: String,
-    pub files_changed: i32,
-    pub commit_hash: Option<String>,
-    pub conflicts: Option<Vec<GitPullConflictItem>>,
-    pub git_status: Option<GitStatus>,
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
-pub struct GitImportResponse {
-    pub success: bool,
-    pub message: String,
-    pub files_changed: i32,
-    pub commit_hash: Option<String>,
-    pub docs_created: i32,
-    pub attachments_created: i32,
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
-pub struct GitPullSessionResponse {
-    pub session_id: uuid::Uuid,
-    pub status: String,
-    pub conflicts: Vec<GitPullConflictItem>,
-    pub resolutions: Vec<GitPullResolution>,
-    pub message: Option<String>,
-}
-
-impl From<GitPullSessionDto> for GitPullSessionResponse {
-    fn from(value: GitPullSessionDto) -> Self {
-        Self {
-            session_id: value.id,
-            status: value.status.as_str().to_string(),
-            conflicts: value.conflicts.into_iter().map(Into::into).collect(),
-            resolutions: value
-                .resolutions
-                .into_iter()
-                .map(|r| GitPullResolution {
-                    path: r.path,
-                    choice: r.choice,
-                    content: r.content,
-                })
-                .collect(),
-            message: value.message,
-        }
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize, ToSchema, Clone)]
-pub struct GitStatus {
-    pub repository_initialized: bool,
-    pub has_remote: bool,
-    pub current_branch: Option<String>,
-    pub uncommitted_changes: u32,
-    pub untracked_files: u32,
-    pub last_sync: Option<chrono::DateTime<chrono::Utc>>,
-    pub last_sync_status: Option<String>,
-    pub last_sync_message: Option<String>,
-    pub last_sync_commit_hash: Option<String>,
-    pub sync_enabled: bool,
-}
-
-impl From<GitStatusDto> for GitStatus {
-    fn from(d: GitStatusDto) -> Self {
-        GitStatus {
-            repository_initialized: d.repository_initialized,
-            has_remote: d.has_remote,
-            current_branch: d.current_branch,
-            uncommitted_changes: d.uncommitted_changes,
-            untracked_files: d.untracked_files,
-            last_sync: d.last_sync,
-            last_sync_status: d.last_sync_status,
-            last_sync_message: d.last_sync_message,
-            last_sync_commit_hash: d.last_sync_commit_hash,
-            sync_enabled: d.sync_enabled,
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, ToSchema)]
-pub struct GitSyncRequest {
-    pub message: Option<String>,
-    pub force: Option<bool>,
-    pub full_scan: Option<bool>,
-    pub skip_push: Option<bool>,
-}
-
-#[derive(Debug, Serialize, ToSchema)]
-pub struct GitSyncResponse {
-    pub success: bool,
-    pub message: String,
-    pub commit_hash: Option<String>,
-    pub files_changed: u32,
-}
-
-#[derive(Debug, Serialize, ToSchema)]
-pub struct GitChangeItem {
-    pub path: String,
-    pub status: String,
-}
-
-impl From<GitChangeDto> for GitChangeItem {
-    fn from(value: GitChangeDto) -> Self {
-        GitChangeItem {
-            path: value.path,
-            status: value.status,
-        }
-    }
-}
-
-#[derive(Debug, Serialize, ToSchema)]
-pub struct GitChangesResponse {
-    pub files: Vec<GitChangeItem>,
-}
-
-#[derive(Debug, Serialize, ToSchema)]
-pub struct GitCommitItem {
-    pub hash: String,
-    pub message: String,
-    pub author_name: String,
-    pub author_email: String,
-    pub time: chrono::DateTime<chrono::Utc>,
-}
-
-impl From<GitCommitInfo> for GitCommitItem {
-    fn from(value: GitCommitInfo) -> Self {
-        GitCommitItem {
-            hash: value.hash,
-            message: value.message,
-            author_name: value.author_name,
-            author_email: value.author_email,
-            time: value.time,
-        }
-    }
-}
-
-#[derive(Debug, Serialize, ToSchema)]
-pub struct GitHistoryResponse {
-    pub commits: Vec<GitCommitItem>,
-}
-
-#[derive(Debug, Deserialize, ToSchema)]
-pub struct AddPatternsRequest {
-    pub patterns: Vec<String>,
-}
-
-#[derive(Debug, Deserialize, ToSchema)]
-pub struct CheckIgnoredRequest {
-    pub path: String,
-}
-
-impl From<GitPullResolution> for GitPullResolutionDto {
-    fn from(value: GitPullResolution) -> Self {
-        GitPullResolutionDto {
-            path: value.path,
-            choice: value.choice,
-            content: value.content,
-        }
-    }
-}
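(Editor's aside: the new `encrypted_auth_data` field above is an opaque, client-produced base64 string; this diff does not pin down the encryption scheme. A minimal sketch of how a client might produce such a value with the `chacha20poly1305` crate this PR adds to Cargo.lock — the key source and nonce handling are assumptions, not the PR's API:)

// Hypothetical sketch, not part of this diff: encrypt serialized git auth
// data under a client-held 32-byte key and base64-encode the result.
use base64::Engine;
use chacha20poly1305::aead::{Aead, KeyInit};
use chacha20poly1305::{ChaCha20Poly1305, Key, Nonce};

fn encrypt_auth_data(
    key: &[u8; 32],
    nonce: &[u8; 12], // must be unique per encryption
    auth_json: &[u8],
) -> Result<String, chacha20poly1305::aead::Error> {
    let cipher = ChaCha20Poly1305::new(Key::from_slice(key));
    let ciphertext = cipher.encrypt(Nonce::from_slice(nonce), auth_json)?;
    Ok(base64::engine::general_purpose::STANDARD.encode(ciphertext))
}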
diff --git a/api/crates/presentation/src/http/identity/keys/handlers/mod.rs b/api/crates/presentation/src/http/identity/keys/handlers/mod.rs
new file mode 100644
index 00000000..770984cc
--- /dev/null
+++ b/api/crates/presentation/src/http/identity/keys/handlers/mod.rs
@@ -0,0 +1,245 @@
+use axum::{extract::State, http::StatusCode, Json};
+use uuid::Uuid;
+
+use crate::context::IdentityContext;
+use crate::http::error::ApiError;
+use crate::http::extractors::AuthedUser;
+use application::core::services::errors::ServiceError;
+
+use super::types::{
+    EncryptedPrivateKeyResponse, MasterKeyBackupResponse, RegisterPublicKeyRequest,
+    StoreEncryptedPrivateKeyRequest, StoreMasterKeyBackupRequest, UserPublicKeyResponse,
+};
+
+fn map_keys_error(err: ServiceError) -> ApiError {
+    crate::http::error::map_service_error(err, "user_keys_service_error")
+}
+
+// ============================================================================
+// Public Key Endpoints
+// ============================================================================
+
+#[utoipa::path(
+    post,
+    path = "/api/me/keys",
+    tag = "E2EE",
+    request_body = RegisterPublicKeyRequest,
+    responses((status = 200, body = UserPublicKeyResponse))
+)]
+pub async fn register_public_key(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+    Json(payload): Json<RegisterPublicKeyRequest>,
+) -> Result<Json<UserPublicKeyResponse>, ApiError> {
+    let (public_key, key_type) = payload
+        .decode()
+        .map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.user_keys_service();
+    let dto = service
+        .register_public_key(auth.user_id, public_key, key_type)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(UserPublicKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/me/keys",
+    tag = "E2EE",
+    responses(
+        (status = 200, body = UserPublicKeyResponse),
+        (status = 404, description = "Public key not found")
+    )
+)]
+pub async fn get_my_public_key(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+) -> Result<Json<UserPublicKeyResponse>, ApiError> {
+    let service = ctx.user_keys_service();
+    let dto = service
+        .get_public_key(auth.user_id)
+        .await
+        .map_err(map_keys_error)?
+        .ok_or_else(|| ApiError::not_found("public_key_not_found"))?;
+
+    Ok(Json(UserPublicKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/users/{user_id}/keys",
+    tag = "E2EE",
+    params(("user_id" = Uuid, Path, description = "User ID")),
+    responses(
+        (status = 200, body = UserPublicKeyResponse),
+        (status = 404, description = "Public key not found")
+    )
+)]
+pub async fn get_user_public_key(
+    State(ctx): State<IdentityContext>,
+    _auth: AuthedUser,
+    axum::extract::Path(user_id): axum::extract::Path<Uuid>,
+) -> Result<Json<UserPublicKeyResponse>, ApiError> {
+    let service = ctx.user_keys_service();
+    let dto = service
+        .get_public_key(user_id)
+        .await
+        .map_err(map_keys_error)?
+        .ok_or_else(|| ApiError::not_found("public_key_not_found"))?;
+
+    Ok(Json(UserPublicKeyResponse::from(dto)))
+}
+
+// ============================================================================
+// Master Key Backup Endpoints
+// ============================================================================
+
+#[utoipa::path(
+    post,
+    path = "/api/me/master-key/backup",
+    tag = "E2EE",
+    request_body = StoreMasterKeyBackupRequest,
+    responses((status = 200, body = MasterKeyBackupResponse))
+)]
+pub async fn store_master_key_backup(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+    Json(payload): Json<StoreMasterKeyBackupRequest>,
+) -> Result<Json<MasterKeyBackupResponse>, ApiError> {
+    let (encrypted_key, salt, kdf_type, kdf_params) = payload
+        .decode()
+        .map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.user_keys_service();
+    let dto = service
+        .store_master_key_backup(auth.user_id, encrypted_key, salt, kdf_type, kdf_params)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(MasterKeyBackupResponse::from(dto)))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/me/master-key/backup",
+    tag = "E2EE",
+    responses(
+        (status = 200, body = MasterKeyBackupResponse),
+        (status = 404, description = "Master key backup not found")
+    )
+)]
+pub async fn get_master_key_backup(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+) -> Result<Json<MasterKeyBackupResponse>, ApiError> {
+    let service = ctx.user_keys_service();
+    let dto = service
+        .get_master_key_backup(auth.user_id)
+        .await
+        .map_err(map_keys_error)?
+        .ok_or_else(|| ApiError::not_found("master_key_backup_not_found"))?;
+
+    Ok(Json(MasterKeyBackupResponse::from(dto)))
+}
+
+// ============================================================================
+// Encrypted Private Key Endpoints
+// ============================================================================
+
+#[utoipa::path(
+    post,
+    path = "/api/me/private-key/encrypted",
+    tag = "E2EE",
+    request_body = StoreEncryptedPrivateKeyRequest,
+    responses((status = 200, body = EncryptedPrivateKeyResponse))
+)]
+pub async fn store_encrypted_private_key(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+    Json(payload): Json<StoreEncryptedPrivateKeyRequest>,
+) -> Result<Json<EncryptedPrivateKeyResponse>, ApiError> {
+    let (encrypted_private_key, nonce) = payload
+        .decode()
+        .map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.user_keys_service();
+    let dto = service
+        .store_encrypted_private_key(auth.user_id, encrypted_private_key, nonce)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(EncryptedPrivateKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/me/private-key/encrypted",
+    tag = "E2EE",
+    responses(
+        (status = 200, body = EncryptedPrivateKeyResponse),
+        (status = 404, description = "Encrypted private key not found")
+    )
+)]
+pub async fn get_encrypted_private_key(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+) -> Result<Json<EncryptedPrivateKeyResponse>, ApiError> {
+    let service = ctx.user_keys_service();
+    let dto = service
+        .get_encrypted_private_key(auth.user_id)
+        .await
+        .map_err(map_keys_error)?
+        .ok_or_else(|| ApiError::not_found("encrypted_private_key_not_found"))?;
+
+    Ok(Json(EncryptedPrivateKeyResponse::from(dto)))
+}
+
+// ============================================================================
+// E2EE Setup Status
+// ============================================================================
+
+#[utoipa::path(
+    post,
+    path = "/api/me/encryption/setup-complete",
+    tag = "E2EE",
+    responses((status = 204))
+)]
+pub async fn mark_encryption_setup_complete(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+) -> Result<StatusCode, ApiError> {
+    let service = ctx.user_keys_service();
+    service
+        .mark_encryption_setup_completed(auth.user_id)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(StatusCode::NO_CONTENT)
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/me/encryption/status",
+    tag = "E2EE",
+    responses((status = 200, body = EncryptionStatusResponse))
+)]
+pub async fn get_encryption_status(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+) -> Result<Json<EncryptionStatusResponse>, ApiError> {
+    let service = ctx.user_keys_service();
+    let is_setup = service
+        .is_encryption_setup_completed(auth.user_id)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(EncryptionStatusResponse { is_setup_completed: is_setup }))
+}
+
+#[derive(Debug, serde::Serialize, utoipa::ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct EncryptionStatusResponse {
+    pub is_setup_completed: bool,
+}
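(Editor's aside: for orientation, the JSON a client would POST to /api/me/keys, matching RegisterPublicKeyRequest's camelCase serde config — the 32-byte X25519 key is an assumption taken from the schema example:)

// Illustrative only: building the request body for POST /api/me/keys.
use base64::Engine;

fn register_public_key_body(public_key: &[u8; 32]) -> serde_json::Value {
    serde_json::json!({
        // STANDARD base64, matching RegisterPublicKeyRequest::decode()
        "publicKey": base64::engine::general_purpose::STANDARD.encode(public_key),
        "keyType": "x25519",
    })
}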
diff --git a/api/crates/presentation/src/http/identity/keys/mod.rs b/api/crates/presentation/src/http/identity/keys/mod.rs
new file mode 100644
index 00000000..4706426d
--- /dev/null
+++ b/api/crates/presentation/src/http/identity/keys/mod.rs
@@ -0,0 +1,39 @@
+mod handlers;
+pub mod types;
+
+use axum::routing::{get, post};
+use axum::Router;
+
+use crate::context::AppContext;
+
+pub use handlers::{
+    get_encryption_status, get_encrypted_private_key, get_master_key_backup, get_my_public_key,
+    get_user_public_key, mark_encryption_setup_complete, register_public_key,
+    store_encrypted_private_key, store_master_key_backup, EncryptionStatusResponse,
+};
+pub use types::*;
+
+pub mod openapi {
+    pub use super::handlers::*;
+}
+
+pub fn routes(ctx: AppContext) -> Router {
+    Router::new()
+        // Public key endpoints
+        .route("/me/keys", get(get_my_public_key).post(register_public_key))
+        .route("/users/:user_id/keys", get(get_user_public_key))
+        // Master key backup endpoints
+        .route(
+            "/me/master-key/backup",
+            get(get_master_key_backup).post(store_master_key_backup),
+        )
+        // Encrypted private key endpoints
+        .route(
+            "/me/private-key/encrypted",
+            get(get_encrypted_private_key).post(store_encrypted_private_key),
+        )
+        // E2EE setup status
+        .route("/me/encryption/setup-complete", post(mark_encryption_setup_complete))
+        .route("/me/encryption/status", get(get_encryption_status))
+        .with_state(ctx)
+}
diff --git a/api/crates/presentation/src/http/identity/keys/types.rs b/api/crates/presentation/src/http/identity/keys/types.rs
new file mode 100644
index 00000000..b5d661dd
--- /dev/null
+++ b/api/crates/presentation/src/http/identity/keys/types.rs
@@ -0,0 +1,200 @@
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+use application::identity::dtos::{
+    UserEncryptedMasterKeyDto, UserEncryptedPrivateKeyDto, UserPublicKeyDto,
+};
+use domain::identity::keys::{KdfParams, KdfType, KeyType};
+
+// ============================================================================
+// Public Key Types
+// ============================================================================
+
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct UserPublicKeyResponse {
+    #[schema(value_type = String, format = "byte")]
+    pub public_key: String, // base64 encoded
+    pub key_type: String,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+impl From<UserPublicKeyDto> for UserPublicKeyResponse {
+    fn from(dto: UserPublicKeyDto) -> Self {
+        use base64::Engine;
+        Self {
+            public_key: base64::engine::general_purpose::STANDARD.encode(&dto.public_key),
+            key_type: dto.key_type.as_str().to_string(),
+            created_at: dto.created_at,
+            updated_at: dto.updated_at,
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct RegisterPublicKeyRequest {
+    /// Base64 encoded public key
+    #[schema(value_type = String, format = "byte")]
+    pub public_key: String,
+    /// Key type (x25519, ed25519, or ecdh-p256)
+    #[schema(example = "x25519")]
+    pub key_type: String,
+}
+
+impl RegisterPublicKeyRequest {
+    pub fn decode(&self) -> Result<(Vec<u8>, KeyType), &'static str> {
+        use base64::Engine;
+        let public_key = base64::engine::general_purpose::STANDARD
+            .decode(&self.public_key)
+            .map_err(|_| "invalid_base64")?;
+        let key_type = KeyType::parse(&self.key_type).ok_or("invalid_key_type")?;
+        Ok((public_key, key_type))
+    }
+}
+
+// ============================================================================
+// Master Key Backup Types
+// ============================================================================
+
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct MasterKeyBackupResponse {
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_key: String, // base64 encoded
+    #[schema(value_type = String, format = "byte")]
+    pub salt: String, // base64 encoded
+    pub kdf_type: String,
+    pub kdf_params: KdfParamsResponse,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct KdfParamsResponse {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub memory: Option<u32>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub iterations: Option<u32>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub parallelism: Option<u32>,
+}
+
+impl From<&KdfParams> for KdfParamsResponse {
+    fn from(params: &KdfParams) -> Self {
+        Self {
+            memory: params.memory,
+            iterations: params.iterations,
+            parallelism: params.parallelism,
+        }
+    }
+}
+
+impl From<KdfParamsResponse> for KdfParams {
+    fn from(resp: KdfParamsResponse) -> Self {
+        Self {
+            memory: resp.memory,
+            iterations: resp.iterations,
+            parallelism: resp.parallelism,
+        }
+    }
+}
+
+impl From<UserEncryptedMasterKeyDto> for MasterKeyBackupResponse {
+    fn from(dto: UserEncryptedMasterKeyDto) -> Self {
+        use base64::Engine;
+        Self {
+            encrypted_key: base64::engine::general_purpose::STANDARD.encode(&dto.encrypted_key),
+            salt: base64::engine::general_purpose::STANDARD.encode(&dto.salt),
+            kdf_type: dto.kdf_type.as_str().to_string(),
+            kdf_params: KdfParamsResponse::from(&dto.kdf_params),
+            created_at: dto.created_at,
+            updated_at: dto.updated_at,
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct StoreMasterKeyBackupRequest {
+    /// Base64 encoded encrypted master key
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_key: String,
+    /// Base64 encoded salt
+    #[schema(value_type = String, format = "byte")]
+    pub salt: String,
+    /// KDF type (e.g., "argon2id", "pbkdf2")
+    #[schema(example = "argon2id")]
+    pub kdf_type: String,
+    /// KDF parameters
+    pub kdf_params: KdfParamsResponse,
+}
+
+impl StoreMasterKeyBackupRequest {
+    pub fn decode(&self) -> Result<(Vec<u8>, Vec<u8>, KdfType, KdfParams), &'static str> {
+        use base64::Engine;
+        let encrypted_key = base64::engine::general_purpose::STANDARD
+            .decode(&self.encrypted_key)
+            .map_err(|_| "invalid_encrypted_key_base64")?;
+        let salt = base64::engine::general_purpose::STANDARD
+            .decode(&self.salt)
+            .map_err(|_| "invalid_salt_base64")?;
+        let kdf_type = KdfType::parse(&self.kdf_type).ok_or("invalid_kdf_type")?;
+        let kdf_params = KdfParams::from(self.kdf_params.clone());
+        Ok((encrypted_key, salt, kdf_type, kdf_params))
+    }
+}
+
+// ============================================================================
+// Encrypted Private Key Types
+// ============================================================================
+
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct EncryptedPrivateKeyResponse {
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_private_key: String, // base64 encoded
+    #[schema(value_type = String, format = "byte")]
+    pub nonce: String, // base64 encoded
+    pub created_at: chrono::DateTime<chrono::Utc>,
+    pub updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+impl From<UserEncryptedPrivateKeyDto> for EncryptedPrivateKeyResponse {
+    fn from(dto: UserEncryptedPrivateKeyDto) -> Self {
+        use base64::Engine;
+        Self {
+            encrypted_private_key: base64::engine::general_purpose::STANDARD
+                .encode(&dto.encrypted_private_key),
+            nonce: base64::engine::general_purpose::STANDARD.encode(&dto.nonce),
+            created_at: dto.created_at,
+            updated_at: dto.updated_at,
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct StoreEncryptedPrivateKeyRequest {
+    /// Base64 encoded encrypted private key
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_private_key: String,
+    /// Base64 encoded nonce
+    #[schema(value_type = String, format = "byte")]
+    pub nonce: String,
+}
+
+impl StoreEncryptedPrivateKeyRequest {
+    pub fn decode(&self) -> Result<(Vec<u8>, Vec<u8>), &'static str> {
+        use base64::Engine;
+        let encrypted_private_key = base64::engine::general_purpose::STANDARD
+            .decode(&self.encrypted_private_key)
+            .map_err(|_| "invalid_encrypted_private_key_base64")?;
+        let nonce = base64::engine::general_purpose::STANDARD
+            .decode(&self.nonce)
+            .map_err(|_| "invalid_nonce_base64")?;
+        Ok((encrypted_private_key, nonce))
+    }
+}
diff --git a/api/crates/presentation/src/http/identity/migration/handlers/mod.rs b/api/crates/presentation/src/http/identity/migration/handlers/mod.rs
new file mode 100644
index 00000000..16522ccb
--- /dev/null
+++ b/api/crates/presentation/src/http/identity/migration/handlers/mod.rs
@@ -0,0 +1,80 @@
+//! HTTP handlers for E2EE migration.
+
+use axum::{extract::State, Json};
+
+use crate::context::IdentityContext;
+use crate::http::error::ApiError;
+use crate::http::extractors::AuthedUser;
+use application::core::services::errors::ServiceError;
+
+use super::types::{MigrateRequest, MigrationResponse};
+
+fn map_migration_error(err: ServiceError) -> ApiError {
+    match &err {
+        ServiceError::Conflict => ApiError::conflict("migration_already_completed"),
+        ServiceError::BadRequest(msg) => ApiError::bad_request(*msg),
+        _ => crate::http::error::map_service_error(err, "migration_service_error"),
+    }
+}
+
+/// Migrate user data to E2EE.
+///
+/// This endpoint receives encryption keys from the client and encrypts
+/// all of the user's existing plaintext data on the server.
+///
+/// The operation is atomic - either all data is encrypted or none is.
+#[utoipa::path(
+    post,
+    path = "/api/me/encryption/migrate",
+    tag = "E2EE",
+    request_body = MigrateRequest,
+    responses(
+        (status = 200, body = MigrationResponse, description = "Migration completed successfully"),
+        (status = 400, description = "Invalid request (e.g., missing DEK for document)"),
+        (status = 409, description = "Migration already completed"),
+        (status = 500, description = "Migration failed")
+    )
+)]
+pub async fn migrate(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+    Json(payload): Json<MigrateRequest>,
+) -> Result<Json<MigrationResponse>, ApiError> {
+    let request = payload.decode().map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.migration_service();
+    let result = service
+        .migrate_user_data(auth.user_id, request)
+        .await
+        .map_err(map_migration_error)?;
+
+    Ok(Json(MigrationResponse::from(result)))
+}
+
+/// Check if migration is needed for the current user.
+#[utoipa::path(
+    get,
+    path = "/api/me/encryption/needs-migration",
+    tag = "E2EE",
+    responses(
+        (status = 200, body = NeedsMigrationResponse)
+    )
+)]
+pub async fn needs_migration(
+    State(ctx): State<IdentityContext>,
+    auth: AuthedUser,
+) -> Result<Json<NeedsMigrationResponse>, ApiError> {
+    let service = ctx.migration_service();
+    let needs = service
+        .needs_migration(auth.user_id)
+        .await
+        .map_err(map_migration_error)?;
+
+    Ok(Json(NeedsMigrationResponse { needs_migration: needs }))
+}
+
+#[derive(Debug, serde::Serialize, utoipa::ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct NeedsMigrationResponse {
+    pub needs_migration: bool,
+}
diff --git a/api/crates/presentation/src/http/identity/migration/mod.rs b/api/crates/presentation/src/http/identity/migration/mod.rs
new file mode 100644
index 00000000..c50d2eea
--- /dev/null
+++ b/api/crates/presentation/src/http/identity/migration/mod.rs
@@ -0,0 +1,22 @@
+//! E2EE migration HTTP module.
+
+pub mod handlers;
+pub mod types;
+
+use axum::{routing::{get, post}, Router};
+
+use crate::context::AppContext;
+
+pub use handlers::{migrate, needs_migration, NeedsMigrationResponse};
+pub use types::*;
+
+pub mod openapi {
+    pub use super::handlers::*;
+}
+
+pub fn routes(ctx: AppContext) -> Router {
+    Router::new()
+        .route("/me/encryption/migrate", post(handlers::migrate))
+        .route("/me/encryption/needs-migration", get(handlers::needs_migration))
+        .with_state(ctx)
+}
diff --git a/api/crates/presentation/src/http/identity/migration/types.rs b/api/crates/presentation/src/http/identity/migration/types.rs
new file mode 100644
index 00000000..548b6a82
--- /dev/null
+++ b/api/crates/presentation/src/http/identity/migration/types.rs
@@ -0,0 +1,165 @@
+//! HTTP request/response types for E2EE migration.
+
+use std::collections::HashMap;
+
+use base64::Engine;
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+use application::identity::services::migration::{
+    EncryptedDek, MemberEncryptedKek, MigrationRequest, MigrationResult, MigrationStatus,
+};
+
+// ============================================================================
+// Request Types
+// ============================================================================
+
+/// Request to migrate user data to E2EE.
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct MigrateRequest {
+    /// Workspace KEKs (Key Encryption Keys).
+    /// Maps workspace_id (string) -> base64-encoded raw KEK.
+    pub workspace_keks: HashMap<String, String>,
+
+    /// Document DEKs (Data Encryption Keys).
+    /// Maps document_id (string) -> base64-encoded raw DEK.
+    pub document_deks: HashMap<String, String>,
+
+    /// Encrypted workspace KEKs to store for each member.
+    /// Maps workspace_id (string) -> array of member encrypted KEKs.
+    pub encrypted_workspace_keks: HashMap<String, Vec<MemberEncryptedKekRequest>>,
+
+    /// Encrypted DEKs to store for each document.
+    /// Maps document_id (string) -> encrypted DEK with nonce.
+    pub encrypted_document_deks: HashMap<String, EncryptedDekRequest>,
+}
+
+/// Encrypted KEK for a workspace member (request).
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct MemberEncryptedKekRequest {
+    /// User ID.
+    pub user_id: String,
+    /// Base64-encoded encrypted KEK.
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_kek: String,
+}
+
+/// Encrypted DEK for a document (request).
+#[derive(Debug, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct EncryptedDekRequest {
+    /// Base64-encoded encrypted DEK.
+    #[schema(value_type = String, format = "byte")]
+    pub encrypted_dek: String,
+    /// Base64-encoded nonce.
+    #[schema(value_type = String, format = "byte")]
+    pub nonce: String,
+}
+
+impl MigrateRequest {
+    /// Decode the request into domain types.
+    pub fn decode(&self) -> Result<MigrationRequest, &'static str> {
+        let b64 = base64::engine::general_purpose::STANDARD;
+
+        // Decode workspace KEKs
+        let mut workspace_keks = HashMap::new();
+        for (ws_id, kek_b64) in &self.workspace_keks {
+            let ws_uuid = Uuid::parse_str(ws_id).map_err(|_| "invalid_workspace_id")?;
+            let kek = b64.decode(kek_b64).map_err(|_| "invalid_base64_kek")?;
+            if kek.len() != 32 {
+                return Err("invalid_kek_length");
+            }
+            workspace_keks.insert(ws_uuid, kek);
+        }
+
+        // Decode document DEKs
+        let mut document_deks = HashMap::new();
+        for (doc_id, dek_b64) in &self.document_deks {
+            let doc_uuid = Uuid::parse_str(doc_id).map_err(|_| "invalid_document_id")?;
+            let dek = b64.decode(dek_b64).map_err(|_| "invalid_base64_dek")?;
+            if dek.len() != 32 {
+                return Err("invalid_dek_length");
+            }
+            document_deks.insert(doc_uuid, dek);
+        }
+
+        // Decode encrypted workspace KEKs
+        let mut encrypted_workspace_keks = HashMap::new();
+        for (ws_id, members) in &self.encrypted_workspace_keks {
+            let ws_uuid = Uuid::parse_str(ws_id).map_err(|_| "invalid_workspace_id")?;
+            let mut member_keks = Vec::new();
+            for m in members {
+                let user_uuid = Uuid::parse_str(&m.user_id).map_err(|_| "invalid_user_id")?;
+                let encrypted_kek = b64
+                    .decode(&m.encrypted_kek)
+                    .map_err(|_| "invalid_base64_encrypted_kek")?;
+                member_keks.push(MemberEncryptedKek {
+                    user_id: user_uuid,
+                    encrypted_kek,
+                });
+            }
+            encrypted_workspace_keks.insert(ws_uuid, member_keks);
+        }
+
+        // Decode encrypted document DEKs
+        let mut encrypted_document_deks = HashMap::new();
+        for (doc_id, enc_dek) in &self.encrypted_document_deks {
+            let doc_uuid = Uuid::parse_str(doc_id).map_err(|_| "invalid_document_id")?;
+            let encrypted_dek = b64
+                .decode(&enc_dek.encrypted_dek)
+                .map_err(|_| "invalid_base64_encrypted_dek")?;
+            let nonce = b64
+                .decode(&enc_dek.nonce)
+                .map_err(|_| "invalid_base64_nonce")?;
+            encrypted_document_deks.insert(
+                doc_uuid,
+                EncryptedDek {
+                    encrypted_dek,
+                    nonce,
+                },
+            );
+        }
+
+        Ok(MigrationRequest {
+            workspace_keks,
+            document_deks,
+            encrypted_workspace_keks,
+            encrypted_document_deks,
+        })
+    }
+}
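(Editor's aside: the shape decode() expects, as a sketch — every UUID and base64 value below is a fabricated placeholder; the 32-byte length checks above apply to the decoded KEK/DEK values:)

// Illustrative only: a MigrateRequest body accepted by decode().
fn sample_migrate_request() -> serde_json::Value {
    serde_json::json!({
        "workspaceKeks": { "<workspace-uuid>": "<base64 32-byte KEK>" },
        "documentDeks": { "<document-uuid>": "<base64 32-byte DEK>" },
        "encryptedWorkspaceKeks": {
            "<workspace-uuid>": [
                { "userId": "<user-uuid>", "encryptedKek": "<base64>" }
            ]
        },
        "encryptedDocumentDeks": {
            "<document-uuid>": { "encryptedDek": "<base64>", "nonce": "<base64>" }
        }
    })
}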
+
+// ============================================================================
+// Response Types
+// ============================================================================
+
+/// Response for migration result.
+#[derive(Debug, Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct MigrationResponse {
+    /// Number of documents encrypted.
+    pub documents_encrypted: usize,
+    /// Number of files with encrypted metadata.
+    pub files_encrypted: usize,
+    /// Total number of Yjs updates cleared.
+    pub updates_cleared: u64,
+    /// Migration status.
+    pub status: String,
+}
+
+impl From<MigrationResult> for MigrationResponse {
+    fn from(result: MigrationResult) -> Self {
+        Self {
+            documents_encrypted: result.documents_encrypted,
+            files_encrypted: result.files_encrypted,
+            updates_cleared: result.updates_cleared,
+            status: match result.status {
+                MigrationStatus::Completed => "completed".to_string(),
+                MigrationStatus::AlreadyCompleted => "already_completed".to_string(),
+            },
+        }
+    }
+}
diff --git a/api/crates/presentation/src/http/identity/mod.rs b/api/crates/presentation/src/http/identity/mod.rs
index f221d7e6..fa091d53 100644
--- a/api/crates/presentation/src/http/identity/mod.rs
+++ b/api/crates/presentation/src/http/identity/mod.rs
@@ -1,3 +1,5 @@
 pub mod api_tokens;
 pub mod auth;
+pub mod keys;
+pub mod migration;
 pub mod shortcuts;
diff --git a/api/crates/presentation/src/http/plugins/exec.rs b/api/crates/presentation/src/http/plugins/exec.rs
deleted file mode 100644
index 66baf252..00000000
--- a/api/crates/presentation/src/http/plugins/exec.rs
+++ /dev/null
@@ -1,103 +0,0 @@
-use axum::{
-    Json,
-    extract::{Path, State},
-    http::HeaderMap,
-};
-use serde_json::json;
-
-use crate::context::PluginsContext;
-use crate::http::error::ApiError;
-use crate::http::identity::auth::Bearer;
-use application::core::services::access;
-use domain::access::permissions::PERM_PLUGIN_RUN;
-use domain::documents::doc_type::DocumentType;
-
-use super::types::{ExecBody, ExecResultResponse, ensure_valid_plugin_id, extract_doc_id};
-use super::util::{map_plugin_service_error, resolve_plugin_user_context};
-
-#[utoipa::path(
-    post,
-    path = "/api/plugins/{plugin}/exec/{action}",
-    request_body = ExecBody,
-    params(
-        ("plugin" = String, Path, description = "Plugin ID"),
-        ("action" = String, Path, description = "Action")
-    ),
-    responses((status = 200, body = ExecResultResponse)),
-    tag = "Plugins",
-    operation_id = "pluginsExecAction"
-)]
-pub async fn exec_action(
-    State(ctx): State<PluginsContext>,
-    bearer: Bearer,
-    headers: HeaderMap,
-    Path((plugin, action)): Path<(String, String)>,
-    Json(body): Json<ExecBody>,
-) -> Result<Json<ExecResultResponse>, ApiError> {
-    ensure_valid_plugin_id(&plugin)?;
-    let bearer_token = bearer.0;
-    let plugin_ctx =
-        resolve_plugin_user_context(&ctx, &headers, bearer_token.as_str(), Some(PERM_PLUGIN_RUN))
-            .await?;
-    let actor = plugin_ctx.actor.clone();
-    let doc_id_from_payload = body.payload.as_ref().and_then(extract_doc_id);
-    let doc_id_from_share = if doc_id_from_payload.is_none() {
-        if let access::Actor::ShareToken(token) = &actor {
-            ctx.share_service()
-                .resolve_share_context(token)
-                .await
-                .map_err(map_plugin_service_error)?
-                .and_then(|ctx| {
-                    if ctx.shared_type == DocumentType::Document {
-                        Some(ctx.shared_id)
-                    } else {
-                        None
-                    }
-                })
-        } else {
-            None
-        }
-    } else {
-        None
-    };
-    let effective_doc_id = doc_id_from_payload.or(doc_id_from_share);
-    if let Some(doc_id) = effective_doc_id {
-        let auth = ctx.authorization();
-        if let access::Actor::ShareToken(_) = &actor {
-            auth.require_view(&actor, doc_id)
-                .await
-                .map_err(|_| ApiError::forbidden("forbidden"))?;
-        } else {
-            auth.require_edit(&actor, doc_id)
-                .await
-                .map_err(|_| ApiError::forbidden("forbidden"))?;
-        }
-    }
-    let allowed_doc_id = match &actor {
-        access::Actor::ShareToken(_) => effective_doc_id,
-        _ => None,
-    };
-    let exec_service = ctx.plugin_execution_service();
-    match exec_service
-        .execute_action(
-            plugin_ctx.workspace_id,
-            plugin_ctx.user_id,
-            &plugin_ctx.permissions,
-            &plugin,
-            &action,
-            body.payload.clone(),
-            allowed_doc_id,
-            &actor,
-        )
-        .await
-        .map_err(map_plugin_service_error)?
-    {
-        Some(result) => Ok(Json(ExecResultResponse::from(result))),
-        None => Ok(Json(ExecResultResponse {
-            ok: false,
-            data: None,
-            effects: vec![],
-            error: Some(json!({ "code": "UNKNOWN_ACTION" })),
-        })),
-    }
-}
diff --git a/api/crates/presentation/src/http/plugins/mod.rs b/api/crates/presentation/src/http/plugins/mod.rs
index cd713a4d..e9e3dd2a 100644
--- a/api/crates/presentation/src/http/plugins/mod.rs
+++ b/api/crates/presentation/src/http/plugins/mod.rs
@@ -1,5 +1,4 @@
 mod assets;
-mod exec;
 mod install;
 mod kv;
 mod manifest;
@@ -16,7 +15,6 @@ use axum::{
 use crate::context::AppContext;
 
 pub use assets::get_plugin_asset;
-pub use exec::exec_action;
 pub use install::{install_from_url, uninstall};
 pub use kv::{get_kv_value, put_kv_value};
 pub use manifest::get_manifest;
@@ -26,7 +24,6 @@ pub use updates::sse_updates;
 
 pub mod openapi {
     pub use super::assets::*;
-    pub use super::exec::*;
     pub use super::install::*;
    pub use super::kv::*;
     pub use super::manifest::*;
@@ -40,8 +37,6 @@ pub fn routes(ctx: AppContext) -> Router {
         .route("/me/plugins/manifest", get(get_manifest))
         // SSE updates (stubbed)
         .route("/me/plugins/updates", get(sse_updates))
-        // Generic exec endpoint
-        .route("/plugins/:plugin/exec/:action", post(exec_action))
         .route("/me/plugins/install-from-url", post(install_from_url))
         .route("/me/plugins/uninstall", post(uninstall))
         // Generic records API
diff --git a/api/crates/presentation/src/http/plugins/types.rs b/api/crates/presentation/src/http/plugins/types.rs
index 9cb7f588..96a571e1 100644
--- a/api/crates/presentation/src/http/plugins/types.rs
+++ b/api/crates/presentation/src/http/plugins/types.rs
@@ -48,9 +48,11 @@
     pub scope: String,
     pub mounts: Vec<String>,
     pub frontend: serde_json::Value,
+    pub backend: serde_json::Value,
     pub permissions: Vec<String>,
     pub config: serde_json::Value,
     pub ui: serde_json::Value,
+    pub renderers: serde_json::Value,
     pub author: Option<String>,
     pub repository: Option<String>,
 }
@@ -64,9 +66,11 @@ impl From for ManifestItem {
             scope: value.scope.as_str().to_string(),
             mounts: value.mounts,
             frontend: value.frontend,
+            backend: value.backend,
             permissions: value.permissions,
             config: value.config,
             ui: value.ui,
+            renderers: value.renderers,
             author: value.author,
             repository: value.repository,
         }
diff --git a/api/crates/presentation/src/http/workspaces/invitations.rs b/api/crates/presentation/src/http/workspaces/invitations.rs
index 4fd70b52..20a3ce98 100644
--- a/api/crates/presentation/src/http/workspaces/invitations.rs
+++ b/api/crates/presentation/src/http/workspaces/invitations.rs
@@ -12,8 +12,9 @@
 use application::core::services::errors::ServiceError;
 use domain::access::permissions::PERM_MEMBER_INVITE;
 
 use super::types::{
-    CreateWorkspaceInvitationRequest, WorkspaceInvitationResponse, invitation_response_from,
-    map_service_error, parse_role_kind, parse_system_role, require_permission,
+    AcceptInvitationResponse, CreateWorkspaceInvitationRequest, UpdateInvitationKekRequest,
+    WorkspaceInvitationResponse, invitation_response_from, map_service_error, parse_role_kind,
+    parse_system_role, require_permission,
 };
 
 #[utoipa::path(
@@ -117,13 +118,13 @@ pub async fn revoke_invitation(
     path = "/api/workspace-invitations/{token}/accept",
     tag = "Workspaces",
     params(("token" = String, Path, description = "Invitation token")),
-    responses((status = 204))
+    responses((status = 200, body = AcceptInvitationResponse))
 )]
 pub async fn accept_invitation(
     State(ctx): State<WorkspacesContext>,
     auth: AuthedUser,
     Path(token): Path<String>,
-) -> Result<StatusCode, ApiError> {
+) -> Result<Json<AcceptInvitationResponse>, ApiError> {
     let user = ctx
         .account_service()
         .get_me(auth.user_id)
@@ -142,10 +143,48 @@ pub async fn accept_invitation(
         })?
         .ok_or(ApiError::unauthorized("unauthorized"))?;
 
-    ctx.workspace_service()
+    let record = ctx
+        .workspace_service()
         .accept_invitation(&token, auth.user_id, &user.email)
         .await
         .map_err(map_service_error)?;
 
-    Ok(StatusCode::NO_CONTENT)
+    Ok(Json(AcceptInvitationResponse {
+        workspace_id: record.workspace_id,
+        encrypted_kek_for_invite: record.encrypted_kek_for_invite,
+        kek_version: record.kek_version,
+    }))
+}
+
+#[utoipa::path(
+    patch,
+    path = "/api/workspaces/{id}/invitations/{invitation_id}/kek",
+    tag = "Workspaces",
+    params(
+        ("id" = Uuid, Path, description = "Workspace ID"),
+        ("invitation_id" = Uuid, Path, description = "Invitation ID"),
+    ),
+    request_body = UpdateInvitationKekRequest,
+    responses((status = 200, body = WorkspaceInvitationResponse))
+)]
+pub async fn update_invitation_kek(
+    State(ctx): State<WorkspacesContext>,
+    auth: AuthedUser,
+    Path((workspace_id, invitation_id)): Path<(Uuid, Uuid)>,
+    Json(body): Json<UpdateInvitationKekRequest>,
+) -> Result<Json<WorkspaceInvitationResponse>, ApiError> {
+    require_permission(&ctx, workspace_id, auth.user_id, PERM_MEMBER_INVITE).await?;
+
+    let record = ctx
+        .workspace_service()
+        .update_invitation_kek(
+            workspace_id,
+            invitation_id,
+            &body.encrypted_kek_for_invite,
+            body.kek_version,
+        )
+        .await
+        .map_err(map_service_error)?;
+
+    Ok(Json(invitation_response_from(record)))
 }
diff --git a/api/crates/presentation/src/http/workspaces/keys.rs b/api/crates/presentation/src/http/workspaces/keys.rs
new file mode 100644
index 00000000..67155e5f
--- /dev/null
+++ b/api/crates/presentation/src/http/workspaces/keys.rs
@@ -0,0 +1,213 @@
+use axum::{extract::State, Json};
+use uuid::Uuid;
+
+use crate::context::WorkspacesContext;
+use crate::http::error::ApiError;
+use crate::http::extractors::WorkspaceAuth;
+use application::core::services::errors::ServiceError;
+
+use super::types::{
+    RotateWorkspaceKeyRequest, RotateWorkspaceKeyResponse, StoreWorkspaceKeyRequest,
+    WorkspaceKeyResponse, WorkspaceKeyVersionResponse,
+};
+
+fn map_keys_error(err: ServiceError) -> ApiError {
+    crate::http::error::map_service_error(err, "workspace_keys_service_error")
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/workspaces/{id}/keys/me",
+    tag = "E2EE",
+    params(("id" = Uuid, Path, description = "Workspace ID")),
+    responses(
+        (status = 200, body = WorkspaceKeyResponse),
+        (status = 404, description = "Key not found")
+    )
+)]
+pub async fn get_my_workspace_key(
+    State(ctx): State<WorkspacesContext>,
+    auth: WorkspaceAuth,
+) -> Result<Json<WorkspaceKeyResponse>, ApiError> {
+    let service = ctx.workspace_keys_service();
+    let dto = service
+        .get_encrypted_kek(auth.workspace_id, auth.user_id)
+        .await
+        .map_err(map_keys_error)?
+        .ok_or_else(|| ApiError::not_found("workspace_key_not_found"))?;
+
+    Ok(Json(WorkspaceKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    post,
+    path = "/api/workspaces/{id}/keys",
+    tag = "E2EE",
+    params(("id" = Uuid, Path, description = "Workspace ID")),
+    request_body = StoreWorkspaceKeyRequest,
+    responses((status = 200, body = WorkspaceKeyResponse))
+)]
+pub async fn store_workspace_key(
+    State(ctx): State<WorkspacesContext>,
+    auth: WorkspaceAuth,
+    axum::extract::Path(workspace_id): axum::extract::Path<Uuid>,
+    Json(payload): Json<StoreWorkspaceKeyRequest>,
+) -> Result<Json<WorkspaceKeyResponse>, ApiError> {
+    // Verify the workspace_id matches the auth context
+    if workspace_id != auth.workspace_id {
+        return Err(ApiError::forbidden("workspace_mismatch"));
+    }
+
+    let encrypted_kek = payload
+        .decode()
+        .map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.workspace_keys_service();
+    let dto = service
+        .store_encrypted_kek(
+            auth.workspace_id,
+            auth.user_id,
+            encrypted_kek,
+            payload.key_version,
+        )
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(WorkspaceKeyResponse::from(dto)))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/workspaces/{id}/keys",
+    tag = "E2EE",
+    params(("id" = Uuid, Path, description = "Workspace ID")),
+    responses((status = 200, body = Vec<WorkspaceKeyResponse>))
+)]
+pub async fn list_workspace_keys(
+    State(ctx): State<WorkspacesContext>,
+    auth: WorkspaceAuth,
+) -> Result<Json<Vec<WorkspaceKeyResponse>>, ApiError> {
+    // Check permission for listing all keys (admin operation)
+    auth.ensure_permission("workspace:manage")?;
+
+    let service = ctx.workspace_keys_service();
+    let dtos = service
+        .list_encrypted_keks(auth.workspace_id)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(dtos.into_iter().map(WorkspaceKeyResponse::from).collect()))
+}
+
+#[utoipa::path(
+    get,
+    path = "/api/workspaces/{id}/keys/version",
+    tag = "E2EE",
+    params(("id" = Uuid, Path, description = "Workspace ID")),
+    responses((status = 200, body = WorkspaceKeyVersionResponse))
+)]
+pub async fn get_workspace_key_version(
+    State(ctx): State<WorkspacesContext>,
+    auth: WorkspaceAuth,
+) -> Result<Json<WorkspaceKeyVersionResponse>, ApiError> {
+    let service = ctx.workspace_keys_service();
+    let version = service
+        .get_current_key_version(auth.workspace_id)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(WorkspaceKeyVersionResponse {
+        workspace_id: auth.workspace_id,
+        key_version: version,
+    }))
+}
+
+#[derive(serde::Serialize, utoipa::ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct DeleteKeyVersionResponse {
+    pub workspace_id: Uuid,
+    pub key_version: i32,
+    pub deleted_count: u64,
+}
+
+#[utoipa::path(
+    delete,
+    path = "/api/workspaces/{id}/keys/{version}",
+    tag = "E2EE",
+    params(
+        ("id" = Uuid, Path, description = "Workspace ID"),
+        ("version" = i32, Path, description = "Key version to delete")
+    ),
+    responses(
+        (status = 200, body = DeleteKeyVersionResponse),
+        (status = 403, description = "Permission denied")
+    )
+)]
+pub async fn delete_key_version(
+    State(ctx): State<WorkspacesContext>,
+    auth: WorkspaceAuth,
+    axum::extract::Path((workspace_id, key_version)): axum::extract::Path<(Uuid, i32)>,
+) -> Result<Json<DeleteKeyVersionResponse>, ApiError> {
+    // Verify the workspace_id matches the auth context
+    if workspace_id != auth.workspace_id {
+        return Err(ApiError::forbidden("workspace_mismatch"));
+    }
+
+    // Check permission for deleting keys (admin operation)
+    auth.ensure_permission("workspace:manage")?;
+
+    let service = ctx.workspace_keys_service();
+    let deleted_count = service
+        .delete_key_version(auth.workspace_id, key_version)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(DeleteKeyVersionResponse {
+        workspace_id: auth.workspace_id,
+        key_version,
+        deleted_count,
+    }))
+}
+
+#[utoipa::path(
+    post,
+    path = "/api/workspaces/{id}/keys/rotate",
+    tag = "E2EE",
+    params(("id" = Uuid, Path, description = "Workspace ID")),
+    request_body = RotateWorkspaceKeyRequest,
+    responses(
+        (status = 200, body = RotateWorkspaceKeyResponse),
+        (status = 400, description = "Invalid request"),
+        (status = 403, description = "Permission denied")
+    )
+)]
+pub async fn rotate_workspace_key(
+    State(ctx): State<WorkspacesContext>,
+    auth: WorkspaceAuth,
+    axum::extract::Path(workspace_id): axum::extract::Path<Uuid>,
+    Json(payload): Json<RotateWorkspaceKeyRequest>,
+) -> Result<Json<RotateWorkspaceKeyResponse>, ApiError> {
+    // Verify the workspace_id matches the auth context
+    if workspace_id != auth.workspace_id {
+        return Err(ApiError::forbidden("workspace_mismatch"));
+    }
+
+    // Check permission for key rotation (admin operation)
+    auth.ensure_permission("workspace:manage")?;
+
+    let member_keys = payload
+        .decode()
+        .map_err(|e| ApiError::bad_request(e))?;
+
+    let service = ctx.workspace_keys_service();
+    let (new_version, keys_updated) = service
+        .rotate_keys(auth.workspace_id, member_keys)
+        .await
+        .map_err(map_keys_error)?;
+
+    Ok(Json(RotateWorkspaceKeyResponse {
+        workspace_id: auth.workspace_id,
+        new_key_version: new_version,
+        keys_updated,
+    }))
+}
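(Editor's aside: the rotation endpoint expects one wrapped KEK per member. A sketch of assembling that body client-side — the wrapping step itself is elided, and `wrapped` is a hypothetical list of (user_id, ciphertext) pairs:)

// Illustrative only: building a RotateWorkspaceKeyRequest body.
use base64::Engine;
use uuid::Uuid;

fn rotate_body(wrapped: &[(Uuid, Vec<u8>)]) -> serde_json::Value {
    let member_keys: Vec<serde_json::Value> = wrapped
        .iter()
        .map(|(user_id, kek)| {
            serde_json::json!({
                "userId": user_id,
                "encryptedKek": base64::engine::general_purpose::STANDARD.encode(kek),
            })
        })
        .collect();
    serde_json::json!({ "memberKeys": member_keys })
}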
.route("/workspaces/:id/keys/rotate", post(rotate_workspace_key)) + .route( + "/workspaces/:id/keys/:version", + delete(delete_key_version), + ) .with_state(ctx) } diff --git a/api/crates/presentation/src/http/workspaces/types.rs b/api/crates/presentation/src/http/workspaces/types.rs index 12d4f6d0..e06712e3 100644 --- a/api/crates/presentation/src/http/workspaces/types.rs +++ b/api/crates/presentation/src/http/workspaces/types.rs @@ -4,7 +4,6 @@ use utoipa::ToSchema; use uuid::Uuid; use crate::context::HasWorkspaceService; -use crate::http::documents::DownloadFormat; use application::core::services::errors::ServiceError; use application::workspaces::ports::workspace_repository::{ WorkspaceInvitationRecord, WorkspaceListItem, WorkspaceMemberDetail, WorkspaceRoleRecord, @@ -64,12 +63,6 @@ pub struct PermissionOverridePayload { pub allowed: bool, } -#[derive(Debug, Deserialize, ToSchema, Default)] -pub struct DownloadWorkspaceQuery { - #[serde(default)] - pub format: DownloadFormat, -} - #[derive(Debug, Serialize, ToSchema)] pub struct WorkspaceRoleResponse { pub id: Uuid, @@ -103,6 +96,23 @@ pub struct WorkspaceInvitationResponse { #[serde(skip_serializing_if = "Option::is_none")] pub revoked_at: Option>, pub created_at: DateTime, + // E2EE fields (camelCase) + #[serde(skip_serializing_if = "Option::is_none", rename = "encryptedKekForInvite")] + pub encrypted_kek_for_invite: Option, + #[serde(skip_serializing_if = "Option::is_none", rename = "kekVersion")] + pub kek_version: Option, +} + +/// Response for accepting a workspace invitation +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct AcceptInvitationResponse { + pub workspace_id: Uuid, + // E2EE: encrypted KEK (if available) + #[serde(skip_serializing_if = "Option::is_none")] + pub encrypted_kek_for_invite: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub kek_version: Option, } #[derive(Debug, Deserialize, ToSchema)] @@ -150,6 +160,16 @@ pub struct CreateWorkspaceInvitationRequest { pub expires_at: Option>, } +/// Request to update invitation with encrypted KEK +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct UpdateInvitationKekRequest { + /// Base64 encoded encrypted KEK (encrypted with key derived from invitation token) + pub encrypted_kek_for_invite: String, + /// KEK version at the time of invitation + pub kek_version: i32, +} + pub fn to_response(row: WorkspaceListItem) -> WorkspaceResponse { WorkspaceResponse { id: row.id, @@ -216,6 +236,8 @@ pub fn invitation_response_from(record: WorkspaceInvitationRecord) -> WorkspaceI accepted_at: record.accepted_at, revoked_at: record.revoked_at, created_at: record.created_at, + encrypted_kek_for_invite: record.encrypted_kek_for_invite, + kek_version: record.kek_version, } } @@ -310,3 +332,106 @@ pub fn normalize_overrides( } Ok(out) } + +// ============================================================================ +// E2EE Workspace Key Types +// ============================================================================ + +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct WorkspaceKeyResponse { + pub id: Uuid, + pub workspace_id: Uuid, + pub user_id: Uuid, + #[schema(value_type = String, format = "byte")] + pub encrypted_kek: String, // base64 encoded + pub key_version: i32, + pub created_at: DateTime, +} + +impl From for WorkspaceKeyResponse { + fn from(dto: application::workspaces::dtos::WorkspaceEncryptedKeyDto) -> Self { + use base64::Engine; + Self { + id: 
dto.id, + workspace_id: dto.workspace_id, + user_id: dto.user_id, + encrypted_kek: base64::engine::general_purpose::STANDARD.encode(&dto.encrypted_kek), + key_version: dto.key_version, + created_at: dto.created_at, + } + } +} + +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct StoreWorkspaceKeyRequest { + /// Base64 encoded encrypted KEK + #[schema(value_type = String, format = "byte")] + pub encrypted_kek: String, + /// Key version (for key rotation tracking) + pub key_version: i32, +} + +impl StoreWorkspaceKeyRequest { + pub fn decode(&self) -> Result, &'static str> { + use base64::Engine; + base64::engine::general_purpose::STANDARD + .decode(&self.encrypted_kek) + .map_err(|_| "invalid_base64") + } +} + +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct WorkspaceKeyVersionResponse { + pub workspace_id: Uuid, + pub key_version: Option, +} + +// ============================================================================ +// E2EE Key Rotation Types +// ============================================================================ + +/// A single member's encrypted KEK for key rotation +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct RotationMemberKey { + /// User ID of the member + pub user_id: Uuid, + /// Base64 encoded encrypted KEK for this member + #[schema(value_type = String, format = "byte")] + pub encrypted_kek: String, +} + +/// Request body for KEK rotation +#[derive(Debug, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct RotateWorkspaceKeyRequest { + /// Encrypted KEKs for all workspace members + pub member_keys: Vec, +} + +impl RotateWorkspaceKeyRequest { + pub fn decode(&self) -> Result)>, &'static str> { + use base64::Engine; + self.member_keys + .iter() + .map(|mk| { + base64::engine::general_purpose::STANDARD + .decode(&mk.encrypted_kek) + .map(|bytes| (mk.user_id, bytes)) + .map_err(|_| "invalid_base64") + }) + .collect() + } +} + +/// Response for KEK rotation +#[derive(Debug, Serialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct RotateWorkspaceKeyResponse { + pub workspace_id: Uuid, + pub new_key_version: i32, + pub keys_updated: usize, +} diff --git a/api/crates/presentation/src/http/workspaces/workspace.rs b/api/crates/presentation/src/http/workspaces/workspace.rs index 218ceb6a..53610a83 100644 --- a/api/crates/presentation/src/http/workspaces/workspace.rs +++ b/api/crates/presentation/src/http/workspaces/workspace.rs @@ -1,29 +1,25 @@ use axum::{ Json, - extract::{Path, Query, State}, - http::{HeaderMap, HeaderValue, StatusCode}, - response::{IntoResponse, Response}, + extract::{Path, State}, + http::{HeaderMap, StatusCode}, }; use uuid::Uuid; use crate::context::WorkspacesContext; -#[allow(unused_imports)] -use crate::http::documents::DocumentDownloadBinary; use crate::http::error::ApiError; use crate::http::extractors::AuthedUser; use crate::http::identity::auth::{ self, apply_session_cookies, extract_client_ip, extract_refresh_token, extract_user_agent, }; -use application::core::services::access; use application::core::services::errors::ServiceError; use application::identity::services::auth::user_sessions::SessionMetadata; use application::workspaces::ports::workspace_repository::WorkspaceListItem; -use domain::access::permissions::{PERM_DOC_VIEW, PERM_WORKSPACE_DELETE, PERM_WORKSPACE_UPDATE}; +use domain::access::permissions::{PERM_WORKSPACE_DELETE, PERM_WORKSPACE_UPDATE}; use 
domain::workspaces::roles::{WorkspaceRoleKind, WorkspaceSystemRole}; use super::types::{ - CreateWorkspaceRequest, DownloadWorkspaceQuery, SwitchWorkspaceResponse, - UpdateWorkspaceRequest, WorkspaceResponse, map_service_error, require_permission, to_response, + CreateWorkspaceRequest, SwitchWorkspaceResponse, UpdateWorkspaceRequest, WorkspaceResponse, + map_service_error, require_permission, to_response, }; #[utoipa::path(get, path = "/api/workspaces", tag = "Workspaces", responses((status = 200, body = [WorkspaceResponse])))] @@ -300,64 +296,3 @@ pub async fn switch_workspace( )) } -#[utoipa::path( - get, - path = "/api/workspaces/{id}/download", - tag = "Workspaces", - params( - ("id" = Uuid, Path, description = "Workspace ID"), - ("format" = Option, Query, description = "Download format (archive only)") - ), - responses( - (status = 200, description = "Workspace download", body = DocumentDownloadBinary, content_type = "application/octet-stream"), - (status = 401, description = "Unauthorized"), - (status = 404, description = "Workspace not found") - ) -)] -pub async fn download_workspace_archive( - State(ctx): State, - auth: AuthedUser, - Path(id): Path, - Query(params): Query, -) -> Result { - require_permission(&ctx, id, auth.user_id, PERM_DOC_VIEW).await?; - - let workspace = ctx - .workspace_service() - .get_workspace(id) - .await - .map_err(map_service_error)? - .ok_or(ApiError::not_found("workspace_not_found"))?; - - let actor = access::Actor::User(auth.user_id); - let download = ctx - .document_service() - .download_workspace_root(&actor, id, &workspace.name, params.format.into()) - .await - .map_err(|err| match err { - ServiceError::Unauthorized | ServiceError::TokenExpired | ServiceError::Forbidden => { - ApiError::forbidden("forbidden") - } - ServiceError::Conflict | ServiceError::NotFound => ApiError::not_found("not_found"), - ServiceError::BadRequest(code) => ApiError::bad_request(code).with_message(code), - ServiceError::Unexpected(inner) => { - tracing::error!(error = ?inner, workspace_id = %id, "workspace_download_failed"); - ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "internal_error") - } - })?; - - let mut headers = HeaderMap::new(); - let content_type = HeaderValue::from_str(&download.content_type) - .map_err(|_| ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "invalid_header"))?; - headers.insert(axum::http::header::CONTENT_TYPE, content_type); - headers.insert( - axum::http::header::HeaderName::from_static("x-content-type-options"), - HeaderValue::from_static("nosniff"), - ); - let disposition = format!("attachment; filename=\"{}\"", download.filename); - let content_disposition = HeaderValue::from_str(&disposition) - .map_err(|_| ApiError::new(StatusCode::INTERNAL_SERVER_ERROR, "invalid_header"))?; - headers.insert(axum::http::header::CONTENT_DISPOSITION, content_disposition); - - Ok((headers, download.bytes).into_response()) -} diff --git a/api/crates/presentation/src/openapi.rs b/api/crates/presentation/src/openapi.rs index a1cb4c80..0d0f0e9b 100644 --- a/api/crates/presentation/src/openapi.rs +++ b/api/crates/presentation/src/openapi.rs @@ -1,9 +1,11 @@ +use contracts::core::dtos::{TextDiffLine, TextDiffLineType, TextDiffResult}; use utoipa::OpenApi; -use crate::http::core::{health, markdown, storage_ingest}; +use crate::http::core::{health, storage_ingest}; use crate::http::documents::files; +use crate::http::documents::keys as document_keys; use crate::http::documents::{publishing as public, sharing as shares, tagging as tags}; -use 
+use crate::http::identity::{api_tokens, auth, keys, migration, shortcuts};
 use crate::http::{documents, git, plugins, workspaces};
 use crate::ws;
 
@@ -25,9 +27,29 @@ use crate::ws;
     api_tokens::openapi::revoke_api_token,
     shortcuts::openapi::get_user_shortcuts,
     shortcuts::openapi::update_user_shortcuts,
+    keys::openapi::register_public_key,
+    keys::openapi::get_my_public_key,
+    keys::openapi::get_user_public_key,
+    keys::openapi::store_master_key_backup,
+    keys::openapi::get_master_key_backup,
+    keys::openapi::store_encrypted_private_key,
+    keys::openapi::get_encrypted_private_key,
+    keys::openapi::mark_encryption_setup_complete,
+    keys::openapi::get_encryption_status,
+    migration::openapi::migrate,
+    migration::openapi::needs_migration,
+    document_keys::openapi::get_document_key,
+    document_keys::openapi::store_document_key,
+    document_keys::openapi::rotate_document_key,
+    document_keys::openapi::get_share_key,
+    document_keys::openapi::get_share_salt,
+    document_keys::openapi::store_share_key,
+    document_keys::openapi::store_password_protected_share_key,
     auth::openapi::delete_account,
     ws::documents::yjs::openapi::axum_ws_entry,
     tags::openapi::list_tags,
+    tags::openapi::get_document_tags,
+    tags::openapi::update_document_tags,
     documents::openapi::list_documents,
     documents::openapi::create_document,
     documents::openapi::get_document,
@@ -39,21 +61,20 @@
     documents::openapi::patch_document_content,
     documents::openapi::archive_document,
    documents::openapi::unarchive_document,
-    documents::openapi::download_document,
     documents::openapi::list_document_snapshots,
+    documents::openapi::get_document_snapshot,
     documents::openapi::get_document_snapshot_diff,
     documents::openapi::restore_document_snapshot,
-    documents::openapi::download_document_snapshot,
-    documents::openapi::search_documents,
     documents::openapi::get_backlinks,
     documents::openapi::get_outgoing_links,
     files::openapi::upload_file,
     files::openapi::get_file,
-    files::openapi::get_file_by_name,
+    files::openapi::list_files,
     shares::openapi::create_share,
     shares::openapi::delete_share,
     shares::openapi::list_document_shares,
     shares::openapi::validate_share_token,
+    shares::openapi::get_share_salt,
     shares::openapi::browse_share,
     shares::openapi::list_active_shares,
     shares::openapi::create_share_mount,
@@ -64,36 +85,18 @@
     public::openapi::publish_document,
     public::openapi::unpublish_document,
     public::openapi::get_publish_status,
+    public::openapi::update_publish_settings,
     public::openapi::list_workspace_public_documents,
     public::openapi::get_public_by_workspace_and_id,
     public::openapi::get_public_content_by_workspace_and_id,
+    public::openapi::upload_public_file,
+    public::openapi::list_public_files,
+    public::openapi::get_public_file,
     git::openapi::get_config,
     git::openapi::create_or_update_config,
     git::openapi::delete_config,
-    git::openapi::get_status,
-    git::openapi::get_changes,
-    git::openapi::get_history,
-    git::openapi::get_working_diff,
-    git::openapi::get_commit_diff,
-    git::openapi::sync_now,
-    git::openapi::import_repository,
-    git::openapi::pull_repository,
-    git::openapi::start_pull_session,
-    git::openapi::get_pull_session,
-    git::openapi::resolve_pull_session,
-    git::openapi::finalize_pull_session,
-    git::openapi::init_repository,
-    git::openapi::deinit_repository,
-    git::openapi::ignore_document,
-    git::openapi::ignore_folder,
-    git::openapi::get_gitignore_patterns,
-    git::openapi::add_gitignore_patterns,
-    git::openapi::check_path_ignored,
     storage_ingest::openapi::enqueue_ingest_events,
-    markdown::openapi::render_markdown,
-    markdown::openapi::render_markdown_many,
     plugins::openapi::get_manifest,
-    plugins::openapi::exec_action,
     plugins::openapi::list_records,
     plugins::openapi::create_record,
     plugins::openapi::update_record,
@@ -122,8 +125,14 @@
     workspaces::openapi::list_invitations,
     workspaces::openapi::create_invitation,
     workspaces::openapi::revoke_invitation,
+    workspaces::openapi::update_invitation_kek,
     workspaces::openapi::accept_invitation,
-    workspaces::openapi::download_workspace_archive,
+    workspaces::openapi::get_my_workspace_key,
+    workspaces::openapi::store_workspace_key,
+    workspaces::openapi::list_workspace_keys,
+    workspaces::openapi::get_workspace_key_version,
+    workspaces::openapi::delete_key_version,
+    workspaces::openapi::rotate_workspace_key,
     health::openapi::health,
 ),
 components(schemas(
@@ -143,24 +152,49 @@
     api_tokens::ApiTokenCreateResponse,
     shortcuts::UserShortcutResponse,
     shortcuts::UpdateUserShortcutRequest,
-    tags::TagItem,
+    keys::UserPublicKeyResponse,
+    keys::RegisterPublicKeyRequest,
+    keys::MasterKeyBackupResponse,
+    keys::StoreMasterKeyBackupRequest,
+    keys::KdfParamsResponse,
+    keys::EncryptedPrivateKeyResponse,
+    keys::StoreEncryptedPrivateKeyRequest,
+    keys::EncryptionStatusResponse,
+    migration::MigrateRequest,
+    migration::MemberEncryptedKekRequest,
+    migration::EncryptedDekRequest,
+    migration::MigrationResponse,
+    migration::NeedsMigrationResponse,
+    document_keys::DocumentKeyResponse,
+    document_keys::StoreDocumentKeyRequest,
+    document_keys::RotateDocumentKeyRequest,
+    document_keys::RotateDocumentKeyResponse,
+    document_keys::ShareKeyResponse,
+    document_keys::ShareSaltResponse,
+    document_keys::StoreShareKeyRequest,
+    document_keys::StorePasswordProtectedShareKeyRequest,
+    document_keys::KdfParamsResponse,
+    tags::TagEntry,
+    tags::ListTagsResponse,
+    tags::DocumentTagEntry,
+    tags::DocumentTagsResponse,
+    tags::EncryptedTagInput,
+    tags::UpdateDocumentTagsRequest,
     documents::Document,
     documents::DocumentListResponse,
     documents::CreateDocumentRequest,
+    documents::CreateDocumentDekPayload,
     documents::UpdateDocumentRequest,
     documents::DuplicateDocumentRequest,
+    documents::GetContentResponse,
+    documents::EncryptedUpdateEntry,
     documents::UpdateDocumentContentRequest,
     documents::DocumentPatchOperationRequest,
     documents::PatchDocumentContentRequest,
-    documents::SearchResult,
     documents::BacklinkInfo,
     documents::BacklinksResponse,
     documents::OutgoingLink,
     documents::OutgoingLinksResponse,
-    documents::DocumentDownloadBinary,
-    documents::DocumentArchiveBinary,
-    documents::DownloadFormat,
-    documents::DownloadDocumentQuery,
     documents::SnapshotSummary,
     documents::SnapshotListResponse,
     documents::SnapshotDiffKind,
@@ -168,57 +202,40 @@
     documents::SnapshotDiffResponse,
     documents::SnapshotDiffBaseParam,
     documents::SnapshotRestoreResponse,
+    TextDiffResult,
+    TextDiffLine,
+    TextDiffLineType,
+    documents::SnapshotDetailResponse,
     files::UploadFileResponse,
     files::UploadFileMultipart,
+    files::ListFileResponse,
     shares::CreateShareRequest,
     shares::CreateShareResponse,
     shares::CreateShareMountRequest,
     shares::ShareItem,
     shares::ShareDocumentResponse,
+    shares::ShareSaltResponse,
     shares::ShareBrowseTreeItem,
     shares::ShareBrowseResponse,
     shares::ApplicableShareItem,
     shares::ActiveShareItem,
     shares::ShareMountItem,
     shares::MaterializeResponse,
+    public::PublishRequest,
     public::PublishResponse,
+    public::UpdatePublishSettingsRequest,
public::PublicDocumentSummary, + public::UploadPublicFileRequest, + public::PublicFile, git::GitConfigResponse, git::GitRemoteCheckResponse, git::CreateGitConfigRequest, - git::UpdateGitConfigRequest, - git::GitStatus, - git::GitSyncRequest, - git::GitSyncResponse, - git::GitPullRequest, - git::GitPullResponse, - git::GitImportResponse, - git::GitPullSessionResponse, - git::GitPullResolution, - git::GitPullConflictItem, - git::GitChangeItem, - git::GitChangesResponse, - git::GitCommitItem, - git::GitHistoryResponse, - contracts::core::dtos::TextDiffLineType, - contracts::core::dtos::TextDiffLine, - contracts::core::dtos::TextDiffResult, - git::AddPatternsRequest, - git::CheckIgnoredRequest, - markdown::RenderOptionsPayload, - markdown::PlaceholderItemPayload, - markdown::RenderResponseBody, - markdown::RenderRequest, - markdown::RenderManyRequest, - markdown::RenderManyResponse, plugins::ManifestItem, plugins::RecordsResponse, plugins::CreateRecordBody, plugins::UpdateRecordBody, plugins::KvValueResponse, plugins::KvValueBody, - plugins::ExecBody, - plugins::ExecResultResponse, plugins::InstallFromUrlBody, plugins::InstallResponse, plugins::UninstallBody, @@ -235,7 +252,15 @@ use crate::ws; workspaces::WorkspacePermissionsResponse, workspaces::WorkspaceInvitationResponse, workspaces::CreateWorkspaceInvitationRequest, - workspaces::DownloadWorkspaceQuery, + workspaces::AcceptInvitationResponse, + workspaces::UpdateInvitationKekRequest, + workspaces::WorkspaceKeyResponse, + workspaces::StoreWorkspaceKeyRequest, + workspaces::WorkspaceKeyVersionResponse, + workspaces::DeleteKeyVersionResponse, + workspaces::RotationMemberKey, + workspaces::RotateWorkspaceKeyRequest, + workspaces::RotateWorkspaceKeyResponse, storage_ingest::IngestBatchRequest, storage_ingest::IngestEventRequest, storage_ingest::IngestKindParam, @@ -243,13 +268,13 @@ use crate::ws; )), tags( (name = "Auth", description = "Authentication"), + (name = "E2EE", description = "End-to-end encryption key management"), (name = "Documents", description = "Documents management"), (name = "Files", description = "File management"), (name = "Sharing", description = "Document sharing"), (name = "Public Documents", description = "Public pages"), (name = "Realtime", description = "Yjs WebSocket endpoint (/yjs/:id)"), - (name = "Git", description = "Git integration"), - (name = "Markdown", description = "Markdown rendering"), + (name = "Git", description = "Git config storage (git operations are client-side)"), (name = "Plugins", description = "Plugins management & data APIs"), (name = "Storage", description = "Storage ingest APIs"), (name = "Health", description = "System health checks") diff --git a/api/crates/presentation/src/ws/documents/yjs.rs b/api/crates/presentation/src/ws/documents/yjs.rs index a4913468..7a1d9faa 100644 --- a/api/crates/presentation/src/ws/documents/yjs.rs +++ b/api/crates/presentation/src/ws/documents/yjs.rs @@ -156,7 +156,10 @@ impl Stream for WsBinaryStream { std::task::Poll::Ready(Some(Ok(AxumMessage::Binary(b)))) => { return std::task::Poll::Ready(Some(Ok(b))); } - std::task::Poll::Ready(Some(Ok(AxumMessage::Text(_)))) => continue, + // E2EE messages are sent as JSON text - convert to bytes + std::task::Poll::Ready(Some(Ok(AxumMessage::Text(t)))) => { + return std::task::Poll::Ready(Some(Ok(t.into_bytes()))); + } std::task::Poll::Ready(Some(Ok(AxumMessage::Ping(_)))) => continue, std::task::Poll::Ready(Some(Ok(AxumMessage::Pong(_)))) => continue, std::task::Poll::Ready(Some(Ok(AxumMessage::Close(_)))) => { diff 
--git a/api/migrations/202701020001_add_e2ee_tables.sql b/api/migrations/202701020001_add_e2ee_tables.sql new file mode 100644 index 00000000..e4bdfdfc --- /dev/null +++ b/api/migrations/202701020001_add_e2ee_tables.sql @@ -0,0 +1,152 @@ +-- E2EE (End-to-End Encryption) Schema Migration +-- Phase 0: Database schema changes for E2EE support + +-------------------------------------------------------------------------------- +-- Part 1: New tables for key management +-------------------------------------------------------------------------------- + +-- User public keys (ECDH P-256) +CREATE TABLE IF NOT EXISTS user_public_keys ( + user_id UUID PRIMARY KEY REFERENCES users(id) ON DELETE CASCADE, + public_key BYTEA NOT NULL, + key_type TEXT NOT NULL DEFAULT 'ecdh-p256', + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +-- User encrypted master keys (for recovery) +CREATE TABLE IF NOT EXISTS user_encrypted_master_keys ( + user_id UUID PRIMARY KEY REFERENCES users(id) ON DELETE CASCADE, + encrypted_key BYTEA NOT NULL, + salt BYTEA NOT NULL, + kdf_type TEXT NOT NULL DEFAULT 'argon2id', + kdf_params JSONB NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +-- User encrypted private keys (encrypted with UMK) +CREATE TABLE IF NOT EXISTS user_encrypted_private_keys ( + user_id UUID PRIMARY KEY REFERENCES users(id) ON DELETE CASCADE, + encrypted_private_key BYTEA NOT NULL, + nonce BYTEA NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +-- Workspace encrypted keys (KEK encrypted with each member's public key) +CREATE TABLE IF NOT EXISTS workspace_encrypted_keys ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + workspace_id UUID NOT NULL REFERENCES workspaces(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + encrypted_kek BYTEA NOT NULL, + key_version INT NOT NULL DEFAULT 1, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + UNIQUE (workspace_id, user_id, key_version) +); + +-- Document encrypted keys (DEK encrypted with workspace KEK) +CREATE TABLE IF NOT EXISTS document_encrypted_keys ( + document_id UUID PRIMARY KEY REFERENCES documents(id) ON DELETE CASCADE, + encrypted_dek BYTEA NOT NULL, + nonce BYTEA NOT NULL, + key_version INT NOT NULL DEFAULT 1, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +-- Share encrypted keys (DEK encrypted with share key) +CREATE TABLE IF NOT EXISTS share_encrypted_keys ( + share_id UUID PRIMARY KEY REFERENCES shares(id) ON DELETE CASCADE, + encrypted_dek BYTEA NOT NULL, + salt BYTEA, + kdf_params JSONB, + created_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +-- Public document contents (plaintext for published documents) +CREATE TABLE IF NOT EXISTS public_document_contents ( + document_id UUID PRIMARY KEY REFERENCES documents(id) ON DELETE CASCADE, + content TEXT NOT NULL, + title TEXT NOT NULL, + content_hash TEXT NOT NULL, + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +-- Encrypted tag index (deterministic encryption for searchable tags) +CREATE TABLE IF NOT EXISTS encrypted_tag_index ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + workspace_id UUID NOT NULL REFERENCES workspaces(id) ON DELETE CASCADE, + document_id UUID NOT NULL REFERENCES documents(id) ON DELETE CASCADE, + encrypted_tag BYTEA NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + 
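Taken together, Part 1 defines a layered key hierarchy: a password-derived key wraps the user's master key (user_encrypted_master_keys), the master key wraps the user's ECDH private key (user_encrypted_private_keys), the workspace KEK is wrapped once per member and per key_version (workspace_encrypted_keys), and each document DEK is wrapped with the KEK (document_encrypted_keys), with share keys as a parallel path for external access. The server stores only wrapped blobs, salts, nonces, and public keys. Below is a minimal client-side sketch of producing one such wrapped blob, assuming Argon2id (the kdf_type default above) and the ChaCha20-Poly1305 AEAD; the function names and column mapping are illustrative, not the project's actual client code.

```rust
// Minimal sketch: derive a wrapping key from a password, then AEAD-wrap an
// inner key. Assumes the `argon2` and `chacha20poly1305` crates; the column
// mapping in the comments is an interpretation of the schema above.
use argon2::Argon2;
use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305, Key,
};

struct WrappedKey {
    ciphertext: Vec<u8>, // e.g. user_encrypted_master_keys.encrypted_key
    nonce: [u8; 12],     // the accompanying nonce column, where present
}

// kdf_type 'argon2id'; the concrete cost parameters would be recorded in the
// kdf_params JSONB column so they can be replayed at unlock time.
fn derive_wrapping_key(password: &str, salt: &[u8]) -> Result<[u8; 32], argon2::Error> {
    let mut key = [0u8; 32];
    Argon2::default().hash_password_into(password.as_bytes(), salt, &mut key)?;
    Ok(key)
}

fn wrap_key(
    wrapping_key: &[u8; 32],
    inner_key: &[u8],
) -> Result<WrappedKey, chacha20poly1305::Error> {
    let cipher = ChaCha20Poly1305::new(Key::from_slice(wrapping_key));
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng); // unique per wrap
    let ciphertext = cipher.encrypt(&nonce, inner_key)?;
    Ok(WrappedKey { ciphertext, nonce: nonce.into() })
}
```

Unwrapping reverses the chain, so a member who fetches their workspace_encrypted_keys row can recover the KEK locally and then open any document_encrypted_keys row wrapped under that key_version.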
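The encrypted_tag_index table (and the (workspace_id, encrypted_tag) index added in Part 3 below) depends on deterministic encryption: equal plaintext tags must produce equal ciphertexts so the server can match rows without reading them. A keyed MAC is one common way to get that property; the sketch below uses HMAC-SHA256 purely to illustrate the idea, since the migration does not name the primitive.

```rust
// Sketch of a deterministic, equality-searchable tag token using a keyed MAC.
// HMAC-SHA256 is an assumption for illustration; the schema only says
// "deterministic encryption". Assumes the `hmac` and `sha2` crates.
use hmac::{Hmac, Mac};
use sha2::Sha256;

fn tag_token(tag_index_key: &[u8], tag: &str) -> Vec<u8> {
    let mut mac = Hmac::<Sha256>::new_from_slice(tag_index_key)
        .expect("HMAC accepts keys of any length");
    // Normalize so "Rust" and " rust " map to the same token.
    mac.update(tag.trim().to_lowercase().as_bytes());
    mac.finalize().into_bytes().to_vec() // -> encrypted_tag BYTEA
}
```

Because the same key and tag always yield the same bytes, a query like `WHERE workspace_id = $1 AND encrypted_tag = $2` can use the index while the server never sees the tag text; the tradeoff is that identical tags remain linkable across documents within a workspace.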
+-------------------------------------------------------------------------------- +-- Part 2: Add columns to existing tables +-------------------------------------------------------------------------------- + +-- Users: E2EE setup tracking +ALTER TABLE users + ADD COLUMN IF NOT EXISTS e2ee_setup_completed_at TIMESTAMPTZ; + +-- Documents: encrypted title +ALTER TABLE documents + ADD COLUMN IF NOT EXISTS encrypted_title BYTEA, + ADD COLUMN IF NOT EXISTS encrypted_title_nonce BYTEA; + +-- Document updates: encryption metadata and signature +ALTER TABLE document_updates + ADD COLUMN IF NOT EXISTS nonce BYTEA, + ADD COLUMN IF NOT EXISTS signature BYTEA, + ADD COLUMN IF NOT EXISTS public_key BYTEA; + +-- Document snapshots: encryption metadata, signature, and seq tracking +ALTER TABLE document_snapshots + ADD COLUMN IF NOT EXISTS nonce BYTEA, + ADD COLUMN IF NOT EXISTS signature BYTEA, + ADD COLUMN IF NOT EXISTS seq_at_snapshot BIGINT; + +-- Files: encrypted metadata +ALTER TABLE files + ADD COLUMN IF NOT EXISTS encrypted_metadata BYTEA, + ADD COLUMN IF NOT EXISTS encrypted_metadata_nonce BYTEA, + ADD COLUMN IF NOT EXISTS encrypted_hash TEXT; + +-- Git configs: encrypted auth data +ALTER TABLE git_configs + ADD COLUMN IF NOT EXISTS encrypted_auth_data BYTEA, + ADD COLUMN IF NOT EXISTS encrypted_auth_nonce BYTEA; + +-- Plugin KV: encrypted value +ALTER TABLE plugin_kv + ADD COLUMN IF NOT EXISTS encrypted_value BYTEA, + ADD COLUMN IF NOT EXISTS nonce BYTEA; + +-- Plugin records: encrypted data +ALTER TABLE plugin_records + ADD COLUMN IF NOT EXISTS encrypted_data BYTEA, + ADD COLUMN IF NOT EXISTS nonce BYTEA; + +-------------------------------------------------------------------------------- +-- Part 3: Indexes +-------------------------------------------------------------------------------- + +-- Workspace encrypted keys lookup +CREATE INDEX IF NOT EXISTS idx_workspace_encrypted_keys_workspace + ON workspace_encrypted_keys(workspace_id); + +CREATE INDEX IF NOT EXISTS idx_workspace_encrypted_keys_user + ON workspace_encrypted_keys(user_id); + +-- Encrypted tag index lookup (for deterministic encryption search) +CREATE INDEX IF NOT EXISTS idx_encrypted_tag_index_workspace_tag + ON encrypted_tag_index(workspace_id, encrypted_tag); + +CREATE INDEX IF NOT EXISTS idx_encrypted_tag_index_document + ON encrypted_tag_index(document_id); + +-- E2EE setup status lookup +CREATE INDEX IF NOT EXISTS idx_users_e2ee_setup + ON users(e2ee_setup_completed_at) + WHERE e2ee_setup_completed_at IS NOT NULL; diff --git a/api/migrations/202701020002_add_e2ee_columns_to_archives.sql b/api/migrations/202701020002_add_e2ee_columns_to_archives.sql new file mode 100644 index 00000000..f2390608 --- /dev/null +++ b/api/migrations/202701020002_add_e2ee_columns_to_archives.sql @@ -0,0 +1,6 @@ +-- Add E2EE columns to document_snapshot_archives +-- These columns are needed for encrypted snapshot archives + +ALTER TABLE document_snapshot_archives + ADD COLUMN IF NOT EXISTS nonce BYTEA, + ADD COLUMN IF NOT EXISTS signature BYTEA; diff --git a/api/migrations/202701140001_add_invitation_kek.sql b/api/migrations/202701140001_add_invitation_kek.sql new file mode 100644 index 00000000..484dfd31 --- /dev/null +++ b/api/migrations/202701140001_add_invitation_kek.sql @@ -0,0 +1,9 @@ +-- Add E2EE KEK fields to workspace_invitations +-- These fields store the workspace KEK encrypted with a key derived from the invitation token + +ALTER TABLE workspace_invitations + ADD COLUMN IF NOT EXISTS encrypted_kek_for_invite TEXT, + ADD COLUMN IF NOT 
EXISTS kek_version INT; + +COMMENT ON COLUMN workspace_invitations.encrypted_kek_for_invite IS 'Workspace KEK encrypted with a key derived from the invitation token (Base64)'; +COMMENT ON COLUMN workspace_invitations.kek_version IS 'Version of the KEK at the time of invitation'; diff --git a/api/migrations/202701160001_add_creator_encrypted_share_key.sql b/api/migrations/202701160001_add_creator_encrypted_share_key.sql new file mode 100644 index 00000000..6eea847d --- /dev/null +++ b/api/migrations/202701160001_add_creator_encrypted_share_key.sql @@ -0,0 +1,10 @@ +-- Add creator_encrypted_share_key to share_encrypted_keys table +-- This stores the share key encrypted with the creator's KEK, +-- allowing the share creator to recover the full share URL later. + +ALTER TABLE share_encrypted_keys +ADD COLUMN IF NOT EXISTS creator_encrypted_share_key BYTEA, +ADD COLUMN IF NOT EXISTS creator_share_key_nonce BYTEA; + +COMMENT ON COLUMN share_encrypted_keys.creator_encrypted_share_key IS 'Share key encrypted with creator KEK (for URL recovery)'; +COMMENT ON COLUMN share_encrypted_keys.creator_share_key_nonce IS 'Nonce for creator_encrypted_share_key'; diff --git a/api/migrations/202701170001_create_public_document_files.sql b/api/migrations/202701170001_create_public_document_files.sql new file mode 100644 index 00000000..560a88c5 --- /dev/null +++ b/api/migrations/202701170001_create_public_document_files.sql @@ -0,0 +1,26 @@ +-- Public document files table for storing decrypted attachments +-- When an E2EE document is published, its attachments are decrypted and stored here +-- for public access without requiring encryption keys. + +CREATE TABLE IF NOT EXISTS public_document_files ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + document_id UUID NOT NULL REFERENCES documents(id) ON DELETE CASCADE, + workspace_id UUID NOT NULL REFERENCES workspaces(id) ON DELETE CASCADE, + file_id UUID NOT NULL, -- Reference to the original encrypted file + original_filename TEXT NOT NULL, + mime_type TEXT NOT NULL, + size BIGINT NOT NULL, + storage_path TEXT NOT NULL, -- Path in storage: public/{workspace_id}/{document_id}/{file_id} + content_hash TEXT NOT NULL, -- SHA-256 hash for integrity + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), + UNIQUE (document_id, file_id) +); + +-- Index for efficient lookup by document +CREATE INDEX IF NOT EXISTS idx_public_document_files_document_id + ON public_document_files(document_id); + +-- Index for cleanup when document is unpublished +CREATE INDEX IF NOT EXISTS idx_public_document_files_workspace_document + ON public_document_files(workspace_id, document_id); diff --git a/api/migrations/202701170002_add_logical_filename_to_public_files.sql b/api/migrations/202701170002_add_logical_filename_to_public_files.sql new file mode 100644 index 00000000..065c0816 --- /dev/null +++ b/api/migrations/202701170002_add_logical_filename_to_public_files.sql @@ -0,0 +1,19 @@ +-- Add logical_filename column to public_document_files +-- This stores the filename as it appears in markdown references (e.g., "image.png" from "./attachments/image.png") +-- Used to lookup files when serving public documents + +ALTER TABLE public_document_files +ADD COLUMN IF NOT EXISTS logical_filename TEXT; + +-- Update existing rows to use original_filename as logical_filename +UPDATE public_document_files +SET logical_filename = original_filename +WHERE logical_filename IS NULL; + +-- Make it NOT NULL after populating +ALTER TABLE public_document_files 
+ALTER COLUMN logical_filename SET NOT NULL; + +-- Index for efficient lookup by document and logical filename +CREATE INDEX IF NOT EXISTS idx_public_document_files_logical_filename + ON public_document_files(document_id, logical_filename); diff --git a/api/migrations/202701170003_add_noindex_to_public_documents.sql b/api/migrations/202701170003_add_noindex_to_public_documents.sql new file mode 100644 index 00000000..47b6a9ce --- /dev/null +++ b/api/migrations/202701170003_add_noindex_to_public_documents.sql @@ -0,0 +1,7 @@ +-- Add noindex column to public_documents table +-- Default is true (noindex = prevent search engine indexing) + +ALTER TABLE public_documents +ADD COLUMN IF NOT EXISTS noindex BOOLEAN NOT NULL DEFAULT true; + +COMMENT ON COLUMN public_documents.noindex IS 'If true, adds noindex meta tag to prevent search engine indexing'; diff --git a/api/openapi.json b/api/openapi.json index a296188a..bd7065e1 100644 --- a/api/openapi.json +++ b/api/openapi.json @@ -1 +1 @@ -{"openapi":"3.0.3","info":{"title":"presentation","description":"","license":{"name":""},"version":"0.1.0"},"paths":{"/api/auth/login":{"post":{"tags":["Auth"],"operationId":"login","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/LoginRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/LoginResponse"}}}}},"security":[{}]}},"/api/auth/logout":{"post":{"tags":["Auth"],"operationId":"logout","responses":{"204":{"description":""}}}},"/api/auth/me":{"get":{"tags":["Auth"],"operationId":"me","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserResponse"}}}}}},"delete":{"tags":["Auth"],"operationId":"delete_account","responses":{"204":{"description":""}}}},"/api/auth/oauth/{provider}":{"post":{"tags":["Auth"],"operationId":"oauth_login","parameters":[{"name":"provider","in":"path","description":"OAuth provider identifier (e.g., google)","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/OAuthLoginRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/LoginResponse"}}}}},"security":[{}]}},"/api/auth/oauth/{provider}/state":{"post":{"tags":["Auth"],"operationId":"oauth_state","parameters":[{"name":"provider","in":"path","description":"OAuth provider 
identifier","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/OAuthStateResponse"}}}}},"security":[{}]}},"/api/auth/providers":{"get":{"tags":["Auth"],"operationId":"list_oauth_providers","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/AuthProvidersResponse"}}}}},"security":[{}]}},"/api/auth/refresh":{"post":{"tags":["Auth"],"operationId":"refresh_session","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RefreshResponse"}}}}}}},"/api/auth/register":{"post":{"tags":["Auth"],"operationId":"register","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RegisterRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserResponse"}}}}},"security":[{}]}},"/api/auth/sessions":{"get":{"tags":["Auth"],"operationId":"list_sessions","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/SessionResponse"}}}}}}}},"/api/auth/sessions/{id}":{"delete":{"tags":["Auth"],"operationId":"revoke_session","parameters":[{"name":"id","in":"path","description":"Session ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/documents":{"get":{"tags":["Documents"],"operationId":"list_documents","parameters":[{"name":"query","in":"query","description":"Search query","required":false,"schema":{"type":"string","nullable":true}},{"name":"tag","in":"query","description":"Filter by tag","required":false,"schema":{"type":"string","nullable":true}},{"name":"state","in":"query","description":"Filter by document state (active|archived|all)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DocumentListResponse"}}}}}},"post":{"tags":["Documents"],"operationId":"create_document","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateDocumentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/search":{"get":{"tags":["Documents"],"operationId":"search_documents","parameters":[{"name":"q","in":"query","description":"Query","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/SearchResult"}}}}}}}},"/api/documents/{id}":{"get":{"tags":["Documents"],"operationId":"get_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}},"delete":{"tags":["Documents"],"operationId":"delete_document","parameters":[{"name":"id","in":"path","description":"Document 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Documents"],"operationId":"update_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDocumentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{id}/archive":{"post":{"tags":["Documents"],"operationId":"archive_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}},"404":{"description":"Document not found"},"409":{"description":"Document already archived"}}}},"/api/documents/{id}/backlinks":{"get":{"tags":["Documents"],"operationId":"getBacklinks","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/BacklinksResponse"}}}}}}},"/api/documents/{id}/content":{"get":{"tags":["Documents"],"operationId":"get_document_content","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":""}}},"put":{"tags":["Documents"],"operationId":"update_document_content","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDocumentContentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}},"patch":{"tags":["Documents"],"operationId":"patch_document_content","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/PatchDocumentContentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{id}/download":{"get":{"tags":["Documents"],"operationId":"download_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}},{"name":"format","in":"query","description":"Download format (see schema for supported values)","required":false,"schema":{"allOf":[{"$ref":"#/components/schemas/DownloadFormat"}],"nullable":true}}],"responses":{"200":{"description":"Document download","content":{"application/octet-stream":{"schema":{"$ref":"#/components/schemas/DocumentDownloadBinary"}}}},"401":{"description":"Unauthorized"},"404":{"description":"Document not 
found"}}}},"/api/documents/{id}/duplicate":{"post":{"tags":["Documents"],"operationId":"duplicate_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/DuplicateDocumentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{id}/links":{"get":{"tags":["Documents"],"operationId":"getOutgoingLinks","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/OutgoingLinksResponse"}}}}}}},"/api/documents/{id}/snapshots":{"get":{"tags":["Documents"],"operationId":"list_document_snapshots","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}},{"name":"limit","in":"query","description":"Maximum number of snapshots to return","required":false,"schema":{"type":"integer","format":"int64","nullable":true}},{"name":"offset","in":"query","description":"Offset for pagination","required":false,"schema":{"type":"integer","format":"int64","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotListResponse"}}}}}}},"/api/documents/{id}/snapshots/{snapshot_id}/diff":{"get":{"tags":["Documents"],"operationId":"get_document_snapshot_diff","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"snapshot_id","in":"path","description":"Snapshot ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}},{"name":"compare","in":"query","description":"Snapshot ID to compare against (defaults to current document state)","required":false,"schema":{"type":"string","format":"uuid","nullable":true}},{"name":"base","in":"query","description":"Base comparison to use when compare is not provided (auto|current|previous)","required":false,"schema":{"allOf":[{"$ref":"#/components/schemas/SnapshotDiffBaseParam"}],"nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotDiffResponse"}}}}}}},"/api/documents/{id}/snapshots/{snapshot_id}/download":{"get":{"tags":["Documents"],"operationId":"download_document_snapshot","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"snapshot_id","in":"path","description":"Snapshot ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"Snapshot archive","content":{"application/zip":{"schema":{"$ref":"#/components/schemas/DocumentArchiveBinary"}}}},"401":{"description":"Unauthorized"},"404":{"description":"Snapshot not 
found"}}}},"/api/documents/{id}/snapshots/{snapshot_id}/restore":{"post":{"tags":["Documents"],"operationId":"restore_document_snapshot","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"snapshot_id","in":"path","description":"Snapshot ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotRestoreResponse"}}}}}}},"/api/documents/{id}/unarchive":{"post":{"tags":["Documents"],"operationId":"unarchive_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}},"404":{"description":"Document not found"},"409":{"description":"Document is not archived"}}}},"/api/files":{"post":{"tags":["Files"],"operationId":"upload_file","requestBody":{"content":{"multipart/form-data":{"schema":{"$ref":"#/components/schemas/UploadFileMultipart"}}},"required":true},"responses":{"201":{"description":"File uploaded","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UploadFileResponse"}}}}}}},"/api/files/documents/{filename}":{"get":{"tags":["Files"],"operationId":"get_file_by_name","parameters":[{"name":"filename","in":"path","description":"File name","required":true,"schema":{"type":"string"}},{"name":"document_id","in":"query","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"OK","content":{"application/octet-stream":{"schema":{"type":"string","format":"binary"}}}}}}},"/api/files/{id}":{"get":{"tags":["Files"],"operationId":"get_file","parameters":[{"name":"id","in":"path","description":"File 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"OK","content":{"application/octet-stream":{"schema":{"type":"string","format":"binary"}}}}}}},"/api/git/changes":{"get":{"tags":["Git"],"operationId":"get_changes","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitChangesResponse"}}}}}}},"/api/git/config":{"get":{"tags":["Git"],"operationId":"get_config","responses":{"200":{"description":"","content":{"application/json":{"schema":{"allOf":[{"$ref":"#/components/schemas/GitConfigResponse"}],"nullable":true}}}}}},"post":{"tags":["Git"],"operationId":"create_or_update_config","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateGitConfigRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitConfigResponse"}}}}}},"delete":{"tags":["Git"],"operationId":"delete_config","responses":{"204":{"description":"Deleted"}}}},"/api/git/deinit":{"post":{"tags":["Git"],"operationId":"deinit_repository","responses":{"200":{"description":"OK"}}}},"/api/git/diff/commits/{from}/{to}":{"get":{"tags":["Git"],"operationId":"get_commit_diff","parameters":[{"name":"from","in":"path","description":"From","required":true,"schema":{"type":"string"}},{"name":"to","in":"path","description":"To","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/TextDiffResult"}}}}}}}},"/api/git/diff/working":{"get":{"tags":["Git"],"operationId":"get_working_diff","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/TextDiffResult"}}}}}}}},"/api/git/gitignore/check":{"post":{"tags":["Git"],"operationId":"check_path_ignored","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CheckIgnoredRequest"}}},"required":true},"responses":{"200":{"description":"OK"}}}},"/api/git/gitignore/patterns":{"get":{"tags":["Git"],"operationId":"get_gitignore_patterns","responses":{"200":{"description":"OK"}}},"post":{"tags":["Git"],"operationId":"add_gitignore_patterns","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/AddPatternsRequest"}}},"required":true},"responses":{"200":{"description":"OK"}}}},"/api/git/history":{"get":{"tags":["Git"],"operationId":"get_history","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitHistoryResponse"}}}}}}},"/api/git/ignore/doc/{id}":{"post":{"tags":["Git"],"operationId":"ignore_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"OK"}}}},"/api/git/ignore/folder/{id}":{"post":{"tags":["Git"],"operationId":"ignore_folder","parameters":[{"name":"id","in":"path","description":"Folder 
ID","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"OK"}}}},"/api/git/import":{"post":{"tags":["Git"],"operationId":"import_repository","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateGitConfigRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitImportResponse"}}}}}}},"/api/git/init":{"post":{"tags":["Git"],"operationId":"init_repository","responses":{"200":{"description":"OK"}}}},"/api/git/pull":{"post":{"tags":["Git"],"operationId":"pull_repository","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullResponse"}}}},"409":{"description":"Conflicts detected","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullResponse"}}}}}}},"/api/git/pull/session/{id}":{"get":{"tags":["Git"],"operationId":"get_pull_session","parameters":[{"name":"id","in":"path","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}}}}},"/api/git/pull/session/{id}/finalize":{"post":{"tags":["Git"],"operationId":"finalize_pull_session","parameters":[{"name":"id","in":"path","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullResponse"}}}},"400":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullResponse"}}}},"409":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullResponse"}}}}}}},"/api/git/pull/session/{id}/resolve":{"post":{"tags":["Git"],"operationId":"resolve_pull_session","parameters":[{"name":"id","in":"path","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}},"400":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}},"409":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}}}}},"/api/git/pull/start":{"post":{"tags":["Git"],"operationId":"start_pull_session","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}},"400":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}},"409":{"description":"Conflicts 
detected","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitPullSessionResponse"}}}}}}},"/api/git/status":{"get":{"tags":["Git"],"operationId":"get_status","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitStatus"}}}}}}},"/api/git/sync":{"post":{"tags":["Git"],"operationId":"sync_now","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitSyncRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitSyncResponse"}}}},"409":{"description":"Conflicts during rebase/pull"}}}},"/api/health":{"get":{"tags":["Health"],"operationId":"health","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HealthResp"}}}}}}},"/api/markdown/render":{"post":{"tags":["Markdown"],"operationId":"render_markdown","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RenderRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RenderResponseBody"}}}}}}},"/api/markdown/render-many":{"post":{"tags":["Markdown"],"operationId":"render_markdown_many","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RenderManyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RenderManyResponse"}}}}}}},"/api/me/api-tokens":{"get":{"tags":["Auth"],"operationId":"list_api_tokens","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ApiTokenItem"}}}}}}},"post":{"tags":["Auth"],"operationId":"create_api_token","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/ApiTokenCreateRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ApiTokenCreateResponse"}}}}}}},"/api/me/api-tokens/{id}":{"delete":{"tags":["Auth"],"operationId":"revoke_api_token","parameters":[{"name":"id","in":"path","description":"Token ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/me/plugins/install-from-url":{"post":{"tags":["Plugins"],"operationId":"pluginsInstallFromUrl","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/InstallFromUrlBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/InstallResponse"}}}}}}},"/api/me/plugins/manifest":{"get":{"tags":["Plugins"],"operationId":"pluginsGetManifest","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ManifestItem"}}}}}}}},"/api/me/plugins/uninstall":{"post":{"tags":["Plugins"],"operationId":"pluginsUninstall","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UninstallBody"}}},"required":true},"responses":{"204":{"description":""}}}},"/api/me/plugins/updates":{"get":{"tags":["Plugins"],"operationId":"sse_updates","responses":{"200":{"description":"Plugin event 
stream"}}}},"/api/me/shortcuts":{"get":{"tags":["Auth"],"operationId":"get_user_shortcuts","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserShortcutResponse"}}}}}},"put":{"tags":["Auth"],"operationId":"update_user_shortcuts","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateUserShortcutRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserShortcutResponse"}}}}}}},"/api/plugin-assets":{"get":{"tags":["Plugins"],"operationId":"pluginsGetAsset","parameters":[{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"Plugin asset"}}}},"/api/plugins/{plugin}/docs/{doc_id}/kv/{key}":{"get":{"tags":["Plugins"],"operationId":"pluginsGetKv","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"key","in":"path","description":"Key","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/KvValueResponse"}}}}}},"put":{"tags":["Plugins"],"operationId":"pluginsPutKv","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"key","in":"path","description":"Key","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/KvValueBody"}}},"required":true},"responses":{"204":{"description":""}}}},"/api/plugins/{plugin}/docs/{doc_id}/records/{kind}":{"get":{"tags":["Plugins"],"operationId":"list_records","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"kind","in":"path","description":"Record kind","required":true,"schema":{"type":"string"}},{"name":"limit","in":"query","description":"Limit","required":false,"schema":{"type":"integer","format":"int64","nullable":true}},{"name":"offset","in":"query","description":"Offset","required":false,"schema":{"type":"integer","format":"int64","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RecordsResponse"}}}}}},"post":{"tags":["Plugins"],"operationId":"pluginsCreateRecord","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"kind","in":"path","description":"Record kind","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateRecordBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{}}}}}}},"/api/plugins/{plugin}/exec/{action}":{"post":{"tags":["Plugins"],"operationId":"pluginsExecAction","parameters":[{"name":"plugin","in":"path","description":"Plugin 
ID","required":true,"schema":{"type":"string"}},{"name":"action","in":"path","description":"Action","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/ExecBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ExecResultResponse"}}}}}}},"/api/plugins/{plugin}/records/{id}":{"delete":{"tags":["Plugins"],"operationId":"pluginsDeleteRecord","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Record ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Plugins"],"operationId":"pluginsUpdateRecord","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Record ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateRecordBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{}}}}}}},"/api/public/documents/{id}":{"get":{"tags":["Public Documents"],"operationId":"get_publish_status","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Published status","content":{"application/json":{"schema":{"$ref":"#/components/schemas/PublishResponse"}}}}}},"post":{"tags":["Public Documents"],"operationId":"publish_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Published","content":{"application/json":{"schema":{"$ref":"#/components/schemas/PublishResponse"}}}}}},"delete":{"tags":["Public Documents"],"operationId":"unpublish_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":"Unpublished"}}}},"/api/public/workspaces/{slug}":{"get":{"tags":["Public Documents"],"operationId":"list_workspace_public_documents","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Public documents for workspace","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/PublicDocumentSummary"}}}}}}}},"/api/public/workspaces/{slug}/{id}":{"get":{"tags":["Public Documents"],"operationId":"get_public_by_workspace_and_id","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Document metadata","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/public/workspaces/{slug}/{id}/content":{"get":{"tags":["Public Documents"],"operationId":"get_public_content_by_workspace_and_id","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Document 
content"}}}},"/api/shares":{"post":{"tags":["Sharing"],"operationId":"create_share","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateShareRequest"}}},"required":true},"responses":{"200":{"description":"Share link created","content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateShareResponse"}}}}}}},"/api/shares/active":{"get":{"tags":["Sharing"],"operationId":"list_active_shares","responses":{"200":{"description":"Active shares","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ActiveShareItem"}}}}}}}},"/api/shares/applicable":{"get":{"tags":["Sharing"],"operationId":"list_applicable_shares","parameters":[{"name":"doc_id","in":"query","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Shares that include the document","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ApplicableShareItem"}}}}}}}},"/api/shares/browse":{"get":{"tags":["Sharing"],"operationId":"browse_share","parameters":[{"name":"token","in":"query","description":"Share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Share tree","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareBrowseResponse"}}}}}}},"/api/shares/documents/{id}":{"get":{"tags":["Sharing"],"operationId":"list_document_shares","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"OK","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ShareItem"}}}}}}}},"/api/shares/folders/{token}/materialize":{"post":{"tags":["Sharing"],"operationId":"materialize_folder_share","parameters":[{"name":"token","in":"path","description":"Folder share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Created doc shares","content":{"application/json":{"schema":{"$ref":"#/components/schemas/MaterializeResponse"}}}}}}},"/api/shares/mounts":{"get":{"tags":["Sharing"],"operationId":"list_share_mounts","responses":{"200":{"description":"Share mounts","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ShareMountItem"}}}}}}},"post":{"tags":["Sharing"],"operationId":"create_share_mount","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateShareMountRequest"}}},"required":true},"responses":{"200":{"description":"Saved share mount","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareMountItem"}}}}}}},"/api/shares/mounts/{id}":{"delete":{"tags":["Sharing"],"operationId":"delete_share_mount","parameters":[{"name":"id","in":"path","description":"Share mount ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":"Share mount removed"}}}},"/api/shares/validate":{"get":{"tags":["Sharing"],"operationId":"validate_share_token","parameters":[{"name":"token","in":"query","description":"Share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Document info","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareDocumentResponse"}}}}}}},"/api/shares/{token}":{"delete":{"tags":["Sharing"],"operationId":"delete_share","parameters":[{"name":"token","in":"path","description":"Share 
token","required":true,"schema":{"type":"string"}}],"responses":{"204":{"description":"Share link deleted"}}}},"/api/storage/ingest":{"post":{"tags":["Storage"],"operationId":"enqueue_ingest_events","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/IngestBatchRequest"}}},"required":true},"responses":{"202":{"description":"Events enqueued"},"400":{"description":"Invalid request"}}}},"/api/tags":{"get":{"tags":["Tags"],"operationId":"list_tags","parameters":[{"name":"q","in":"query","description":"Filter contains","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/TagItem"}}}}}}}},"/api/workspace-invitations/{token}/accept":{"post":{"tags":["Workspaces"],"operationId":"accept_invitation","parameters":[{"name":"token","in":"path","description":"Invitation token","required":true,"schema":{"type":"string"}}],"responses":{"204":{"description":""}}}},"/api/workspaces":{"get":{"tags":["Workspaces"],"operationId":"list_workspaces","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}}},"post":{"tags":["Workspaces"],"operationId":"create_workspace","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateWorkspaceRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}}},"/api/workspaces/{id}":{"get":{"tags":["Workspaces"],"operationId":"get_workspace_detail","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}},"put":{"tags":["Workspaces"],"operationId":"update_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateWorkspaceRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}},"delete":{"tags":["Workspaces"],"operationId":"delete_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/workspaces/{id}/download":{"get":{"tags":["Workspaces"],"operationId":"download_workspace_archive","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"format","in":"query","description":"Download format (archive only)","required":false,"schema":{"allOf":[{"$ref":"#/components/schemas/DownloadFormat"}],"nullable":true}}],"responses":{"200":{"description":"Workspace download","content":{"application/octet-stream":{"schema":{"$ref":"#/components/schemas/DocumentDownloadBinary"}}}},"401":{"description":"Unauthorized"},"404":{"description":"Workspace not found"}}}},"/api/workspaces/{id}/invitations":{"get":{"tags":["Workspaces"],"operationId":"list_invitations","parameters":[{"name":"id","in":"path","description":"Workspace 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"post":{"tags":["Workspaces"],"operationId":"create_invitation","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateWorkspaceInvitationRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"/api/workspaces/{id}/invitations/{invitation_id}":{"delete":{"tags":["Workspaces"],"operationId":"revoke_invitation","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"invitation_id","in":"path","description":"Invitation ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"/api/workspaces/{id}/leave":{"post":{"tags":["Workspaces"],"operationId":"leave_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/workspaces/{id}/members":{"get":{"tags":["Workspaces"],"operationId":"list_members","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceMemberResponse"}}}}}}}},"/api/workspaces/{id}/members/{user_id}":{"delete":{"tags":["Workspaces"],"operationId":"remove_member","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"user_id","in":"path","description":"Target user ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Workspaces"],"operationId":"update_member_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"user_id","in":"path","description":"Target user ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateMemberRoleRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceMemberResponse"}}}}}}},"/api/workspaces/{id}/permissions":{"get":{"tags":["Workspaces"],"operationId":"get_workspace_permissions","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspacePermissionsResponse"}}}}}}},"/api/workspaces/{id}/roles":{"get":{"tags":["Workspaces"],"operationId":"list_roles","parameters":[{"name":"id","in":"path","description":"Workspace 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceRoleResponse"}}}}}}},"post":{"tags":["Workspaces"],"operationId":"create_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateWorkspaceRoleRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceRoleResponse"}}}}}}},"/api/workspaces/{id}/roles/{role_id}":{"delete":{"tags":["Workspaces"],"operationId":"delete_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"role_id","in":"path","description":"Role ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Workspaces"],"operationId":"update_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"role_id","in":"path","description":"Role ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateWorkspaceRoleRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceRoleResponse"}}}}}}},"/api/workspaces/{id}/switch":{"post":{"tags":["Workspaces"],"operationId":"switch_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SwitchWorkspaceResponse"}}}}}}},"/api/yjs/{id}":{"get":{"tags":["Realtime"],"operationId":"axum_ws_entry","parameters":[{"name":"id","in":"path","description":"Document ID (UUID)","required":true,"schema":{"type":"string"}},{"name":"token","in":"query","description":"JWT or share token","required":false,"schema":{"type":"string","nullable":true}},{"name":"Authorization","in":"header","description":"Bearer token (JWT or share token)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"101":{"description":"Switching Protocols (WebSocket upgrade)"},"401":{"description":"Unauthorized"}}}}},"components":{"schemas":{"ActiveShareItem":{"type":"object","required":["id","token","permission","created_at","document_id","document_title","document_type","url"],"properties":{"created_at":{"type":"string","format":"date-time"},"document_id":{"type":"string","format":"uuid"},"document_title":{"type":"string"},"document_type":{"type":"string"},"expires_at":{"type":"string","format":"date-time","nullable":true},"id":{"type":"string","format":"uuid"},"parent_share_id":{"type":"string","format":"uuid","nullable":true},"permission":{"type":"string"},"token":{"type":"string"},"url":{"type":"string"}}},"AddPatternsRequest":{"type":"object","required":["patterns"],"properties":{"patterns":{"type":"array","items":{"type":"string"}}}},"ApiTokenCreateRequest":{"type":"object","properties":{"name":{"type":"string","example":"Deploy 
token","nullable":true}}},"ApiTokenCreateResponse":{"type":"object","required":["id","name","created_at","token"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"name":{"type":"string"},"token":{"type":"string"}}},"ApiTokenItem":{"type":"object","required":["id","name","created_at"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"last_used_at":{"type":"string","format":"date-time","nullable":true},"name":{"type":"string"},"revoked_at":{"type":"string","format":"date-time","nullable":true}}},"ApplicableShareItem":{"type":"object","required":["token","permission","scope","excluded"],"properties":{"excluded":{"type":"boolean"},"permission":{"type":"string"},"scope":{"type":"string"},"token":{"type":"string"}}},"AuthProviderInfoResponse":{"type":"object","required":["id","requires_state","client_ids"],"properties":{"authorization_url":{"type":"string","nullable":true},"client_ids":{"type":"array","items":{"type":"string"}},"id":{"type":"string"},"name":{"type":"string","nullable":true},"redirect_uri":{"type":"string","nullable":true},"requires_state":{"type":"boolean"},"scopes":{"type":"array","items":{"type":"string"}}}},"AuthProvidersResponse":{"type":"object","required":["providers"],"properties":{"providers":{"type":"array","items":{"$ref":"#/components/schemas/AuthProviderInfoResponse"}}}},"BacklinkInfo":{"type":"object","required":["document_id","title","document_type","link_type","link_count"],"properties":{"document_id":{"type":"string"},"document_type":{"type":"string"},"file_path":{"type":"string","nullable":true},"link_count":{"type":"integer","format":"int64"},"link_text":{"type":"string","nullable":true},"link_type":{"type":"string"},"title":{"type":"string"}}},"BacklinksResponse":{"type":"object","required":["backlinks","total_count"],"properties":{"backlinks":{"type":"array","items":{"$ref":"#/components/schemas/BacklinkInfo"}},"total_count":{"type":"integer","minimum":0}}},"CheckIgnoredRequest":{"type":"object","required":["path"],"properties":{"path":{"type":"string"}}},"CreateDocumentRequest":{"type":"object","properties":{"parent_id":{"type":"string","format":"uuid","nullable":true},"title":{"type":"string","nullable":true},"type":{"type":"string","nullable":true}}},"CreateGitConfigRequest":{"type":"object","required":["repository_url","auth_type","auth_data"],"properties":{"auth_data":{},"auth_type":{"type":"string"},"auto_sync":{"type":"boolean","nullable":true},"branch_name":{"type":"string","nullable":true},"repository_url":{"type":"string"}}},"CreateRecordBody":{"type":"object","required":["data"],"properties":{"data":{}}},"CreateShareMountRequest":{"type":"object","required":["token"],"properties":{"parent_folder_id":{"type":"string","format":"uuid","nullable":true},"token":{"type":"string"}}},"CreateShareRequest":{"type":"object","required":["document_id"],"properties":{"document_id":{"type":"string","format":"uuid"},"expires_at":{"type":"string","format":"date-time","nullable":true},"permission":{"type":"string","nullable":true}}},"CreateShareResponse":{"type":"object","required":["token","url"],"properties":{"token":{"type":"string"},"url":{"type":"string"}}},"CreateWorkspaceInvitationRequest":{"type":"object","required":["email","role_kind"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"email":{"type":"string"},"expires_at":{"type":"string","format":"date-time","nullable":true},"role_kind":{"type":"string"},"system_r
ole":{"type":"string","nullable":true}}},"CreateWorkspaceRequest":{"type":"object","required":["name"],"properties":{"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"name":{"type":"string"}}},"CreateWorkspaceRoleRequest":{"type":"object","required":["name","base_role"],"properties":{"base_role":{"type":"string"},"description":{"type":"string","nullable":true},"name":{"type":"string"},"overrides":{"type":"array","items":{"$ref":"#/components/schemas/PermissionOverridePayload"},"nullable":true},"priority":{"type":"integer","format":"int32","nullable":true}}},"Document":{"type":"object","required":["id","owner_id","workspace_id","title","type","created_at","updated_at","slug","desired_path"],"properties":{"archived_at":{"type":"string","format":"date-time","nullable":true},"archived_by":{"type":"string","format":"uuid","nullable":true},"archived_parent_id":{"type":"string","format":"uuid","nullable":true},"created_at":{"type":"string","format":"date-time"},"created_by":{"type":"string","format":"uuid","nullable":true},"created_by_plugin":{"type":"string","nullable":true},"desired_path":{"type":"string"},"id":{"type":"string","format":"uuid"},"owner_id":{"type":"string","format":"uuid","description":"Legacy alias for `workspace_id` kept for backward compatibility with older clients."},"parent_id":{"type":"string","format":"uuid","nullable":true},"path":{"type":"string","nullable":true},"slug":{"type":"string"},"title":{"type":"string"},"type":{"type":"string"},"updated_at":{"type":"string","format":"date-time"},"workspace_id":{"type":"string","format":"uuid"}}},"DocumentArchiveBinary":{"type":"string","format":"binary"},"DocumentDownloadBinary":{"type":"string","format":"binary"},"DocumentListResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{"$ref":"#/components/schemas/Document"}}}},"DocumentPatchOperationRequest":{"oneOf":[{"type":"object","required":["offset","text","op"],"properties":{"offset":{"type":"integer","minimum":0},"op":{"type":"string","enum":["insert"]},"text":{"type":"string"}}},{"type":"object","required":["offset","length","op"],"properties":{"length":{"type":"integer","minimum":0},"offset":{"type":"integer","minimum":0},"op":{"type":"string","enum":["delete"]}}},{"type":"object","required":["offset","length","text","op"],"properties":{"length":{"type":"integer","minimum":0},"offset":{"type":"integer","minimum":0},"op":{"type":"string","enum":["replace"]},"text":{"type":"string"}}}],"discriminator":{"propertyName":"op"}},"DownloadDocumentQuery":{"type":"object","properties":{"format":{"$ref":"#/components/schemas/DownloadFormat"},"token":{"type":"string","nullable":true}}},"DownloadFormat":{"type":"string","enum":["archive","markdown","html","html5","pdf","docx","latex","beamer","context","man","mediawiki","dokuwiki","textile","org","texinfo","opml","docbook","opendocument","odt","rtf","epub","epub3","fb2","asciidoc","icml","slidy","slideous","dzslides","revealjs","s5","json","plain","commonmark","commonmark_x","markdown_strict","markdown_phpextra","markdown_github","rst","native","haddock"]},"DownloadWorkspaceQuery":{"type":"object","properties":{"format":{"$ref":"#/components/schemas/DownloadFormat"}}},"DuplicateDocumentRequest":{"type":"object","properties":{"parent_id":{"type":"string","nullable":true},"title":{"type":"string","nullable":true}}},"ExecBody":{"type":"object","properties":{"payload":{"nullable":true}}},"ExecResultResponse":{"type":"object","required":["ok","effects"],"proper
ties":{"data":{"nullable":true},"effects":{"type":"array","items":{}},"error":{"nullable":true},"ok":{"type":"boolean"}}},"GitChangeItem":{"type":"object","required":["path","status"],"properties":{"path":{"type":"string"},"status":{"type":"string"}}},"GitChangesResponse":{"type":"object","required":["files"],"properties":{"files":{"type":"array","items":{"$ref":"#/components/schemas/GitChangeItem"}}}},"GitCommitItem":{"type":"object","required":["hash","message","author_name","author_email","time"],"properties":{"author_email":{"type":"string"},"author_name":{"type":"string"},"hash":{"type":"string"},"message":{"type":"string"},"time":{"type":"string","format":"date-time"}}},"GitConfigResponse":{"type":"object","required":["id","repository_url","branch_name","auth_type","auto_sync","created_at","updated_at"],"properties":{"auth_type":{"type":"string"},"auto_sync":{"type":"boolean"},"branch_name":{"type":"string"},"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"remote_check":{"allOf":[{"$ref":"#/components/schemas/GitRemoteCheckResponse"}],"nullable":true},"repository_url":{"type":"string"},"updated_at":{"type":"string","format":"date-time"}}},"GitHistoryResponse":{"type":"object","required":["commits"],"properties":{"commits":{"type":"array","items":{"$ref":"#/components/schemas/GitCommitItem"}}}},"GitImportResponse":{"type":"object","required":["success","message","files_changed","docs_created","attachments_created"],"properties":{"attachments_created":{"type":"integer","format":"int32"},"commit_hash":{"type":"string","nullable":true},"docs_created":{"type":"integer","format":"int32"},"files_changed":{"type":"integer","format":"int32"},"message":{"type":"string"},"success":{"type":"boolean"}}},"GitPullConflictItem":{"type":"object","required":["path","is_binary"],"properties":{"base":{"type":"string","nullable":true},"document_id":{"type":"string","format":"uuid","nullable":true},"is_binary":{"type":"boolean"},"ours":{"type":"string","nullable":true},"path":{"type":"string"},"theirs":{"type":"string","nullable":true}}},"GitPullRequest":{"type":"object","properties":{"resolutions":{"type":"array","items":{"$ref":"#/components/schemas/GitPullResolution"},"nullable":true}}},"GitPullResolution":{"type":"object","required":["path","choice"],"properties":{"choice":{"type":"string"},"content":{"type":"string","nullable":true},"path":{"type":"string"}}},"GitPullResponse":{"type":"object","required":["success","message","files_changed"],"properties":{"commit_hash":{"type":"string","nullable":true},"conflicts":{"type":"array","items":{"$ref":"#/components/schemas/GitPullConflictItem"},"nullable":true},"files_changed":{"type":"integer","format":"int32"},"git_status":{"allOf":[{"$ref":"#/components/schemas/GitStatus"}],"nullable":true},"message":{"type":"string"},"success":{"type":"boolean"}}},"GitPullSessionResponse":{"type":"object","required":["session_id","status","conflicts","resolutions"],"properties":{"conflicts":{"type":"array","items":{"$ref":"#/components/schemas/GitPullConflictItem"}},"message":{"type":"string","nullable":true},"resolutions":{"type":"array","items":{"$ref":"#/components/schemas/GitPullResolution"}},"session_id":{"type":"string","format":"uuid"},"status":{"type":"string"}}},"GitRemoteCheckResponse":{"type":"object","required":["ok","message"],"properties":{"message":{"type":"string"},"ok":{"type":"boolean"},"reason":{"type":"string","nullable":true}}},"GitStatus":{"type":"object","required":["repository_initialized","has_remote","
uncommitted_changes","untracked_files","sync_enabled"],"properties":{"current_branch":{"type":"string","nullable":true},"has_remote":{"type":"boolean"},"last_sync":{"type":"string","format":"date-time","nullable":true},"last_sync_commit_hash":{"type":"string","nullable":true},"last_sync_message":{"type":"string","nullable":true},"last_sync_status":{"type":"string","nullable":true},"repository_initialized":{"type":"boolean"},"sync_enabled":{"type":"boolean"},"uncommitted_changes":{"type":"integer","format":"int32","minimum":0},"untracked_files":{"type":"integer","format":"int32","minimum":0}}},"GitSyncRequest":{"type":"object","properties":{"force":{"type":"boolean","nullable":true},"full_scan":{"type":"boolean","nullable":true},"message":{"type":"string","nullable":true},"skip_push":{"type":"boolean","nullable":true}}},"GitSyncResponse":{"type":"object","required":["success","message","files_changed"],"properties":{"commit_hash":{"type":"string","nullable":true},"files_changed":{"type":"integer","format":"int32","minimum":0},"message":{"type":"string"},"success":{"type":"boolean"}}},"HealthResp":{"type":"object","required":["status"],"properties":{"status":{"type":"string"}}},"IngestBatchRequest":{"type":"object","required":["events"],"properties":{"events":{"type":"array","items":{"$ref":"#/components/schemas/IngestEventRequest"}}}},"IngestEventRequest":{"type":"object","required":["repo_path","kind"],"properties":{"backend":{"type":"string","nullable":true},"content_hash":{"type":"string","nullable":true},"kind":{"$ref":"#/components/schemas/IngestKindParam"},"payload":{"nullable":true},"repo_path":{"type":"string"}}},"IngestKindParam":{"type":"string","enum":["upsert","delete"]},"InstallFromUrlBody":{"type":"object","required":["url"],"properties":{"token":{"type":"string","nullable":true},"url":{"type":"string"}}},"InstallResponse":{"type":"object","required":["id","version"],"properties":{"id":{"type":"string"},"version":{"type":"string"}}},"KvValueBody":{"type":"object","required":["value"],"properties":{"value":{}}},"KvValueResponse":{"type":"object","required":["value"],"properties":{"value":{}}},"LoginRequest":{"type":"object","required":["email","password"],"properties":{"email":{"type":"string"},"password":{"type":"string"},"remember_me":{"type":"boolean"}}},"LoginResponse":{"type":"object","required":["access_token","user"],"properties":{"access_token":{"type":"string"},"user":{"$ref":"#/components/schemas/UserResponse"}}},"ManifestItem":{"type":"object","required":["id","version","scope","mounts","frontend","permissions","config","ui"],"properties":{"author":{"type":"string","nullable":true},"config":{},"frontend":{},"id":{"type":"string"},"mounts":{"type":"array","items":{"type":"string"}},"name":{"type":"string","nullable":true},"permissions":{"type":"array","items":{"type":"string"}},"repository":{"type":"string","nullable":true},"scope":{"type":"string"},"ui":{},"version":{"type":"string"}}},"MaterializeResponse":{"type":"object","required":["created"],"properties":{"created":{"type":"integer","format":"int64"}}},"OAuthLoginRequest":{"type":"object","properties":{"code":{"type":"string","nullable":true},"credential":{"type":"string","nullable":true},"redirect_uri":{"type":"string","nullable":true},"remember_me":{"type":"boolean"},"state":{"type":"string","nullable":true}}},"OAuthStateResponse":{"type":"object","required":["state"],"properties":{"state":{"type":"string"}}},"OutgoingLink":{"type":"object","required":["document_id","title","document_type","link_type"],"propert
ies":{"document_id":{"type":"string"},"document_type":{"type":"string"},"file_path":{"type":"string","nullable":true},"link_text":{"type":"string","nullable":true},"link_type":{"type":"string"},"position_end":{"type":"integer","format":"int32","nullable":true},"position_start":{"type":"integer","format":"int32","nullable":true},"title":{"type":"string"}}},"OutgoingLinksResponse":{"type":"object","required":["links","total_count"],"properties":{"links":{"type":"array","items":{"$ref":"#/components/schemas/OutgoingLink"}},"total_count":{"type":"integer","minimum":0}}},"PatchDocumentContentRequest":{"type":"object","required":["operations"],"properties":{"operations":{"type":"array","items":{"$ref":"#/components/schemas/DocumentPatchOperationRequest"}}}},"PermissionOverridePayload":{"type":"object","required":["permission","allowed"],"properties":{"allowed":{"type":"boolean"},"permission":{"type":"string"}}},"PlaceholderItemPayload":{"type":"object","required":["kind","id","code"],"properties":{"code":{"type":"string"},"id":{"type":"string"},"kind":{"type":"string"}}},"PublicDocumentSummary":{"type":"object","required":["id","title","updated_at","published_at"],"properties":{"id":{"type":"string","format":"uuid"},"published_at":{"type":"string","format":"date-time"},"title":{"type":"string"},"updated_at":{"type":"string","format":"date-time"}}},"PublishResponse":{"type":"object","required":["slug","public_url"],"properties":{"public_url":{"type":"string"},"slug":{"type":"string"}}},"RecordsResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{}}}},"RefreshResponse":{"type":"object","required":["access_token"],"properties":{"access_token":{"type":"string"}}},"RegisterRequest":{"type":"object","required":["email","name","password"],"properties":{"email":{"type":"string"},"name":{"type":"string"},"password":{"type":"string"}}},"RenderManyRequest":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{"$ref":"#/components/schemas/RenderRequest"}}}},"RenderManyResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{"$ref":"#/components/schemas/RenderResponseBody"}}}},"RenderOptionsPayload":{"type":"object","properties":{"absolute_attachments":{"type":"boolean","default":null,"nullable":true},"base_origin":{"type":"string","default":null,"nullable":true},"doc_id":{"type":"string","format":"uuid","default":null,"nullable":true},"features":{"type":"array","items":{"type":"string"},"default":null,"nullable":true},"flavor":{"type":"string","default":null,"nullable":true},"hardbreaks":{"type":"boolean","default":null,"nullable":true},"sanitize":{"type":"boolean","default":null,"nullable":true},"theme":{"type":"string","default":null,"nullable":true},"token":{"type":"string","default":null,"nullable":true}}},"RenderRequest":{"type":"object","required":["text"],"properties":{"options":{"$ref":"#/components/schemas/RenderOptionsPayload"},"text":{"type":"string"}}},"RenderResponseBody":{"type":"object","required":["html","hash"],"properties":{"hash":{"type":"string"},"html":{"type":"string"},"placeholders":{"type":"array","items":{"$ref":"#/components/schemas/PlaceholderItemPayload"}}}},"SearchResult":{"type":"object","required":["id","title","document_type","updated_at"],"properties":{"document_type":{"type":"string"},"id":{"type":"string","format":"uuid"},"path":{"type":"string","nullable":true},"title":{"type":"string"},"updated_at":{"type":"string","format":"date-time"}}},"SessionResponse":{"type
":"object","required":["id","workspace_id","remember_me","created_at","last_seen_at","expires_at","current"],"properties":{"created_at":{"type":"string","format":"date-time"},"current":{"type":"boolean"},"expires_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"ip_address":{"type":"string","nullable":true},"last_seen_at":{"type":"string","format":"date-time"},"remember_me":{"type":"boolean"},"user_agent":{"type":"string","nullable":true},"workspace_id":{"type":"string","format":"uuid"}}},"ShareBrowseResponse":{"type":"object","required":["tree"],"properties":{"tree":{"type":"array","items":{"$ref":"#/components/schemas/ShareBrowseTreeItem"}}}},"ShareBrowseTreeItem":{"type":"object","required":["id","title","type","created_at","updated_at"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"parent_id":{"type":"string","format":"uuid","nullable":true},"title":{"type":"string"},"type":{"type":"string","example":"document"},"updated_at":{"type":"string","format":"date-time"}}},"ShareDocumentResponse":{"type":"object","required":["id","title","permission"],"properties":{"content":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"permission":{"type":"string"},"title":{"type":"string"}}},"ShareItem":{"type":"object","required":["id","token","permission","url","scope"],"properties":{"expires_at":{"type":"string","format":"date-time","nullable":true},"id":{"type":"string","format":"uuid"},"parent_share_id":{"type":"string","format":"uuid","nullable":true},"permission":{"type":"string"},"scope":{"type":"string"},"token":{"type":"string"},"url":{"type":"string"}}},"ShareMountItem":{"type":"object","required":["id","token","target_document_id","target_document_type","target_title","permission","created_at"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"parent_folder_id":{"type":"string","format":"uuid","nullable":true},"permission":{"type":"string"},"target_document_id":{"type":"string","format":"uuid"},"target_document_type":{"type":"string"},"target_title":{"type":"string"},"token":{"type":"string"}}},"SnapshotDiffBaseParam":{"type":"string","enum":["auto","current","previous"]},"SnapshotDiffKind":{"type":"string","enum":["current","snapshot"]},"SnapshotDiffResponse":{"type":"object","required":["base","target","diff"],"properties":{"base":{"$ref":"#/components/schemas/SnapshotDiffSideResponse"},"diff":{"$ref":"#/components/schemas/TextDiffResult"},"target":{"$ref":"#/components/schemas/SnapshotDiffSideResponse"}}},"SnapshotDiffSideResponse":{"type":"object","required":["kind","markdown"],"properties":{"kind":{"$ref":"#/components/schemas/SnapshotDiffKind"},"markdown":{"type":"string"},"snapshot":{"allOf":[{"$ref":"#/components/schemas/SnapshotSummary"}],"nullable":true}}},"SnapshotListResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{"$ref":"#/components/schemas/SnapshotSummary"}}}},"SnapshotRestoreResponse":{"type":"object","required":["snapshot"],"properties":{"snapshot":{"$ref":"#/components/schemas/SnapshotSummary"}}},"SnapshotSummary":{"type":"object","required":["id","document_id","label","kind","created_at","byte_size","content_hash"],"properties":{"byte_size":{"type":"integer","format":"int64"},"content_hash":{"type":"string"},"created_at":{"type":"string","format":"date-time"},"created_by":{"type":"string","format":"uuid","nullable":true},"document_id":{"type":"string","format
":"uuid"},"id":{"type":"string","format":"uuid"},"kind":{"type":"string"},"label":{"type":"string"},"notes":{"type":"string","nullable":true}}},"SwitchWorkspaceResponse":{"type":"object","required":["access_token"],"properties":{"access_token":{"type":"string"}}},"TagItem":{"type":"object","required":["name","count"],"properties":{"count":{"type":"integer","format":"int64"},"name":{"type":"string"}}},"TextDiffLine":{"type":"object","required":["line_type","content"],"properties":{"content":{"type":"string"},"line_type":{"$ref":"#/components/schemas/TextDiffLineType"},"new_line_number":{"type":"integer","format":"int32","nullable":true,"minimum":0},"old_line_number":{"type":"integer","format":"int32","nullable":true,"minimum":0}}},"TextDiffLineType":{"type":"string","enum":["added","deleted","context"]},"TextDiffResult":{"type":"object","required":["file_path","diff_lines"],"properties":{"diff_lines":{"type":"array","items":{"$ref":"#/components/schemas/TextDiffLine"}},"file_path":{"type":"string"},"new_content":{"type":"string","nullable":true},"old_content":{"type":"string","nullable":true}}},"UninstallBody":{"type":"object","required":["id"],"properties":{"id":{"type":"string"}}},"UpdateDocumentContentRequest":{"type":"object","required":["content"],"properties":{"content":{"type":"string"}}},"UpdateDocumentRequest":{"type":"object","properties":{"parent_id":{"type":"string","nullable":true},"title":{"type":"string","nullable":true}}},"UpdateGitConfigRequest":{"type":"object","properties":{"auth_data":{"nullable":true},"auth_type":{"type":"string","nullable":true},"auto_sync":{"type":"boolean","nullable":true},"branch_name":{"type":"string","nullable":true},"repository_url":{"type":"string","nullable":true}}},"UpdateMemberRoleRequest":{"type":"object","required":["role_kind"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true}}},"UpdateRecordBody":{"type":"object","required":["patch"],"properties":{"patch":{}}},"UpdateUserShortcutRequest":{"type":"object","properties":{"bindings":{"type":"object"},"leader_key":{"type":"string","example":"","nullable":true}}},"UpdateWorkspaceRequest":{"type":"object","properties":{"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"name":{"type":"string","nullable":true}}},"UpdateWorkspaceRoleRequest":{"type":"object","properties":{"base_role":{"type":"string","nullable":true},"description":{"type":"string","nullable":true},"name":{"type":"string","nullable":true},"overrides":{"type":"array","items":{"$ref":"#/components/schemas/PermissionOverridePayload"},"nullable":true},"priority":{"type":"integer","format":"int32","nullable":true}}},"UploadFileMultipart":{"type":"object","required":["file","document_id"],"properties":{"document_id":{"type":"string","format":"uuid"},"file":{"type":"string","format":"binary"}}},"UploadFileResponse":{"type":"object","required":["id","url","filename","size"],"properties":{"content_type":{"type":"string","nullable":true},"filename":{"type":"string"},"id":{"type":"string","format":"uuid"},"size":{"type":"integer","format":"int64"},"url":{"type":"string"}}},"UserResponse":{"type":"object","required":["id","email","name","workspaces"],"properties":{"active_workspace":{"allOf":[{"$ref":"#/components/schemas/WorkspaceMembershipResponse"}],"nullable":true},"active_workspace_id":{"type":"string","format":"uuid","nullable":true},"active_workspace_permissions":{"type":"array","items":{"type
":"string"}},"email":{"type":"string"},"id":{"type":"string","format":"uuid"},"name":{"type":"string"},"workspaces":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceMembershipResponse"}}}},"UserShortcutResponse":{"type":"object","required":["bindings"],"properties":{"bindings":{"type":"object"},"leader_key":{"type":"string","example":"","nullable":true},"updated_at":{"type":"string","format":"date-time","nullable":true}}},"WorkspaceInvitationResponse":{"type":"object","required":["id","workspace_id","email","role_kind","invited_by","token","created_at"],"properties":{"accepted_at":{"type":"string","format":"date-time","nullable":true},"accepted_by":{"type":"string","format":"uuid","nullable":true},"created_at":{"type":"string","format":"date-time"},"custom_role_id":{"type":"string","format":"uuid","nullable":true},"email":{"type":"string"},"expires_at":{"type":"string","format":"date-time","nullable":true},"id":{"type":"string","format":"uuid"},"invited_by":{"type":"string","format":"uuid"},"revoked_at":{"type":"string","format":"date-time","nullable":true},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true},"token":{"type":"string"},"workspace_id":{"type":"string","format":"uuid"}}},"WorkspaceMemberResponse":{"type":"object","required":["workspace_id","user_id","email","name","role_kind","is_default"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"email":{"type":"string"},"is_default":{"type":"boolean"},"name":{"type":"string"},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true},"user_id":{"type":"string","format":"uuid"},"workspace_id":{"type":"string","format":"uuid"}}},"WorkspaceMembershipResponse":{"type":"object","required":["id","name","slug","is_personal","role_kind","is_default"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"is_default":{"type":"boolean"},"is_personal":{"type":"boolean"},"name":{"type":"string"},"role_kind":{"type":"string"},"slug":{"type":"string"},"system_role":{"type":"string","nullable":true}}},"WorkspacePermissionsResponse":{"type":"object","required":["workspace_id","permissions"],"properties":{"permissions":{"type":"array","items":{"type":"string"}},"workspace_id":{"type":"string","format":"uuid"}}},"WorkspaceResponse":{"type":"object","required":["id","name","slug","is_personal","role_kind","is_default"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"is_default":{"type":"boolean"},"is_personal":{"type":"boolean"},"name":{"type":"string"},"role_kind":{"type":"string"},"slug":{"type":"string"},"system_role":{"type":"string","nullable":true}}},"WorkspaceRoleResponse":{"type":"object","required":["id","workspace_id","name","base_role","priority","overrides"],"properties":{"base_role":{"type":"string"},"description":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"name":{"type":"string"},"overrides":{"type":"array","items":{"$ref":"#/components/schemas/PermissionOverridePayload"}},"priority":{"type":"integer","format":"int32"},"workspace_id":{"type":"string","format":"uuid"}}}}},"tags":[{"name":"Auth","description":"Authentication"},{"name":"Documents","description":"Documents 
management"},{"name":"Files","description":"File management"},{"name":"Sharing","description":"Document sharing"},{"name":"Public Documents","description":"Public pages"},{"name":"Realtime","description":"Yjs WebSocket endpoint (/yjs/:id)"},{"name":"Git","description":"Git integration"},{"name":"Markdown","description":"Markdown rendering"},{"name":"Plugins","description":"Plugins management & data APIs"},{"name":"Storage","description":"Storage ingest APIs"},{"name":"Health","description":"System health checks"}]} +{"openapi":"3.0.3","info":{"title":"presentation","description":"","license":{"name":""},"version":"0.1.0"},"paths":{"/api/auth/login":{"post":{"tags":["Auth"],"operationId":"login","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/LoginRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/LoginResponse"}}}}},"security":[{}]}},"/api/auth/logout":{"post":{"tags":["Auth"],"operationId":"logout","responses":{"204":{"description":""}}}},"/api/auth/me":{"get":{"tags":["Auth"],"operationId":"me","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserResponse"}}}}}},"delete":{"tags":["Auth"],"operationId":"delete_account","responses":{"204":{"description":""}}}},"/api/auth/oauth/{provider}":{"post":{"tags":["Auth"],"operationId":"oauth_login","parameters":[{"name":"provider","in":"path","description":"OAuth provider identifier (e.g., google)","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/OAuthLoginRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/LoginResponse"}}}}},"security":[{}]}},"/api/auth/oauth/{provider}/state":{"post":{"tags":["Auth"],"operationId":"oauth_state","parameters":[{"name":"provider","in":"path","description":"OAuth provider identifier","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/OAuthStateResponse"}}}}},"security":[{}]}},"/api/auth/providers":{"get":{"tags":["Auth"],"operationId":"list_oauth_providers","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/AuthProvidersResponse"}}}}},"security":[{}]}},"/api/auth/refresh":{"post":{"tags":["Auth"],"operationId":"refresh_session","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RefreshResponse"}}}}}}},"/api/auth/register":{"post":{"tags":["Auth"],"operationId":"register","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RegisterRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserResponse"}}}}},"security":[{}]}},"/api/auth/sessions":{"get":{"tags":["Auth"],"operationId":"list_sessions","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/SessionResponse"}}}}}}}},"/api/auth/sessions/{id}":{"delete":{"tags":["Auth"],"operationId":"revoke_session","parameters":[{"name":"id","in":"path","description":"Session 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/documents":{"get":{"tags":["Documents"],"operationId":"list_documents","parameters":[{"name":"tag","in":"query","description":"Filter by encrypted tag (Base64 encoded)","required":false,"schema":{"type":"string","nullable":true}},{"name":"state","in":"query","description":"Filter by document state (active|archived|all)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DocumentListResponse"}}}}}},"post":{"tags":["Documents"],"operationId":"create_document","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateDocumentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{docId}/files":{"get":{"tags":["Files"],"summary":"List files for a document.","description":"Returns encrypted metadata for client-side decryption to build file map.\nSupports authentication via bearer token or share token query parameter.","operationId":"list_files","parameters":[{"name":"docId","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token for authentication","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"OK","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ListFileResponse"}}}}}}},"post":{"tags":["Files"],"operationId":"upload_file","parameters":[{"name":"docId","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"multipart/form-data":{"schema":{"$ref":"#/components/schemas/UploadFileMultipart"}}},"required":true},"responses":{"201":{"description":"File uploaded","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UploadFileResponse"}}}}}}},"/api/documents/{id}":{"get":{"tags":["Documents"],"operationId":"get_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}},"delete":{"tags":["Documents"],"operationId":"delete_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Documents"],"operationId":"update_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDocumentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{id}/archive":{"post":{"tags":["Documents"],"operationId":"archive_document","parameters":[{"name":"id","in":"path","description":"Document 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}},"404":{"description":"Document not found"},"409":{"description":"Document already archived"}}}},"/api/documents/{id}/backlinks":{"get":{"tags":["Documents"],"operationId":"getBacklinks","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/BacklinksResponse"}}}}}}},"/api/documents/{id}/content":{"get":{"tags":["Documents"],"operationId":"get_document_content","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetContentResponse"}}}}}},"put":{"tags":["Documents"],"operationId":"update_document_content","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDocumentContentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}},"patch":{"tags":["Documents"],"operationId":"patch_document_content","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/PatchDocumentContentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{id}/duplicate":{"post":{"tags":["Documents"],"operationId":"duplicate_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/DuplicateDocumentRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/documents/{id}/keys":{"get":{"tags":["E2EE"],"operationId":"get_document_key","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DocumentKeyResponse"}}}},"404":{"description":"Document key not found"}}},"post":{"tags":["E2EE"],"operationId":"store_document_key","parameters":[{"name":"id","in":"path","description":"Document 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StoreDocumentKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DocumentKeyResponse"}}}}}}},"/api/documents/{id}/keys/rotate":{"post":{"tags":["E2EE"],"operationId":"rotate_document_key","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RotateDocumentKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RotateDocumentKeyResponse"}}}},"400":{"description":"Invalid request"},"403":{"description":"Permission denied"}}}},"/api/documents/{id}/links":{"get":{"tags":["Documents"],"operationId":"getOutgoingLinks","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/OutgoingLinksResponse"}}}}}}},"/api/documents/{id}/snapshots":{"get":{"tags":["Documents"],"operationId":"list_document_snapshots","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}},{"name":"limit","in":"query","description":"Maximum number of snapshots to return","required":false,"schema":{"type":"integer","format":"int64","nullable":true}},{"name":"offset","in":"query","description":"Offset for pagination","required":false,"schema":{"type":"integer","format":"int64","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotListResponse"}}}}}}},"/api/documents/{id}/snapshots/{snapshot_id}":{"get":{"tags":["Documents"],"operationId":"get_document_snapshot","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"snapshot_id","in":"path","description":"Snapshot ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotDetailResponse"}}}}}}},"/api/documents/{id}/snapshots/{snapshot_id}/diff":{"get":{"tags":["Documents"],"operationId":"get_document_snapshot_diff","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"snapshot_id","in":"path","description":"Snapshot ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}},{"name":"compare","in":"query","description":"Snapshot ID to compare against (defaults to current document state)","required":false,"schema":{"type":"string","format":"uuid","nullable":true}},{"name":"base","in":"query","description":"Base comparison to use when compare is not provided 
(auto|current|previous)","required":false,"schema":{"allOf":[{"$ref":"#/components/schemas/SnapshotDiffBaseParam"}],"nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotDiffResponse"}}}}}}},"/api/documents/{id}/snapshots/{snapshot_id}/restore":{"post":{"tags":["Documents"],"operationId":"restore_document_snapshot","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"snapshot_id","in":"path","description":"Snapshot ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SnapshotRestoreResponse"}}}}}}},"/api/documents/{id}/tags":{"get":{"tags":["Tags"],"summary":"Get tags for a specific document (E2EE format)","operationId":"get_document_tags","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DocumentTagsResponse"}}}}}},"put":{"tags":["Tags"],"summary":"Replace tags for a document (E2EE format)","operationId":"update_document_tags","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDocumentTagsRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DocumentTagsResponse"}}}}}}},"/api/documents/{id}/unarchive":{"post":{"tags":["Documents"],"operationId":"unarchive_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}},"404":{"description":"Document not found"},"409":{"description":"Document is not archived"}}}},"/api/files/{id}":{"get":{"tags":["Files"],"summary":"Download a file by ID.","description":"Returns encrypted file with E2EE metadata in headers for client-side decryption.\nSupports authentication via bearer token or share token query parameter.","operationId":"get_file","parameters":[{"name":"id","in":"path","description":"File ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token for 
authentication","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"OK","content":{"application/octet-stream":{"schema":{"type":"string","format":"binary"}}}}}}},"/api/git/config":{"get":{"tags":["Git"],"operationId":"get_config","responses":{"200":{"description":"","content":{"application/json":{"schema":{"allOf":[{"$ref":"#/components/schemas/GitConfigResponse"}],"nullable":true}}}}}},"post":{"tags":["Git"],"operationId":"create_or_update_config","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateGitConfigRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GitConfigResponse"}}}}}},"delete":{"tags":["Git"],"operationId":"delete_config","responses":{"204":{"description":"Deleted"}}}},"/api/health":{"get":{"tags":["Health"],"operationId":"health","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HealthResp"}}}}}}},"/api/me/api-tokens":{"get":{"tags":["Auth"],"operationId":"list_api_tokens","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ApiTokenItem"}}}}}}},"post":{"tags":["Auth"],"operationId":"create_api_token","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/ApiTokenCreateRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ApiTokenCreateResponse"}}}}}}},"/api/me/api-tokens/{id}":{"delete":{"tags":["Auth"],"operationId":"revoke_api_token","parameters":[{"name":"id","in":"path","description":"Token ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/me/encryption/migrate":{"post":{"tags":["E2EE"],"summary":"Migrate user data to E2EE.","description":"This endpoint receives encryption keys from the client and encrypts\nall of the user's existing plaintext data on the server.\n\nThe operation is atomic - either all data is encrypted or none is.","operationId":"migrate","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/MigrateRequest"}}},"required":true},"responses":{"200":{"description":"Migration completed successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/MigrationResponse"}}}},"400":{"description":"Invalid request (e.g., missing DEK for document)"},"409":{"description":"Migration already completed"},"500":{"description":"Migration failed"}}}},"/api/me/encryption/needs-migration":{"get":{"tags":["E2EE"],"summary":"Check if migration is needed for the current user.","operationId":"needs_migration","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/NeedsMigrationResponse"}}}}}}},"/api/me/encryption/setup-complete":{"post":{"tags":["E2EE"],"operationId":"mark_encryption_setup_complete","responses":{"204":{"description":""}}}},"/api/me/encryption/status":{"get":{"tags":["E2EE"],"operationId":"get_encryption_status","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EncryptionStatusResponse"}}}}}}},"/api/me/keys":{"get":{"tags":["E2EE"],"operationId":"get_my_public_key","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserPublicKeyResponse"}}}},"404":{"description":"Public key not 
found"}}},"post":{"tags":["E2EE"],"operationId":"register_public_key","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RegisterPublicKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserPublicKeyResponse"}}}}}}},"/api/me/master-key/backup":{"get":{"tags":["E2EE"],"operationId":"get_master_key_backup","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/MasterKeyBackupResponse"}}}},"404":{"description":"Master key backup not found"}}},"post":{"tags":["E2EE"],"operationId":"store_master_key_backup","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StoreMasterKeyBackupRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/MasterKeyBackupResponse"}}}}}}},"/api/me/plugins/install-from-url":{"post":{"tags":["Plugins"],"operationId":"pluginsInstallFromUrl","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/InstallFromUrlBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/InstallResponse"}}}}}}},"/api/me/plugins/manifest":{"get":{"tags":["Plugins"],"operationId":"pluginsGetManifest","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ManifestItem"}}}}}}}},"/api/me/plugins/uninstall":{"post":{"tags":["Plugins"],"operationId":"pluginsUninstall","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UninstallBody"}}},"required":true},"responses":{"204":{"description":""}}}},"/api/me/plugins/updates":{"get":{"tags":["Plugins"],"operationId":"sse_updates","responses":{"200":{"description":"Plugin event stream"}}}},"/api/me/private-key/encrypted":{"get":{"tags":["E2EE"],"operationId":"get_encrypted_private_key","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EncryptedPrivateKeyResponse"}}}},"404":{"description":"Encrypted private key not found"}}},"post":{"tags":["E2EE"],"operationId":"store_encrypted_private_key","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StoreEncryptedPrivateKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/EncryptedPrivateKeyResponse"}}}}}}},"/api/me/shortcuts":{"get":{"tags":["Auth"],"operationId":"get_user_shortcuts","responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserShortcutResponse"}}}}}},"put":{"tags":["Auth"],"operationId":"update_user_shortcuts","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateUserShortcutRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserShortcutResponse"}}}}}}},"/api/plugin-assets":{"get":{"tags":["Plugins"],"operationId":"pluginsGetAsset","parameters":[{"name":"token","in":"query","description":"Share token (optional)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"Plugin 
asset"}}}},"/api/plugins/{plugin}/docs/{doc_id}/kv/{key}":{"get":{"tags":["Plugins"],"operationId":"pluginsGetKv","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"key","in":"path","description":"Key","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/KvValueResponse"}}}}}},"put":{"tags":["Plugins"],"operationId":"pluginsPutKv","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"key","in":"path","description":"Key","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/KvValueBody"}}},"required":true},"responses":{"204":{"description":""}}}},"/api/plugins/{plugin}/docs/{doc_id}/records/{kind}":{"get":{"tags":["Plugins"],"operationId":"list_records","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"kind","in":"path","description":"Record kind","required":true,"schema":{"type":"string"}},{"name":"limit","in":"query","description":"Limit","required":false,"schema":{"type":"integer","format":"int64","nullable":true}},{"name":"offset","in":"query","description":"Offset","required":false,"schema":{"type":"integer","format":"int64","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RecordsResponse"}}}}}},"post":{"tags":["Plugins"],"operationId":"pluginsCreateRecord","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"doc_id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"kind","in":"path","description":"Record kind","required":true,"schema":{"type":"string"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateRecordBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{}}}}}}},"/api/plugins/{plugin}/records/{id}":{"delete":{"tags":["Plugins"],"operationId":"pluginsDeleteRecord","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Record ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Plugins"],"operationId":"pluginsUpdateRecord","parameters":[{"name":"plugin","in":"path","description":"Plugin ID","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Record ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateRecordBody"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{}}}}}}},"/api/public/documents/{id}":{"get":{"tags":["Public 
Documents"],"operationId":"get_publish_status","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Published status","content":{"application/json":{"schema":{"$ref":"#/components/schemas/PublishResponse"}}}}}},"post":{"tags":["Public Documents"],"operationId":"publish_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"description":"Optional plaintext content for E2EE workspaces","content":{"application/json":{"schema":{"allOf":[{"$ref":"#/components/schemas/PublishRequest"}],"nullable":true}}},"required":false},"responses":{"200":{"description":"Published","content":{"application/json":{"schema":{"$ref":"#/components/schemas/PublishResponse"}}}}}},"delete":{"tags":["Public Documents"],"operationId":"unpublish_document","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":"Unpublished"}}},"patch":{"tags":["Public Documents"],"operationId":"update_publish_settings","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"description":"Settings to update","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdatePublishSettingsRequest"}}},"required":true},"responses":{"204":{"description":"Settings updated"}}}},"/api/public/documents/{id}/files/{file_id}":{"post":{"tags":["Public Documents"],"operationId":"upload_public_file","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"file_id","in":"path","description":"File ID (original encrypted file ID)","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"description":"Decrypted file data","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UploadPublicFileRequest"}}},"required":true},"responses":{"204":{"description":"File uploaded"}}}},"/api/public/workspaces/{slug}":{"get":{"tags":["Public Documents"],"operationId":"list_workspace_public_documents","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Public documents for workspace","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/PublicDocumentSummary"}}}}}}}},"/api/public/workspaces/{slug}/{id}":{"get":{"tags":["Public Documents"],"operationId":"get_public_by_workspace_and_id","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Document metadata","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Document"}}}}}}},"/api/public/workspaces/{slug}/{id}/content":{"get":{"tags":["Public Documents"],"operationId":"get_public_content_by_workspace_and_id","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Document 
content"}}}},"/api/public/workspaces/{slug}/{id}/files":{"get":{"tags":["Public Documents"],"operationId":"list_public_files","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"List of public files","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/PublicFile"}}}}}}}},"/api/public/workspaces/{slug}/{id}/files/{filename}":{"get":{"tags":["Public Documents"],"operationId":"get_public_file","parameters":[{"name":"slug","in":"path","description":"Workspace slug","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"filename","in":"path","description":"Logical filename as it appears in markdown","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"File content"}}}},"/api/shares":{"post":{"tags":["Sharing"],"operationId":"create_share","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateShareRequest"}}},"required":true},"responses":{"200":{"description":"Share link created","content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateShareResponse"}}}}}}},"/api/shares/active":{"get":{"tags":["Sharing"],"operationId":"list_active_shares","responses":{"200":{"description":"Active shares","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ActiveShareItem"}}}}}}}},"/api/shares/applicable":{"get":{"tags":["Sharing"],"operationId":"list_applicable_shares","parameters":[{"name":"doc_id","in":"query","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"Shares that include the document","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ApplicableShareItem"}}}}}}}},"/api/shares/browse":{"get":{"tags":["Sharing"],"operationId":"browse_share","parameters":[{"name":"token","in":"query","description":"Share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Share tree","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareBrowseResponse"}}}}}}},"/api/shares/documents/{id}":{"get":{"tags":["Sharing"],"operationId":"list_document_shares","parameters":[{"name":"id","in":"path","description":"Document ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"OK","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ShareItem"}}}}}}}},"/api/shares/folders/{token}/materialize":{"post":{"tags":["Sharing"],"operationId":"materialize_folder_share","parameters":[{"name":"token","in":"path","description":"Folder share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Created doc shares","content":{"application/json":{"schema":{"$ref":"#/components/schemas/MaterializeResponse"}}}}}}},"/api/shares/mounts":{"get":{"tags":["Sharing"],"operationId":"list_share_mounts","responses":{"200":{"description":"Share 
mounts","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/ShareMountItem"}}}}}}},"post":{"tags":["Sharing"],"operationId":"create_share_mount","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateShareMountRequest"}}},"required":true},"responses":{"200":{"description":"Saved share mount","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareMountItem"}}}}}}},"/api/shares/mounts/{id}":{"delete":{"tags":["Sharing"],"operationId":"delete_share_mount","parameters":[{"name":"id","in":"path","description":"Share mount ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":"Share mount removed"}}}},"/api/shares/salt":{"get":{"tags":["Sharing"],"summary":"Get salt for password-protected share (for password challenge)","operationId":"get_share_salt","parameters":[{"name":"token","in":"query","description":"Share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Salt info for password-protected share","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareSaltResponse"}}}}}}},"/api/shares/validate":{"get":{"tags":["Sharing"],"operationId":"validate_share_token","parameters":[{"name":"token","in":"query","description":"Share token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"Document info","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareDocumentResponse"}}}}}}},"/api/shares/{id}/keys":{"get":{"tags":["E2EE"],"operationId":"get_share_key","parameters":[{"name":"id","in":"path","description":"Share ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token for authentication","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareKeyResponse"}}}},"401":{"description":"Invalid or missing share token"},"404":{"description":"Share key not found"}}},"post":{"tags":["E2EE"],"operationId":"store_share_key","parameters":[{"name":"id","in":"path","description":"Share ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StoreShareKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareKeyResponse"}}}}}}},"/api/shares/{id}/keys/password-protected":{"post":{"tags":["E2EE"],"operationId":"store_password_protected_share_key","parameters":[{"name":"id","in":"path","description":"Share ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StorePasswordProtectedShareKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareKeyResponse"}}}}}}},"/api/shares/{id}/salt":{"get":{"tags":["E2EE"],"operationId":"get_share_salt","parameters":[{"name":"id","in":"path","description":"Share ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"token","in":"query","description":"Share token for authentication","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ShareSaltResponse"}}}},"401":{"description":"Invalid or missing share 
token"}}}},"/api/shares/{token}":{"delete":{"tags":["Sharing"],"operationId":"delete_share","parameters":[{"name":"token","in":"path","description":"Share token","required":true,"schema":{"type":"string"}}],"responses":{"204":{"description":"Share link deleted"}}}},"/api/storage/ingest":{"post":{"tags":["Storage"],"operationId":"enqueue_ingest_events","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/IngestBatchRequest"}}},"required":true},"responses":{"202":{"description":"Events enqueued"},"400":{"description":"Invalid request"}}}},"/api/tags":{"get":{"tags":["Tags"],"summary":"List all tags in the workspace (E2EE format)","operationId":"list_tags","parameters":[{"name":"q","in":"query","description":"Base64 encoded encrypted tag for exact match filter","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ListTagsResponse"}}}}}}},"/api/users/{user_id}/keys":{"get":{"tags":["E2EE"],"operationId":"get_user_public_key","parameters":[{"name":"user_id","in":"path","description":"User ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UserPublicKeyResponse"}}}},"404":{"description":"Public key not found"}}}},"/api/workspace-invitations/{token}/accept":{"post":{"tags":["Workspaces"],"operationId":"accept_invitation","parameters":[{"name":"token","in":"path","description":"Invitation token","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/AcceptInvitationResponse"}}}}}}},"/api/workspaces":{"get":{"tags":["Workspaces"],"operationId":"list_workspaces","responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}}},"post":{"tags":["Workspaces"],"operationId":"create_workspace","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateWorkspaceRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}}},"/api/workspaces/{id}":{"get":{"tags":["Workspaces"],"operationId":"get_workspace_detail","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}},"put":{"tags":["Workspaces"],"operationId":"update_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateWorkspaceRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceResponse"}}}}}},"delete":{"tags":["Workspaces"],"operationId":"delete_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/workspaces/{id}/invitations":{"get":{"tags":["Workspaces"],"operationId":"list_invitations","parameters":[{"name":"id","in":"path","description":"Workspace 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"post":{"tags":["Workspaces"],"operationId":"create_invitation","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateWorkspaceInvitationRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"/api/workspaces/{id}/invitations/{invitation_id}":{"delete":{"tags":["Workspaces"],"operationId":"revoke_invitation","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"invitation_id","in":"path","description":"Invitation ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"/api/workspaces/{id}/invitations/{invitation_id}/kek":{"patch":{"tags":["Workspaces"],"operationId":"update_invitation_kek","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"invitation_id","in":"path","description":"Invitation ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateInvitationKekRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceInvitationResponse"}}}}}}},"/api/workspaces/{id}/keys":{"get":{"tags":["E2EE"],"operationId":"list_workspace_keys","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceKeyResponse"}}}}}}},"post":{"tags":["E2EE"],"operationId":"store_workspace_key","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StoreWorkspaceKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceKeyResponse"}}}}}}},"/api/workspaces/{id}/keys/me":{"get":{"tags":["E2EE"],"operationId":"get_my_workspace_key","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceKeyResponse"}}}},"404":{"description":"Key not found"}}}},"/api/workspaces/{id}/keys/rotate":{"post":{"tags":["E2EE"],"operationId":"rotate_workspace_key","parameters":[{"name":"id","in":"path","description":"Workspace 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RotateWorkspaceKeyRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RotateWorkspaceKeyResponse"}}}},"400":{"description":"Invalid request"},"403":{"description":"Permission denied"}}}},"/api/workspaces/{id}/keys/version":{"get":{"tags":["E2EE"],"operationId":"get_workspace_key_version","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceKeyVersionResponse"}}}}}}},"/api/workspaces/{id}/keys/{version}":{"delete":{"tags":["E2EE"],"operationId":"delete_key_version","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"version","in":"path","description":"Key version to delete","required":true,"schema":{"type":"integer","format":"int32"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DeleteKeyVersionResponse"}}}},"403":{"description":"Permission denied"}}}},"/api/workspaces/{id}/leave":{"post":{"tags":["Workspaces"],"operationId":"leave_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}}},"/api/workspaces/{id}/members":{"get":{"tags":["Workspaces"],"operationId":"list_members","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceMemberResponse"}}}}}}}},"/api/workspaces/{id}/members/{user_id}":{"delete":{"tags":["Workspaces"],"operationId":"remove_member","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"user_id","in":"path","description":"Target user ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Workspaces"],"operationId":"update_member_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"user_id","in":"path","description":"Target user ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateMemberRoleRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceMemberResponse"}}}}}}},"/api/workspaces/{id}/permissions":{"get":{"tags":["Workspaces"],"operationId":"get_workspace_permissions","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspacePermissionsResponse"}}}}}}},"/api/workspaces/{id}/roles":{"get":{"tags":["Workspaces"],"operationId":"list_roles","parameters":[{"name":"id","in":"path","description":"Workspace 
ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceRoleResponse"}}}}}}},"post":{"tags":["Workspaces"],"operationId":"create_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateWorkspaceRoleRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceRoleResponse"}}}}}}},"/api/workspaces/{id}/roles/{role_id}":{"delete":{"tags":["Workspaces"],"operationId":"delete_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"role_id","in":"path","description":"Role ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"204":{"description":""}}},"patch":{"tags":["Workspaces"],"operationId":"update_role","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}},{"name":"role_id","in":"path","description":"Role ID","required":true,"schema":{"type":"string","format":"uuid"}}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateWorkspaceRoleRequest"}}},"required":true},"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/WorkspaceRoleResponse"}}}}}}},"/api/workspaces/{id}/switch":{"post":{"tags":["Workspaces"],"operationId":"switch_workspace","parameters":[{"name":"id","in":"path","description":"Workspace ID","required":true,"schema":{"type":"string","format":"uuid"}}],"responses":{"200":{"description":"","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SwitchWorkspaceResponse"}}}}}}},"/api/yjs/{id}":{"get":{"tags":["Realtime"],"operationId":"axum_ws_entry","parameters":[{"name":"id","in":"path","description":"Document ID (UUID)","required":true,"schema":{"type":"string"}},{"name":"token","in":"query","description":"JWT or share token","required":false,"schema":{"type":"string","nullable":true}},{"name":"Authorization","in":"header","description":"Bearer token (JWT or share token)","required":false,"schema":{"type":"string","nullable":true}}],"responses":{"101":{"description":"Switching Protocols (WebSocket upgrade)"},"401":{"description":"Unauthorized"}}}}},"components":{"schemas":{"AcceptInvitationResponse":{"type":"object","description":"Response for accepting a workspace 
invitation","required":["workspaceId"],"properties":{"encryptedKekForInvite":{"type":"string","nullable":true},"kekVersion":{"type":"integer","format":"int32","nullable":true},"workspaceId":{"type":"string","format":"uuid"}}},"ActiveShareItem":{"type":"object","required":["id","token","permission","created_at","document_id","document_title","document_type","url"],"properties":{"created_at":{"type":"string","format":"date-time"},"document_id":{"type":"string","format":"uuid"},"document_title":{"type":"string"},"document_type":{"type":"string"},"expires_at":{"type":"string","format":"date-time","nullable":true},"id":{"type":"string","format":"uuid"},"parent_share_id":{"type":"string","format":"uuid","nullable":true},"permission":{"type":"string"},"token":{"type":"string"},"url":{"type":"string"}}},"ApiTokenCreateRequest":{"type":"object","properties":{"name":{"type":"string","example":"Deploy token","nullable":true}}},"ApiTokenCreateResponse":{"type":"object","required":["id","name","created_at","token"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"name":{"type":"string"},"token":{"type":"string"}}},"ApiTokenItem":{"type":"object","required":["id","name","created_at"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"last_used_at":{"type":"string","format":"date-time","nullable":true},"name":{"type":"string"},"revoked_at":{"type":"string","format":"date-time","nullable":true}}},"ApplicableShareItem":{"type":"object","required":["token","permission","scope","excluded"],"properties":{"excluded":{"type":"boolean"},"permission":{"type":"string"},"scope":{"type":"string"},"token":{"type":"string"}}},"AuthProviderInfoResponse":{"type":"object","required":["id","requires_state","client_ids"],"properties":{"authorization_url":{"type":"string","nullable":true},"client_ids":{"type":"array","items":{"type":"string"}},"id":{"type":"string"},"name":{"type":"string","nullable":true},"redirect_uri":{"type":"string","nullable":true},"requires_state":{"type":"boolean"},"scopes":{"type":"array","items":{"type":"string"}}}},"AuthProvidersResponse":{"type":"object","required":["providers"],"properties":{"providers":{"type":"array","items":{"$ref":"#/components/schemas/AuthProviderInfoResponse"}}}},"BacklinkInfo":{"type":"object","required":["document_id","title","document_type","link_type","link_count"],"properties":{"document_id":{"type":"string"},"document_type":{"type":"string"},"file_path":{"type":"string","nullable":true},"link_count":{"type":"integer","format":"int64"},"link_text":{"type":"string","nullable":true},"link_type":{"type":"string"},"title":{"type":"string"}}},"BacklinksResponse":{"type":"object","required":["backlinks","total_count"],"properties":{"backlinks":{"type":"array","items":{"$ref":"#/components/schemas/BacklinkInfo"}},"total_count":{"type":"integer","minimum":0}}},"CreateDocumentDekPayload":{"type":"object","description":"DEK payload for document creation","required":["encryptedDek","nonce"],"properties":{"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded encrypted DEK"},"keyVersion":{"type":"integer","format":"int32","description":"Key version"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce"}}},"CreateDocumentRequest":{"type":"object","properties":{"dek":{"allOf":[{"$ref":"#/components/schemas/CreateDocumentDekPayload"}],"nullable":true},"encryptedTitle":{"type":"string","format":"byte","description":"Base64 
encoded encrypted title (for E2EE clients)","nullable":true},"encryptedTitleNonce":{"type":"string","format":"byte","description":"Base64 encoded nonce for encrypted title","nullable":true},"parentId":{"type":"string","format":"uuid","nullable":true},"title":{"type":"string","nullable":true},"type":{"type":"string","nullable":true}}},"CreateGitConfigRequest":{"type":"object","required":["repository_url","auth_type","auth_data"],"properties":{"auth_data":{},"auth_type":{"type":"string"},"auto_sync":{"type":"boolean","nullable":true},"branch_name":{"type":"string","nullable":true},"repository_url":{"type":"string"}}},"CreateRecordBody":{"type":"object","required":["data"],"properties":{"data":{}}},"CreateShareMountRequest":{"type":"object","required":["token"],"properties":{"parent_folder_id":{"type":"string","format":"uuid","nullable":true},"token":{"type":"string"}}},"CreateShareRequest":{"type":"object","required":["documentId"],"properties":{"creatorEncryptedShareKey":{"type":"string","format":"byte","description":"Base64 encoded share key encrypted with creator's KEK (for URL recovery)","nullable":true},"creatorShareKeyNonce":{"type":"string","format":"byte","description":"Base64 encoded nonce for creator_encrypted_share_key","nullable":true},"documentEncryptedDeks":{"type":"object","description":"For folder shares: encrypted DEKs for each document in the folder\nMap of document_id (as string) -> base64 encoded encrypted DEK (nonce prepended)","additionalProperties":{"type":"string"},"nullable":true},"documentId":{"type":"string","format":"uuid"},"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded encrypted DEK (encrypted with share key derived from password)","nullable":true},"expiresAt":{"type":"string","format":"date-time","nullable":true},"kdfParams":{"description":"KDF parameters (e.g., Argon2id settings)","nullable":true},"permission":{"type":"string","nullable":true},"salt":{"type":"string","format":"byte","description":"Base64 encoded salt for key 
derivation","nullable":true}}},"CreateShareResponse":{"type":"object","required":["token","url"],"properties":{"token":{"type":"string"},"url":{"type":"string"}}},"CreateWorkspaceInvitationRequest":{"type":"object","required":["email","role_kind"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"email":{"type":"string"},"expires_at":{"type":"string","format":"date-time","nullable":true},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true}}},"CreateWorkspaceRequest":{"type":"object","required":["name"],"properties":{"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"name":{"type":"string"}}},"CreateWorkspaceRoleRequest":{"type":"object","required":["name","base_role"],"properties":{"base_role":{"type":"string"},"description":{"type":"string","nullable":true},"name":{"type":"string"},"overrides":{"type":"array","items":{"$ref":"#/components/schemas/PermissionOverridePayload"},"nullable":true},"priority":{"type":"integer","format":"int32","nullable":true}}},"DeleteKeyVersionResponse":{"type":"object","required":["workspaceId","keyVersion","deletedCount"],"properties":{"deletedCount":{"type":"integer","format":"int64","minimum":0},"keyVersion":{"type":"integer","format":"int32"},"workspaceId":{"type":"string","format":"uuid"}}},"Document":{"type":"object","required":["id","owner_id","workspace_id","title","type","created_at","updated_at","slug","desired_path"],"properties":{"archived_at":{"type":"string","format":"date-time","nullable":true},"archived_by":{"type":"string","format":"uuid","nullable":true},"archived_parent_id":{"type":"string","format":"uuid","nullable":true},"created_at":{"type":"string","format":"date-time"},"created_by":{"type":"string","format":"uuid","nullable":true},"created_by_plugin":{"type":"string","nullable":true},"desired_path":{"type":"string"},"encryptedTitle":{"type":"string","format":"byte","nullable":true},"encryptedTitleNonce":{"type":"string","format":"byte","nullable":true},"id":{"type":"string","format":"uuid"},"owner_id":{"type":"string","format":"uuid","description":"Legacy alias for `workspace_id` kept for backward compatibility with older clients."},"parent_id":{"type":"string","format":"uuid","nullable":true},"path":{"type":"string","nullable":true},"slug":{"type":"string"},"title":{"type":"string"},"type":{"type":"string"},"updated_at":{"type":"string","format":"date-time"},"workspace_id":{"type":"string","format":"uuid"}}},"DocumentKeyResponse":{"type":"object","required":["documentId","encryptedDek","nonce","keyVersion","createdAt","updatedAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"documentId":{"type":"string","format":"uuid"},"encryptedDek":{"type":"string","format":"byte"},"keyVersion":{"type":"integer","format":"int32"},"nonce":{"type":"string","format":"byte"},"updatedAt":{"type":"string","format":"date-time"}}},"DocumentListResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{"$ref":"#/components/schemas/Document"}}}},"DocumentPatchOperationRequest":{"oneOf":[{"type":"object","required":["offset","op"],"properties":{"encrypted_data":{"type":"string","format":"byte","description":"Base64 encoded encrypted data (for E2EE documents)","nullable":true},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce (required when encrypted_data is 
provided)","nullable":true},"offset":{"type":"integer","minimum":0},"op":{"type":"string","enum":["insert"]},"public_key":{"type":"string","format":"byte","description":"Base64 encoded Ed25519 public key (for E2EE documents)","nullable":true},"signature":{"type":"string","format":"byte","description":"Base64 encoded Ed25519 signature (for E2EE documents)","nullable":true},"text":{"type":"string","description":"Plaintext to insert (for non-E2EE documents)","nullable":true}}},{"type":"object","required":["offset","length","op"],"properties":{"length":{"type":"integer","minimum":0},"offset":{"type":"integer","minimum":0},"op":{"type":"string","enum":["delete"]}}},{"type":"object","required":["offset","length","op"],"properties":{"encrypted_data":{"type":"string","format":"byte","description":"Base64 encoded encrypted data (for E2EE documents)","nullable":true},"length":{"type":"integer","minimum":0},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce (required when encrypted_data is provided)","nullable":true},"offset":{"type":"integer","minimum":0},"op":{"type":"string","enum":["replace"]},"public_key":{"type":"string","format":"byte","description":"Base64 encoded Ed25519 public key (for E2EE documents)","nullable":true},"signature":{"type":"string","format":"byte","description":"Base64 encoded Ed25519 signature (for E2EE documents)","nullable":true},"text":{"type":"string","description":"Plaintext replacement (for non-E2EE documents)","nullable":true}}}],"description":"Patch operation for document content.\nFor plaintext mode: use `text` field.\nFor E2EE mode: use `encrypted_data` and `nonce` fields instead of `text`.","discriminator":{"propertyName":"op"}},"DocumentTagEntry":{"type":"object","description":"Tag entry in document tags response","required":["id","encryptedName","createdAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"encryptedName":{"type":"string","format":"byte","description":"Base64 encoded deterministically encrypted tag name"},"id":{"type":"string","format":"uuid"}}},"DocumentTagsResponse":{"type":"object","description":"Response for GET /api/documents/{id}/tags","required":["tags"],"properties":{"tags":{"type":"array","items":{"$ref":"#/components/schemas/DocumentTagEntry"}}}},"DuplicateDocumentRequest":{"type":"object","properties":{"parent_id":{"type":"string","nullable":true},"title":{"type":"string","nullable":true}}},"EncryptedDekRequest":{"type":"object","description":"Encrypted DEK for a document (request).","required":["encryptedDek","nonce"],"properties":{"encryptedDek":{"type":"string","format":"byte","description":"Base64-encoded encrypted DEK."},"nonce":{"type":"string","format":"byte","description":"Base64-encoded nonce."}}},"EncryptedPrivateKeyResponse":{"type":"object","required":["encryptedPrivateKey","nonce","createdAt","updatedAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"encryptedPrivateKey":{"type":"string","format":"byte"},"nonce":{"type":"string","format":"byte"},"updatedAt":{"type":"string","format":"date-time"}}},"EncryptedTagInput":{"type":"object","description":"Single encrypted tag in request","required":["encryptedName"],"properties":{"encryptedName":{"type":"string","format":"byte","description":"Base64 encoded deterministically encrypted tag name"}}},"EncryptedUpdateEntry":{"type":"object","description":"Encrypted update entry for E2EE documents","required":["seq","data"],"properties":{"data":{"type":"string","format":"byte","description":"Base64 encoded encrypted 
update data"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce for decryption","nullable":true},"publicKey":{"type":"string","format":"byte","description":"Base64 encoded public key of the signer","nullable":true},"seq":{"type":"integer","format":"int64","description":"Sequence number of the update"},"signature":{"type":"string","format":"byte","description":"Base64 encoded signature","nullable":true}}},"EncryptionStatusResponse":{"type":"object","required":["isSetupCompleted"],"properties":{"isSetupCompleted":{"type":"boolean"}}},"GetContentResponse":{"type":"object","description":"Response for GET /api/documents/{id}/content (E2EE encrypted)","required":["content"],"properties":{"content":{"type":"string","format":"byte","description":"Base64 encoded encrypted Yjs snapshot bytes"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce for decryption","nullable":true},"seqAtSnapshot":{"type":"integer","format":"int64","description":"Sequence number at which the snapshot was taken","nullable":true},"updates":{"type":"array","items":{"$ref":"#/components/schemas/EncryptedUpdateEntry"},"description":"Pending encrypted updates since the snapshot","nullable":true}}},"GitConfigResponse":{"type":"object","required":["id","repository_url","branch_name","auth_type","auto_sync","created_at","updated_at"],"properties":{"auth_type":{"type":"string"},"auto_sync":{"type":"boolean"},"branch_name":{"type":"string"},"created_at":{"type":"string","format":"date-time"},"encrypted_auth_data":{"description":"E2EE encrypted auth data (only present for E2EE clients)","nullable":true},"id":{"type":"string","format":"uuid"},"remote_check":{"allOf":[{"$ref":"#/components/schemas/GitRemoteCheckResponse"}],"nullable":true},"repository_url":{"type":"string"},"updated_at":{"type":"string","format":"date-time"}}},"GitRemoteCheckResponse":{"type":"object","required":["ok","message"],"properties":{"message":{"type":"string"},"ok":{"type":"boolean"},"reason":{"type":"string","nullable":true}}},"HealthResp":{"type":"object","required":["status"],"properties":{"status":{"type":"string"}}},"IngestBatchRequest":{"type":"object","required":["events"],"properties":{"events":{"type":"array","items":{"$ref":"#/components/schemas/IngestEventRequest"}}}},"IngestEventRequest":{"type":"object","required":["repo_path","kind"],"properties":{"backend":{"type":"string","nullable":true},"content_hash":{"type":"string","nullable":true},"kind":{"$ref":"#/components/schemas/IngestKindParam"},"payload":{"nullable":true},"repo_path":{"type":"string"}}},"IngestKindParam":{"type":"string","enum":["upsert","delete"]},"InstallFromUrlBody":{"type":"object","required":["url"],"properties":{"token":{"type":"string","nullable":true},"url":{"type":"string"}}},"InstallResponse":{"type":"object","required":["id","version"],"properties":{"id":{"type":"string"},"version":{"type":"string"}}},"KdfParamsResponse":{"type":"object","properties":{"iterations":{"type":"integer","format":"int32","nullable":true,"minimum":0},"memory":{"type":"integer","format":"int32","nullable":true,"minimum":0},"parallelism":{"type":"integer","format":"int32","nullable":true,"minimum":0}}},"KvValueBody":{"type":"object","required":["value"],"properties":{"value":{}}},"KvValueResponse":{"type":"object","required":["value"],"properties":{"value":{}}},"ListFileResponse":{"type":"object","description":"Response for listing files in a document.\nReturns encrypted metadata for client-side decryption to build file 
map.","required":["id","size"],"properties":{"encryptedHash":{"type":"string","description":"SHA256 hash of encrypted file content","nullable":true},"encryptedMetadata":{"type":"string","description":"Base64 encoded encrypted metadata (contains filename, logicalPath, mimeType)","nullable":true},"encryptedMetadataNonce":{"type":"string","description":"Base64 encoded nonce for encrypted metadata","nullable":true},"id":{"type":"string","format":"uuid"},"size":{"type":"integer","format":"int64","description":"File size in bytes"}}},"ListTagsResponse":{"type":"object","description":"Response for GET /api/tags","required":["tags"],"properties":{"tags":{"type":"array","items":{"$ref":"#/components/schemas/TagEntry"}}}},"LoginRequest":{"type":"object","required":["email","password"],"properties":{"email":{"type":"string"},"password":{"type":"string"},"remember_me":{"type":"boolean"}}},"LoginResponse":{"type":"object","required":["access_token","user"],"properties":{"access_token":{"type":"string"},"user":{"$ref":"#/components/schemas/UserResponse"}}},"ManifestItem":{"type":"object","required":["id","version","scope","mounts","frontend","backend","permissions","config","ui","renderers"],"properties":{"author":{"type":"string","nullable":true},"backend":{},"config":{},"frontend":{},"id":{"type":"string"},"mounts":{"type":"array","items":{"type":"string"}},"name":{"type":"string","nullable":true},"permissions":{"type":"array","items":{"type":"string"}},"renderers":{},"repository":{"type":"string","nullable":true},"scope":{"type":"string"},"ui":{},"version":{"type":"string"}}},"MasterKeyBackupResponse":{"type":"object","required":["encryptedKey","salt","kdfType","kdfParams","createdAt","updatedAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"encryptedKey":{"type":"string","format":"byte"},"kdfParams":{"$ref":"#/components/schemas/KdfParamsResponse"},"kdfType":{"type":"string"},"salt":{"type":"string","format":"byte"},"updatedAt":{"type":"string","format":"date-time"}}},"MaterializeResponse":{"type":"object","required":["created"],"properties":{"created":{"type":"integer","format":"int64"}}},"MemberEncryptedKekRequest":{"type":"object","description":"Encrypted KEK for a workspace member (request).","required":["userId","encryptedKek"],"properties":{"encryptedKek":{"type":"string","format":"byte","description":"Base64-encoded encrypted KEK."},"userId":{"type":"string","description":"User ID."}}},"MigrateRequest":{"type":"object","description":"Request to migrate user data to E2EE.","required":["workspaceKeks","documentDeks","encryptedWorkspaceKeks","encryptedDocumentDeks"],"properties":{"documentDeks":{"type":"object","description":"Document DEKs (Data Encryption Keys).\nMaps document_id (string) -> base64-encoded raw DEK.","additionalProperties":{"type":"string"}},"encryptedDocumentDeks":{"type":"object","description":"Encrypted DEKs to store for each document.\nMaps document_id (string) -> encrypted DEK with nonce.","additionalProperties":{"$ref":"#/components/schemas/EncryptedDekRequest"}},"encryptedWorkspaceKeks":{"type":"object","description":"Encrypted workspace KEKs to store for each member.\nMaps workspace_id (string) -> array of member encrypted KEKs.","additionalProperties":{"type":"array","items":{"$ref":"#/components/schemas/MemberEncryptedKekRequest"}}},"workspaceKeks":{"type":"object","description":"Workspace KEKs (Key Encryption Keys).\nMaps workspace_id (string) -> base64-encoded raw 
KEK.","additionalProperties":{"type":"string"}}}},"MigrationResponse":{"type":"object","description":"Response for migration result.","required":["documentsEncrypted","filesEncrypted","updatesCleared","status"],"properties":{"documentsEncrypted":{"type":"integer","description":"Number of documents encrypted.","minimum":0},"filesEncrypted":{"type":"integer","description":"Number of files with encrypted metadata.","minimum":0},"status":{"type":"string","description":"Migration status."},"updatesCleared":{"type":"integer","format":"int64","description":"Total number of Yjs updates cleared.","minimum":0}}},"NeedsMigrationResponse":{"type":"object","required":["needsMigration"],"properties":{"needsMigration":{"type":"boolean"}}},"OAuthLoginRequest":{"type":"object","properties":{"code":{"type":"string","nullable":true},"credential":{"type":"string","nullable":true},"redirect_uri":{"type":"string","nullable":true},"remember_me":{"type":"boolean"},"state":{"type":"string","nullable":true}}},"OAuthStateResponse":{"type":"object","required":["state"],"properties":{"state":{"type":"string"}}},"OutgoingLink":{"type":"object","required":["document_id","title","document_type","link_type"],"properties":{"document_id":{"type":"string"},"document_type":{"type":"string"},"file_path":{"type":"string","nullable":true},"link_text":{"type":"string","nullable":true},"link_type":{"type":"string"},"position_end":{"type":"integer","format":"int32","nullable":true},"position_start":{"type":"integer","format":"int32","nullable":true},"title":{"type":"string"}}},"OutgoingLinksResponse":{"type":"object","required":["links","total_count"],"properties":{"links":{"type":"array","items":{"$ref":"#/components/schemas/OutgoingLink"}},"total_count":{"type":"integer","minimum":0}}},"PatchDocumentContentRequest":{"type":"object","properties":{"operations":{"type":"array","items":{"$ref":"#/components/schemas/DocumentPatchOperationRequest"},"description":"Patch operations. Each operation can be either plaintext (using `text` field)\nor encrypted (using `encryptedData` and `nonce` fields)."},"signature":{"type":"string","format":"byte","description":"Base64 encoded signature for integrity verification (optional for E2EE)","nullable":true}}},"PermissionOverridePayload":{"type":"object","required":["permission","allowed"],"properties":{"allowed":{"type":"boolean"},"permission":{"type":"string"}}},"PublicDocumentSummary":{"type":"object","required":["id","title","updated_at","published_at"],"properties":{"id":{"type":"string","format":"uuid"},"published_at":{"type":"string","format":"date-time"},"title":{"type":"string"},"updated_at":{"type":"string","format":"date-time"}}},"PublicFile":{"type":"object","description":"Public file metadata","required":["id","fileId","originalFilename","logicalFilename","mimeType","size","createdAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"fileId":{"type":"string","format":"uuid"},"id":{"type":"string","format":"uuid"},"logicalFilename":{"type":"string"},"mimeType":{"type":"string"},"originalFilename":{"type":"string"},"size":{"type":"integer","format":"int64"}}},"PublishRequest":{"type":"object","description":"Request to publish a document. 
For E2EE workspaces, plaintext title and content\nmust be provided so public pages can be rendered without decryption.","properties":{"noindex":{"type":"boolean","description":"If true, adds noindex meta tag to prevent search engine indexing (default: true)","nullable":true},"plaintextContent":{"type":"string","description":"Plaintext content (required for E2EE mode)","nullable":true},"plaintextTitle":{"type":"string","description":"Plaintext title (required for E2EE mode)","nullable":true}}},"PublishResponse":{"type":"object","required":["slug","public_url","noindex"],"properties":{"noindex":{"type":"boolean","description":"If true, noindex meta tag is added to prevent search engine indexing"},"public_url":{"type":"string"},"slug":{"type":"string"}}},"RecordsResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{}}}},"RefreshResponse":{"type":"object","required":["access_token"],"properties":{"access_token":{"type":"string"}}},"RegisterPublicKeyRequest":{"type":"object","required":["publicKey","keyType"],"properties":{"keyType":{"type":"string","description":"Key type (x25519, ed25519, or ecdh-p256)","example":"x25519"},"publicKey":{"type":"string","format":"byte","description":"Base64 encoded public key"}}},"RegisterRequest":{"type":"object","required":["email","name","password"],"properties":{"email":{"type":"string"},"name":{"type":"string"},"password":{"type":"string"}}},"RotateDocumentKeyRequest":{"type":"object","description":"Request body for document DEK rotation","required":["encryptedDek","nonce"],"properties":{"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded new encrypted DEK"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce"}}},"RotateDocumentKeyResponse":{"type":"object","description":"Response for document DEK rotation","required":["documentId","newKeyVersion"],"properties":{"documentId":{"type":"string","format":"uuid"},"newKeyVersion":{"type":"integer","format":"int32"}}},"RotateWorkspaceKeyRequest":{"type":"object","description":"Request body for KEK rotation","required":["memberKeys"],"properties":{"memberKeys":{"type":"array","items":{"$ref":"#/components/schemas/RotationMemberKey"},"description":"Encrypted KEKs for all workspace members"}}},"RotateWorkspaceKeyResponse":{"type":"object","description":"Response for KEK rotation","required":["workspaceId","newKeyVersion","keysUpdated"],"properties":{"keysUpdated":{"type":"integer","minimum":0},"newKeyVersion":{"type":"integer","format":"int32"},"workspaceId":{"type":"string","format":"uuid"}}},"RotationMemberKey":{"type":"object","description":"A single member's encrypted KEK for key rotation","required":["userId","encryptedKek"],"properties":{"encryptedKek":{"type":"string","format":"byte","description":"Base64 encoded encrypted KEK for this member"},"userId":{"type":"string","format":"uuid","description":"User ID of the 
member"}}},"SessionResponse":{"type":"object","required":["id","workspace_id","remember_me","created_at","last_seen_at","expires_at","current"],"properties":{"created_at":{"type":"string","format":"date-time"},"current":{"type":"boolean"},"expires_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"ip_address":{"type":"string","nullable":true},"last_seen_at":{"type":"string","format":"date-time"},"remember_me":{"type":"boolean"},"user_agent":{"type":"string","nullable":true},"workspace_id":{"type":"string","format":"uuid"}}},"ShareBrowseResponse":{"type":"object","required":["tree"],"properties":{"tree":{"type":"array","items":{"$ref":"#/components/schemas/ShareBrowseTreeItem"}}}},"ShareBrowseTreeItem":{"type":"object","required":["id","title","type","createdAt","updatedAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"encryptedDek":{"type":"string","format":"byte","description":"Encrypted DEK for this document (base64, nonce prepended)","nullable":true},"id":{"type":"string","format":"uuid"},"parentId":{"type":"string","format":"uuid","nullable":true},"shareToken":{"type":"string","description":"Child share token for documents within a folder share","nullable":true},"title":{"type":"string"},"type":{"type":"string","example":"document"},"updatedAt":{"type":"string","format":"date-time"}}},"ShareDocumentResponse":{"type":"object","required":["id","title","permission"],"properties":{"content":{"type":"string","nullable":true},"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded encrypted DEK (encrypted with share key)","nullable":true},"id":{"type":"string","format":"uuid"},"kdfParams":{"description":"KDF parameters for password-protected shares","nullable":true},"permission":{"type":"string"},"salt":{"type":"string","format":"byte","description":"Base64 encoded salt for password-protected shares","nullable":true},"title":{"type":"string"}}},"ShareItem":{"type":"object","required":["id","token","permission","url","scope"],"properties":{"creatorEncryptedShareKey":{"type":"string","format":"byte","description":"Base64 encoded share key encrypted with creator's KEK (for URL recovery)","nullable":true},"creatorShareKeyNonce":{"type":"string","format":"byte","description":"Base64 encoded nonce for 
creator_encrypted_share_key","nullable":true},"expiresAt":{"type":"string","format":"date-time","nullable":true},"id":{"type":"string","format":"uuid"},"parentShareId":{"type":"string","format":"uuid","nullable":true},"permission":{"type":"string"},"scope":{"type":"string"},"token":{"type":"string"},"url":{"type":"string"}}},"ShareKeyResponse":{"type":"object","required":["shareId","encryptedDek","isPasswordProtected","createdAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"encryptedDek":{"type":"string","format":"byte"},"isPasswordProtected":{"type":"boolean"},"kdfParams":{"allOf":[{"$ref":"#/components/schemas/KdfParamsResponse"}],"nullable":true},"salt":{"type":"string","format":"byte","nullable":true},"shareId":{"type":"string","format":"uuid"}}},"ShareMountItem":{"type":"object","required":["id","token","target_document_id","target_document_type","target_title","permission","created_at"],"properties":{"created_at":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"parent_folder_id":{"type":"string","format":"uuid","nullable":true},"permission":{"type":"string"},"target_document_id":{"type":"string","format":"uuid"},"target_document_type":{"type":"string"},"target_title":{"type":"string"},"token":{"type":"string"}}},"ShareSaltResponse":{"type":"object","description":"Response for share salt challenge (for password-protected shares)","required":["passwordProtected"],"properties":{"kdfParams":{"description":"KDF parameters for key derivation (only present if password-protected)","nullable":true},"passwordProtected":{"type":"boolean","description":"Whether this share is password-protected"},"salt":{"type":"string","format":"byte","description":"Base64 encoded salt for key derivation (only present if password-protected)","nullable":true}}},"SnapshotDetailResponse":{"type":"object","description":"Response for GET /api/documents/{id}/snapshots/{snapshotId}\n- For E2EE documents: content is encrypted, nonce is present\n- For non-E2EE documents: content is plaintext Yjs state, nonce is None","required":["id","content","createdAt"],"properties":{"content":{"type":"string","format":"byte","description":"Base64 encoded Yjs snapshot (encrypted for E2EE, plaintext for non-E2EE)"},"createdAt":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce (present for E2EE 
documents)","nullable":true}}},"SnapshotDiffBaseParam":{"type":"string","enum":["auto","current","previous"]},"SnapshotDiffKind":{"type":"string","enum":["current","snapshot"]},"SnapshotDiffResponse":{"type":"object","required":["base","target","diff"],"properties":{"base":{"$ref":"#/components/schemas/SnapshotDiffSideResponse"},"diff":{"$ref":"#/components/schemas/TextDiffResult"},"target":{"$ref":"#/components/schemas/SnapshotDiffSideResponse"}}},"SnapshotDiffSideResponse":{"type":"object","required":["kind","markdown"],"properties":{"kind":{"$ref":"#/components/schemas/SnapshotDiffKind"},"markdown":{"type":"string"},"snapshot":{"allOf":[{"$ref":"#/components/schemas/SnapshotSummary"}],"nullable":true}}},"SnapshotListResponse":{"type":"object","required":["items"],"properties":{"items":{"type":"array","items":{"$ref":"#/components/schemas/SnapshotSummary"}}}},"SnapshotRestoreResponse":{"type":"object","required":["snapshot"],"properties":{"snapshot":{"$ref":"#/components/schemas/SnapshotSummary"}}},"SnapshotSummary":{"type":"object","required":["id","document_id","label","kind","created_at","byte_size","content_hash"],"properties":{"byte_size":{"type":"integer","format":"int64"},"content_hash":{"type":"string"},"created_at":{"type":"string","format":"date-time"},"created_by":{"type":"string","format":"uuid","nullable":true},"document_id":{"type":"string","format":"uuid"},"id":{"type":"string","format":"uuid"},"kind":{"type":"string"},"label":{"type":"string"},"nonce":{"type":"string","format":"byte","nullable":true},"notes":{"type":"string","nullable":true},"signature":{"type":"string","format":"byte","nullable":true}}},"StoreDocumentKeyRequest":{"type":"object","required":["encryptedDek","nonce","keyVersion"],"properties":{"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded encrypted DEK"},"keyVersion":{"type":"integer","format":"int32","description":"Key version"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce"}}},"StoreEncryptedPrivateKeyRequest":{"type":"object","required":["encryptedPrivateKey","nonce"],"properties":{"encryptedPrivateKey":{"type":"string","format":"byte","description":"Base64 encoded encrypted private key"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce"}}},"StoreMasterKeyBackupRequest":{"type":"object","required":["encryptedKey","salt","kdfType","kdfParams"],"properties":{"encryptedKey":{"type":"string","format":"byte","description":"Base64 encoded encrypted master key"},"kdfParams":{"$ref":"#/components/schemas/KdfParamsResponse"},"kdfType":{"type":"string","description":"KDF type (e.g., \"argon2id\", \"pbkdf2\")","example":"argon2id"},"salt":{"type":"string","format":"byte","description":"Base64 encoded salt"}}},"StorePasswordProtectedShareKeyRequest":{"type":"object","required":["encryptedDek","salt","kdfParams"],"properties":{"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded encrypted DEK"},"kdfParams":{"$ref":"#/components/schemas/KdfParamsResponse"},"salt":{"type":"string","format":"byte","description":"Base64 encoded salt"}}},"StoreShareKeyRequest":{"type":"object","required":["encryptedDek"],"properties":{"encryptedDek":{"type":"string","format":"byte","description":"Base64 encoded encrypted DEK"}}},"StoreWorkspaceKeyRequest":{"type":"object","required":["encryptedKek","keyVersion"],"properties":{"encryptedKek":{"type":"string","format":"byte","description":"Base64 encoded encrypted 
KEK"},"keyVersion":{"type":"integer","format":"int32","description":"Key version (for key rotation tracking)"}}},"SwitchWorkspaceResponse":{"type":"object","required":["access_token"],"properties":{"access_token":{"type":"string"}}},"TagEntry":{"type":"object","description":"Tag entry in list response (E2EE format)","required":["encryptedName","documentCount"],"properties":{"documentCount":{"type":"integer","format":"int64"},"encryptedName":{"type":"string","format":"byte","description":"Base64 encoded deterministically encrypted tag name"}}},"TextDiffLine":{"type":"object","required":["line_type","content"],"properties":{"content":{"type":"string"},"line_type":{"$ref":"#/components/schemas/TextDiffLineType"},"new_line_number":{"type":"integer","format":"int32","nullable":true,"minimum":0},"old_line_number":{"type":"integer","format":"int32","nullable":true,"minimum":0}}},"TextDiffLineType":{"type":"string","enum":["added","deleted","context"]},"TextDiffResult":{"type":"object","required":["file_path","diff_lines"],"properties":{"diff_lines":{"type":"array","items":{"$ref":"#/components/schemas/TextDiffLine"}},"file_path":{"type":"string"},"new_content":{"type":"string","nullable":true},"old_content":{"type":"string","nullable":true}}},"UninstallBody":{"type":"object","required":["id"],"properties":{"id":{"type":"string"}}},"UpdateDocumentContentRequest":{"type":"object","required":["content"],"properties":{"content":{"type":"string","description":"Document content (plaintext or Base64-encoded encrypted Yjs state for E2EE)"},"nonce":{"type":"string","format":"byte","description":"Base64 encoded nonce (required for E2EE content)","nullable":true},"signature":{"type":"string","format":"byte","description":"Base64 encoded signature for integrity verification (optional for E2EE)","nullable":true}}},"UpdateDocumentRequest":{"type":"object","properties":{"parent_id":{"type":"string","nullable":true},"title":{"type":"string","nullable":true}}},"UpdateDocumentTagsRequest":{"type":"object","description":"Request for PUT /api/documents/{id}/tags","required":["encryptedTags"],"properties":{"encryptedTags":{"type":"array","items":{"$ref":"#/components/schemas/EncryptedTagInput"}}}},"UpdateInvitationKekRequest":{"type":"object","description":"Request to update invitation with encrypted KEK","required":["encryptedKekForInvite","kekVersion"],"properties":{"encryptedKekForInvite":{"type":"string","description":"Base64 encoded encrypted KEK (encrypted with key derived from invitation token)"},"kekVersion":{"type":"integer","format":"int32","description":"KEK version at the time of invitation"}}},"UpdateMemberRoleRequest":{"type":"object","required":["role_kind"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true}}},"UpdatePublishSettingsRequest":{"type":"object","description":"Request to update noindex setting for a published document","required":["noindex"],"properties":{"noindex":{"type":"boolean","description":"If true, adds noindex meta tag to prevent search engine 
indexing"}}},"UpdateRecordBody":{"type":"object","required":["patch"],"properties":{"patch":{}}},"UpdateUserShortcutRequest":{"type":"object","properties":{"bindings":{"type":"object"},"leader_key":{"type":"string","example":"","nullable":true}}},"UpdateWorkspaceRequest":{"type":"object","properties":{"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"name":{"type":"string","nullable":true}}},"UpdateWorkspaceRoleRequest":{"type":"object","properties":{"base_role":{"type":"string","nullable":true},"description":{"type":"string","nullable":true},"name":{"type":"string","nullable":true},"overrides":{"type":"array","items":{"$ref":"#/components/schemas/PermissionOverridePayload"},"nullable":true},"priority":{"type":"integer","format":"int32","nullable":true}}},"UploadFileMultipart":{"type":"object","description":"Multipart upload schema for OpenAPI","required":["file"],"properties":{"file":{"type":"string","format":"binary","description":"Encrypted file binary (.rme format)"},"metadata":{"type":"string","description":"JSON metadata containing encrypted file metadata","nullable":true}}},"UploadFileResponse":{"type":"object","description":"Response for file upload (E2EE format per design)","required":["id","url","filename","encryptedHash","size"],"properties":{"encryptedHash":{"type":"string","description":"SHA256 hash of encrypted file content"},"filename":{"type":"string","description":"Storage filename (UUID, for building relative paths)"},"id":{"type":"string","format":"uuid"},"size":{"type":"integer","format":"int64"},"url":{"type":"string","description":"URL to access the file (relative or absolute)"}}},"UploadPublicFileRequest":{"type":"object","description":"Request to upload a public file (decrypted attachment for E2EE documents)","required":["originalFilename","logicalFilename","mimeType","content"],"properties":{"content":{"type":"string","description":"Base64 encoded file content"},"logicalFilename":{"type":"string","description":"Logical filename as it appears in markdown (e.g., \"image.png\" from \"./attachments/image.png\")"},"mimeType":{"type":"string","description":"MIME type of the file"},"originalFilename":{"type":"string","description":"Original filename 
(decrypted)"}}},"UserPublicKeyResponse":{"type":"object","required":["publicKey","keyType","createdAt","updatedAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"keyType":{"type":"string"},"publicKey":{"type":"string","format":"byte"},"updatedAt":{"type":"string","format":"date-time"}}},"UserResponse":{"type":"object","required":["id","email","name","workspaces"],"properties":{"active_workspace":{"allOf":[{"$ref":"#/components/schemas/WorkspaceMembershipResponse"}],"nullable":true},"active_workspace_id":{"type":"string","format":"uuid","nullable":true},"active_workspace_permissions":{"type":"array","items":{"type":"string"}},"email":{"type":"string"},"id":{"type":"string","format":"uuid"},"name":{"type":"string"},"workspaces":{"type":"array","items":{"$ref":"#/components/schemas/WorkspaceMembershipResponse"}}}},"UserShortcutResponse":{"type":"object","required":["bindings"],"properties":{"bindings":{"type":"object"},"leader_key":{"type":"string","example":"","nullable":true},"updated_at":{"type":"string","format":"date-time","nullable":true}}},"WorkspaceInvitationResponse":{"type":"object","required":["id","workspace_id","email","role_kind","invited_by","token","created_at"],"properties":{"accepted_at":{"type":"string","format":"date-time","nullable":true},"accepted_by":{"type":"string","format":"uuid","nullable":true},"created_at":{"type":"string","format":"date-time"},"custom_role_id":{"type":"string","format":"uuid","nullable":true},"email":{"type":"string"},"encryptedKekForInvite":{"type":"string","nullable":true},"expires_at":{"type":"string","format":"date-time","nullable":true},"id":{"type":"string","format":"uuid"},"invited_by":{"type":"string","format":"uuid"},"kekVersion":{"type":"integer","format":"int32","nullable":true},"revoked_at":{"type":"string","format":"date-time","nullable":true},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true},"token":{"type":"string"},"workspace_id":{"type":"string","format":"uuid"}}},"WorkspaceKeyResponse":{"type":"object","required":["id","workspaceId","userId","encryptedKek","keyVersion","createdAt"],"properties":{"createdAt":{"type":"string","format":"date-time"},"encryptedKek":{"type":"string","format":"byte"},"id":{"type":"string","format":"uuid"},"keyVersion":{"type":"integer","format":"int32"},"userId":{"type":"string","format":"uuid"},"workspaceId":{"type":"string","format":"uuid"}}},"WorkspaceKeyVersionResponse":{"type":"object","required":["workspaceId"],"properties":{"keyVersion":{"type":"integer","format":"int32","nullable":true},"workspaceId":{"type":"string","format":"uuid"}}},"WorkspaceMemberResponse":{"type":"object","required":["workspace_id","user_id","email","name","role_kind","is_default"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"email":{"type":"string"},"is_default":{"type":"boolean"},"name":{"type":"string"},"role_kind":{"type":"string"},"system_role":{"type":"string","nullable":true},"user_id":{"type":"string","format":"uuid"},"workspace_id":{"type":"string","format":"uuid"}}},"WorkspaceMembershipResponse":{"type":"object","required":["id","name","slug","is_personal","role_kind","is_default"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"is_default":{"type":"boolean"},"is_personal":{"type":"boolean"},"name":{"type":"string"},"role_kind":{"type":"string"},"slug":{"type":"string"},
"system_role":{"type":"string","nullable":true}}},"WorkspacePermissionsResponse":{"type":"object","required":["workspace_id","permissions"],"properties":{"permissions":{"type":"array","items":{"type":"string"}},"workspace_id":{"type":"string","format":"uuid"}}},"WorkspaceResponse":{"type":"object","required":["id","name","slug","is_personal","role_kind","is_default"],"properties":{"custom_role_id":{"type":"string","format":"uuid","nullable":true},"description":{"type":"string","nullable":true},"icon":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"is_default":{"type":"boolean"},"is_personal":{"type":"boolean"},"name":{"type":"string"},"role_kind":{"type":"string"},"slug":{"type":"string"},"system_role":{"type":"string","nullable":true}}},"WorkspaceRoleResponse":{"type":"object","required":["id","workspace_id","name","base_role","priority","overrides"],"properties":{"base_role":{"type":"string"},"description":{"type":"string","nullable":true},"id":{"type":"string","format":"uuid"},"name":{"type":"string"},"overrides":{"type":"array","items":{"$ref":"#/components/schemas/PermissionOverridePayload"}},"priority":{"type":"integer","format":"int32"},"workspace_id":{"type":"string","format":"uuid"}}}}},"tags":[{"name":"Auth","description":"Authentication"},{"name":"E2EE","description":"End-to-end encryption key management"},{"name":"Documents","description":"Documents management"},{"name":"Files","description":"File management"},{"name":"Sharing","description":"Document sharing"},{"name":"Public Documents","description":"Public pages"},{"name":"Realtime","description":"Yjs WebSocket endpoint (/yjs/:id)"},{"name":"Git","description":"Git config storage (git operations are client-side)"},{"name":"Plugins","description":"Plugins management & data APIs"},{"name":"Storage","description":"Storage ingest APIs"},{"name":"Health","description":"System health checks"}]} diff --git a/app/.dockerignore b/app/.dockerignore index 89faafba..e5623015 100644 --- a/app/.dockerignore +++ b/app/.dockerignore @@ -1,7 +1,45 @@ +# Dependencies and build outputs node_modules dist .output +.nitro +.tanstack +.vinxi + +# Environment and secrets .env +.env.* +!.env.example *.local + +# Logs npm-debug.log* yarn-error.log* +openapi-ts-error-*.log + +# Docker +Dockerfile* +.dockerignore + +# Dev tools config (not needed at runtime) +prettier.config.js +.prettierignore +.cta.json + +# IDE +.idea +.vscode +*.swp +*.swo + +# Test +*.test.ts +*.spec.ts +__tests__ +coverage + +# Misc +*.md +LICENSE +.git +.gitignore diff --git a/app/Dockerfile b/app/Dockerfile index 549fc6a9..efeb287a 100644 --- a/app/Dockerfile +++ b/app/Dockerfile @@ -17,8 +17,35 @@ COPY src ./src RUN npm ci RUN npm run build -RUN npm prune --omit=dev -RUN npm cache clean --force + +# Production deps stage: clean install without dev dependencies +FROM node:22-alpine AS deps +WORKDIR /app + +RUN apk add --no-cache python3 make g++ libc6-compat + +COPY package*.json ./ +RUN npm ci --omit=dev && npm cache clean --force + +# Remove build tools and packages not needed at runtime +RUN rm -rf node_modules/vite \ + node_modules/typescript \ + node_modules/prettier \ + node_modules/@esbuild \ + node_modules/esbuild \ + node_modules/lightningcss* \ + node_modules/@babel \ + node_modules/eslint* \ + node_modules/@typescript-eslint \ + node_modules/@vitejs \ + node_modules/wasm-pandoc \ + node_modules/leveldown \ + node_modules/level \ + node_modules/y-leveldb \ + node_modules/rollup \ + node_modules/@rollup \ + node_modules/caniuse-lite \ + 
node_modules/browserslist # Runtime stage: run the prebuilt TanStack Start server FROM node:22-alpine AS runtime @@ -30,8 +57,8 @@ ENV HOST=0.0.0.0 ENV NITRO_HOST=0.0.0.0 WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules COPY --from=build /app/package*.json ./ -COPY --from=build /app/node_modules ./node_modules COPY --from=build /app/.output ./.output COPY --from=build /app/dist ./dist diff --git a/app/nitro.config.ts b/app/nitro.config.ts index c06d7a71..59f655dd 100644 --- a/app/nitro.config.ts +++ b/app/nitro.config.ts @@ -5,6 +5,7 @@ import { fileURLToPath } from 'node:url' const __dirname = dirname(fileURLToPath(import.meta.url)) export default defineNitroConfig({ + compatibilityDate: '2026-01-01', serverAssets: [ { baseName: 'og-fonts', diff --git a/app/package-lock.json b/app/package-lock.json index 008da57f..d1338bf6 100644 --- a/app/package-lock.json +++ b/app/package-lock.json @@ -6,7 +6,19 @@ "": { "name": "app", "dependencies": { - "@monaco-editor/react": "^4.7.0", + "@codemirror/autocomplete": "^6.20.0", + "@codemirror/commands": "^6.10.1", + "@codemirror/lang-markdown": "^6.5.0", + "@codemirror/language": "^6.12.1", + "@codemirror/language-data": "^6.5.2", + "@codemirror/merge": "^6.11.2", + "@codemirror/search": "^6.5.11", + "@codemirror/state": "^6.5.3", + "@codemirror/view": "^6.39.9", + "@extism/extism": "2.0.0-rc13", + "@isomorphic-git/lightning-fs": "^4.6.2", + "@lezer/markdown": "^1.6.3", + "@noble/curves": "^2.0.1", "@radix-ui/react-alert-dialog": "^1.1.15", "@radix-ui/react-avatar": "^1.1.10", "@radix-ui/react-collapsible": "^1.1.12", @@ -20,48 +32,73 @@ "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-tabs": "^1.1.13", "@radix-ui/react-tooltip": "^1.2.8", + "@replit/codemirror-vim": "^6.3.0", "@resvg/resvg-js": "^2.6.2", - "@tailwindcss/vite": "^4.0.6", - "@tanstack/react-devtools": "^0.7.0", "@tanstack/react-query": "^5.51.21", "@tanstack/react-router": "^1.132.0", - "@tanstack/react-router-devtools": "^1.132.0", "@tanstack/react-router-ssr-query": "^1.131.7", "@tanstack/react-start": "^1.132.0", "@tanstack/react-virtual": "^3.10.8", - "@tanstack/router-plugin": "^1.132.0", + "@types/zxcvbn": "^4.4.5", + "bip39": "^3.1.0", + "buffer": "^6.0.3", + "canonicalize": "^2.1.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", + "hash-wasm": "^4.12.0", "is-hotkey": "^0.2.0", + "isomorphic-git": "^1.36.1", + "jszip": "^3.10.1", + "katex": "^0.16.27", + "libsodium-wrappers": "^0.8.0", "lucide-react": "^0.544.0", - "monaco-editor": "^0.52.2", - "monaco-markdown": "^0.0.12", - "monaco-vim": "^0.4.2", + "minisearch": "^7.2.0", "morphdom": "^2.7.7", "react": "^19.0.0", "react-dom": "^19.0.0", "react-mosaic-component": "^6.1.1", "react-resizable-panels": "^3.0.6", + "rehype-katex": "^7.0.1", + "rehype-sanitize": "^6.0.0", + "rehype-stringify": "^10.0.1", + "remark-breaks": "^4.0.0", + "remark-gfm": "^4.0.1", + "remark-math": "^6.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.1.2", "satori": "^0.18.3", + "shiki": "^3.21.0", "sonner": "^2.0.7", "state-local": "^1.0.7", "tailwind-merge": "^3.0.2", "tailwindcss": "^4.0.6", + "tiny-segmenter": "^0.2.0", "tw-animate-css": "^1.3.6", - "vite-tsconfig-paths": "^5.1.4", + "unified": "^11.0.5", + "unist-util-visit": "^5.0.0", + "wasm-pandoc": "^0.8.0", + "y-codemirror.next": "^0.3.5", "y-indexeddb": "^9.0.12", - "y-monaco": "^0.1.6", "y-websocket": "^1.5.4", - "yjs": "^13.6.27" + "yjs": "^13.6.27", + "zxcvbn": "^4.4.2" }, "devDependencies": { "@hey-api/openapi-ts": "^0.86.10", + 
"@tailwindcss/vite": "^4.0.6", "@tanstack/nitro-v2-vite-plugin": "^1.132.31", + "@tanstack/react-devtools": "^0.7.0", + "@tanstack/react-router-devtools": "^1.132.0", + "@tanstack/router-plugin": "^1.132.0", "@tanstack/virtual-file-routes": "^1.132.31", "@testing-library/dom": "^10.4.0", "@testing-library/react": "^16.2.0", + "@types/hast": "^3.0.4", "@types/is-hotkey": "^0.1.10", + "@types/katex": "^0.16.8", + "@types/libsodium-wrappers": "^0.7.14", + "@types/mdast": "^4.0.4", "@types/node": "^22.18.1", "@types/react": "^19.0.8", "@types/react-dom": "^19.0.3", @@ -77,6 +114,9 @@ "typescript": "^5.7.2", "vite": "^7.1.7", "vite-plugin-pwa": "^1.1.0", + "vite-plugin-top-level-await": "^1.6.0", + "vite-plugin-wasm": "^3.5.0", + "vite-tsconfig-paths": "^5.1.4", "vitest": "^3.0.5", "web-vitals": "^5.1.0" } @@ -1735,6 +1775,12 @@ "node": ">=6.9.0" } }, + "node_modules/@bjorn3/browser_wasi_shim": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@bjorn3/browser_wasi_shim/-/browser_wasi_shim-0.4.2.tgz", + "integrity": "sha512-/iHkCVUG3VbcbmEHn5iIUpIrh7a7WPiwZ3sHy4HZKZzBdSadwdddYDZAII2zBvQYV0Lfi8naZngPCN7WPHI/hA==", + "license": "MIT OR Apache-2.0" + }, "node_modules/@boundaries/elements": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@boundaries/elements/-/elements-1.1.2.tgz", @@ -1778,6 +1824,417 @@ "node": ">=10.0.0" } }, + "node_modules/@codemirror/autocomplete": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.20.0.tgz", + "integrity": "sha512-bOwvTOIJcG5FVo5gUUupiwYh8MioPLQ4UcqbcRf7UQ98X90tCa9E1kZ3Z7tqwpZxYyOvh1YTYbmZE9RTfTp5hg==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/commands": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.10.1.tgz", + "integrity": "sha512-uWDWFypNdQmz2y1LaNJzK7fL7TYKLeUAU0npEC685OKTF3KcQ2Vu3klIM78D7I6wGhktme0lh3CuQLv0ZCrD9Q==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.27.0", + "@lezer/common": "^1.1.0" + } + }, + "node_modules/@codemirror/lang-angular": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@codemirror/lang-angular/-/lang-angular-0.1.4.tgz", + "integrity": "sha512-oap+gsltb/fzdlTQWD6BFF4bSLKcDnlxDsLdePiJpCVNKWXSTAbiiQeYI3UmES+BLAdkmIC1WjyztC1pi/bX4g==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/lang-javascript": "^6.1.2", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.3" + } + }, + "node_modules/@codemirror/lang-cpp": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@codemirror/lang-cpp/-/lang-cpp-6.0.3.tgz", + "integrity": "sha512-URM26M3vunFFn9/sm6rzqrBzDgfWuDixp85uTY49wKudToc2jTHUrKIGGKs+QWND+YLofNNZpxcNGRynFJfvgA==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/cpp": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-css": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-css/-/lang-css-6.3.1.tgz", + "integrity": "sha512-kr5fwBGiGtmz6l0LSJIbno9QrifNMUusivHbnA1H6Dmqy4HZFte3UAICix1VuKo0lMPKQr2rqB+0BkKi/S3Ejg==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + 
"@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.2", + "@lezer/css": "^1.1.7" + } + }, + "node_modules/@codemirror/lang-go": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-go/-/lang-go-6.0.1.tgz", + "integrity": "sha512-7fNvbyNylvqCphW9HD6WFnRpcDjr+KXX/FgqXy5H5ZS0eC5edDljukm/yNgYkwTsgp2busdod50AOTIy6Jikfg==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.6.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/go": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-html": { + "version": "6.4.11", + "resolved": "https://registry.npmjs.org/@codemirror/lang-html/-/lang-html-6.4.11.tgz", + "integrity": "sha512-9NsXp7Nwp891pQchI7gPdTwBuSuT3K65NGTHWHNJ55HjYcHLllr0rbIZNdOzas9ztc1EUVBlHou85FFZS4BNnw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/lang-css": "^6.0.0", + "@codemirror/lang-javascript": "^6.0.0", + "@codemirror/language": "^6.4.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0", + "@lezer/css": "^1.1.0", + "@lezer/html": "^1.3.12" + } + }, + "node_modules/@codemirror/lang-java": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-java/-/lang-java-6.0.2.tgz", + "integrity": "sha512-m5Nt1mQ/cznJY7tMfQTJchmrjdjQ71IDs+55d1GAa8DGaB8JXWsVCkVT284C3RTASaY43YknrK2X3hPO/J3MOQ==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/java": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-javascript": { + "version": "6.2.4", + "resolved": "https://registry.npmjs.org/@codemirror/lang-javascript/-/lang-javascript-6.2.4.tgz", + "integrity": "sha512-0WVmhp1QOqZ4Rt6GlVGwKJN3KW7Xh4H2q8ZZNGZaP6lRdxXJzmjm4FqvmOojVj6khWJHIb9sp7U/72W7xQgqAA==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.6.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0", + "@lezer/javascript": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-jinja": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-jinja/-/lang-jinja-6.0.0.tgz", + "integrity": "sha512-47MFmRcR8UAxd8DReVgj7WJN1WSAMT7OJnewwugZM4XiHWkOjgJQqvEM1NpMj9ALMPyxmlziEI1opH9IaEvmaw==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.2.0", + "@lezer/lr": "^1.4.0" + } + }, + "node_modules/@codemirror/lang-json": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.2.tgz", + "integrity": "sha512-x2OtO+AvwEHrEwR0FyyPtfDUiloG3rnVTSZV1W8UteaLL8/MajQd8DpvUb2YVzC+/T18aSDv0H9mu+xw0EStoQ==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/json": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-less": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-less/-/lang-less-6.0.2.tgz", + "integrity": "sha512-EYdQTG22V+KUUk8Qq582g7FMnCZeEHsyuOJisHRft/mQ+ZSZ2w51NupvDUHiqtsOy7It5cHLPGfHQLpMh9bqpQ==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-css": "^6.2.0", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-liquid": { + "version": "6.3.1", + "resolved": 
"https://registry.npmjs.org/@codemirror/lang-liquid/-/lang-liquid-6.3.1.tgz", + "integrity": "sha512-S/jE/D7iij2Pu70AC65ME6AYWxOOcX20cSJvaPgY5w7m2sfxsArAcUAuUgm/CZCVmqoi9KiOlS7gj/gyLipABw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.1" + } + }, + "node_modules/@codemirror/lang-markdown": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.5.0.tgz", + "integrity": "sha512-0K40bZ35jpHya6FriukbgaleaqzBLZfOh7HuzqbMxBXkbYMJDxfF39c23xOgxFezR+3G+tR2/Mup+Xk865OMvw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.7.1", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.3.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.2.1", + "@lezer/markdown": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-php": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-php/-/lang-php-6.0.2.tgz", + "integrity": "sha512-ZKy2v1n8Fc8oEXj0Th0PUMXzQJ0AIR6TaZU+PbDHExFwdu+guzOA4jmCHS1Nz4vbFezwD7LyBdDnddSJeScMCA==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/php": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-python": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-python/-/lang-python-6.2.1.tgz", + "integrity": "sha512-IRjC8RUBhn9mGR9ywecNhB51yePWCGgvHfY1lWN/Mrp3cKuHr0isDKia+9HnvhiWNnMpbGhWrkhuWOc09exRyw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.3.2", + "@codemirror/language": "^6.8.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.1", + "@lezer/python": "^1.1.4" + } + }, + "node_modules/@codemirror/lang-rust": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-rust/-/lang-rust-6.0.2.tgz", + "integrity": "sha512-EZaGjCUegtiU7kSMvOfEZpaCReowEf3yNidYu7+vfuGTm9ow4mthAparY5hisJqOHmJowVH3Upu+eJlUji6qqA==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/rust": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-sass": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-sass/-/lang-sass-6.0.2.tgz", + "integrity": "sha512-l/bdzIABvnTo1nzdY6U+kPAC51czYQcOErfzQ9zSm9D8GmNPD0WTW8st/CJwBTPLO8jlrbyvlSEcN20dc4iL0Q==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-css": "^6.2.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.2", + "@lezer/sass": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-sql": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-sql/-/lang-sql-6.10.0.tgz", + "integrity": "sha512-6ayPkEd/yRw0XKBx5uAiToSgGECo/GY2NoJIHXIIQh1EVwLuKoU8BP/qK0qH5NLXAbtJRLuT73hx7P9X34iO4w==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-vue": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@codemirror/lang-vue/-/lang-vue-0.1.3.tgz", + "integrity": 
"sha512-QSKdtYTDRhEHCfo5zOShzxCmqKJvgGrZwDQSdbvCRJ5pRLWBS7pD/8e/tH44aVQT6FKm0t6RVNoSUWHOI5vNug==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-html": "^6.0.0", + "@codemirror/lang-javascript": "^6.1.2", + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.1" + } + }, + "node_modules/@codemirror/lang-wast": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-wast/-/lang-wast-6.0.2.tgz", + "integrity": "sha512-Imi2KTpVGm7TKuUkqyJ5NRmeFWF7aMpNiwHnLQe0x9kmrxElndyH0K6H/gXtWwY6UshMRAhpENsgfpSwsgmC6Q==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-xml": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-xml/-/lang-xml-6.1.0.tgz", + "integrity": "sha512-3z0blhicHLfwi2UgkZYRPioSgVTo9PV5GP5ducFH6FaHy0IAJRg+ixj5gTR1gnT/glAIC8xv4w2VL1LoZfs+Jg==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.4.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/xml": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-yaml": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-yaml/-/lang-yaml-6.1.2.tgz", + "integrity": "sha512-dxrfG8w5Ce/QbT7YID7mWZFKhdhsaTNOYjOkSIMt1qmC4VQnXSDSYVHHHn8k6kJUfIhtLo8t1JJgltlxWdsITw==", + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.2.0", + "@lezer/lr": "^1.0.0", + "@lezer/yaml": "^1.0.0" + } + }, + "node_modules/@codemirror/language": { + "version": "6.12.1", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.12.1.tgz", + "integrity": "sha512-Fa6xkSiuGKc8XC8Cn96T+TQHYj4ZZ7RdFmXA3i9xe/3hLHfwPZdM+dqfX0Cp0zQklBKhVD8Yzc8LS45rkqcwpQ==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.5.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/language-data": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/@codemirror/language-data/-/language-data-6.5.2.tgz", + "integrity": "sha512-CPkWBKrNS8stYbEU5kwBwTf3JB1kghlbh4FSAwzGW2TEscdeHHH4FGysREW86Mqnj3Qn09s0/6Ea/TutmoTobg==", + "license": "MIT", + "dependencies": { + "@codemirror/lang-angular": "^0.1.0", + "@codemirror/lang-cpp": "^6.0.0", + "@codemirror/lang-css": "^6.0.0", + "@codemirror/lang-go": "^6.0.0", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/lang-java": "^6.0.0", + "@codemirror/lang-javascript": "^6.0.0", + "@codemirror/lang-jinja": "^6.0.0", + "@codemirror/lang-json": "^6.0.0", + "@codemirror/lang-less": "^6.0.0", + "@codemirror/lang-liquid": "^6.0.0", + "@codemirror/lang-markdown": "^6.0.0", + "@codemirror/lang-php": "^6.0.0", + "@codemirror/lang-python": "^6.0.0", + "@codemirror/lang-rust": "^6.0.0", + "@codemirror/lang-sass": "^6.0.0", + "@codemirror/lang-sql": "^6.0.0", + "@codemirror/lang-vue": "^0.1.1", + "@codemirror/lang-wast": "^6.0.0", + "@codemirror/lang-xml": "^6.0.0", + "@codemirror/lang-yaml": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/legacy-modes": "^6.4.0" + } + }, + "node_modules/@codemirror/legacy-modes": { + 
"version": "6.5.2", + "resolved": "https://registry.npmjs.org/@codemirror/legacy-modes/-/legacy-modes-6.5.2.tgz", + "integrity": "sha512-/jJbwSTazlQEDOQw2FJ8LEEKVS72pU0lx6oM54kGpL8t/NJ2Jda3CZ4pcltiKTdqYSRk3ug1B3pil1gsjA6+8Q==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0" + } + }, + "node_modules/@codemirror/lint": { + "version": "6.9.2", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.9.2.tgz", + "integrity": "sha512-sv3DylBiIyi+xKwRCJAAsBZZZWo82shJ/RTMymLabAdtbkV5cSKwWDeCgtUq3v8flTaXS2y1kKkICuRYtUswyQ==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.35.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/merge": { + "version": "6.11.2", + "resolved": "https://registry.npmjs.org/@codemirror/merge/-/merge-6.11.2.tgz", + "integrity": "sha512-NO5EJd2rLRbwVWLgMdhIntDIhfDtMOKYEZgqV5WnkNUS2oXOCVWLPjG/kgl/Jth2fGiOuG947bteqxP9nBXmMg==", + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/highlight": "^1.0.0", + "style-mod": "^4.1.0" + } + }, + "node_modules/@codemirror/search": { + "version": "6.5.11", + "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.11.tgz", + "integrity": "sha512-KmWepDE6jUdL6n8cAAqIpRmLPBZ5ZKnicE8oGU/s3QrAVID+0VhLFrzUucVKHG5035/BSykhExDL/Xm7dHthiA==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/state": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.5.3.tgz", + "integrity": "sha512-MerMzJzlXogk2fxWFU1nKp36bY5orBG59HnPiz0G9nLRebWa0zXuv2siH6PLIHBvv5TH8CkQRqjBs0MlxCZu+A==", + "license": "MIT", + "dependencies": { + "@marijn/find-cluster-break": "^1.0.0" + } + }, + "node_modules/@codemirror/view": { + "version": "6.39.9", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.39.9.tgz", + "integrity": "sha512-miGSIfBOKC1s2oHoa80dp+BjtsL8sXsrgGlQnQuOcfvaedcQUtqddTmKbJSDkLl4mkgPvZyXuKic2HDNYcJLYA==", + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.5.0", + "crelt": "^1.0.6", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, "node_modules/@csstools/color-helpers": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz", @@ -1917,9 +2374,10 @@ } }, "node_modules/@emnapi/core": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.5.0.tgz", - "integrity": "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.1.tgz", + "integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==", + "dev": true, "license": "MIT", "optional": true, "dependencies": { @@ -1928,9 +2386,10 @@ } }, "node_modules/@emnapi/runtime": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.5.0.tgz", - "integrity": "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "dev": true, "license": "MIT", "optional": true, 
"dependencies": { @@ -1941,6 +2400,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, "license": "MIT", "optional": true, "dependencies": { @@ -2517,6 +2977,12 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@extism/extism": { + "version": "2.0.0-rc13", + "resolved": "https://registry.npmjs.org/@extism/extism/-/extism-2.0.0-rc13.tgz", + "integrity": "sha512-iQ3mrPKOC0WMZ94fuJrKbJmMyz4LQ9Abf8gd4F5ShxKWa+cRKcVzk0EqRQsp5xXsQ2dO3zJTiA6eTc4Ihf7k+A==", + "license": "BSD-3-Clause" + }, "node_modules/@floating-ui/core": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz", @@ -2746,6 +3212,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, "license": "ISC", "dependencies": { "minipass": "^7.0.4" @@ -2754,6 +3221,27 @@ "node": ">=18.0.0" } }, + "node_modules/@isomorphic-git/idb-keyval": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@isomorphic-git/idb-keyval/-/idb-keyval-3.3.2.tgz", + "integrity": "sha512-r8/AdpiS0/WJCNR/t/gsgL+M8NMVj/ek7s60uz3LmpCaTF2mEVlZJlB01ZzalgYzRLXwSPC92o+pdzjM7PN/pA==", + "license": "Apache-2.0" + }, + "node_modules/@isomorphic-git/lightning-fs": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/@isomorphic-git/lightning-fs/-/lightning-fs-4.6.2.tgz", + "integrity": "sha512-RS/oa1UBnoUFe56bsjOEgoUUReYKQzYUlQnbERRRNv9s9KmjyWuuylPV+YgsWirR2oONKaipWYMebVQ8SAe55Q==", + "license": "MIT", + "dependencies": { + "@isomorphic-git/idb-keyval": "3.3.2", + "isomorphic-textencoder": "1.0.1", + "just-debounce-it": "1.1.0", + "just-once": "1.1.0" + }, + "bin": { + "superblocktxt": "src/superblocktxt.js" + } + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.13", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", @@ -2817,6 +3305,183 @@ "dev": true, "license": "MIT" }, + "node_modules/@lezer/common": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.5.0.tgz", + "integrity": "sha512-PNGcolp9hr4PJdXR4ix7XtixDrClScvtSCYW3rQG106oVMOOI+jFb+0+J3mbeL/53g1Zd6s0kJzaw6Ri68GmAA==", + "license": "MIT" + }, + "node_modules/@lezer/cpp": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@lezer/cpp/-/cpp-1.1.5.tgz", + "integrity": "sha512-DIhSXmYtJKLehrjzDFN+2cPt547ySQ41nA8yqcDf/GxMc+YM736xqltFkvADL2M0VebU5I+3+4ks2Vv+Kyq3Aw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/css": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@lezer/css/-/css-1.3.0.tgz", + "integrity": "sha512-pBL7hup88KbI7hXnZV3PQsn43DHy6TWyzuyk2AO9UyoXcDltvIdqWKE1dLL/45JVZ+YZkHe1WVHqO6wugZZWcw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/go": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@lezer/go/-/go-1.0.1.tgz", + "integrity": "sha512-xToRsYxwsgJNHTgNdStpcvmbVuKxTapV0dM0wey1geMMRc9aggoVyKgzYp41D2/vVOx+Ii4hmE206kvxIXBVXQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + 
"@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/highlight": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.3.tgz", + "integrity": "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.3.0" + } + }, + "node_modules/@lezer/html": { + "version": "1.3.13", + "resolved": "https://registry.npmjs.org/@lezer/html/-/html-1.3.13.tgz", + "integrity": "sha512-oI7n6NJml729m7pjm9lvLvmXbdoMoi2f+1pwSDJkl9d68zGr7a9Btz8NdHTGQZtW2DA25ybeuv/SyDb9D5tseg==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/java": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@lezer/java/-/java-1.1.3.tgz", + "integrity": "sha512-yHquUfujwg6Yu4Fd1GNHCvidIvJwi/1Xu2DaKl/pfWIA2c1oXkVvawH3NyXhCaFx4OdlYBVX5wvz2f7Aoa/4Xw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/javascript": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@lezer/javascript/-/javascript-1.5.4.tgz", + "integrity": "sha512-vvYx3MhWqeZtGPwDStM2dwgljd5smolYD2lR2UyFcHfxbBQebqx8yjmFmxtJ/E6nN6u1D9srOiVWm3Rb4tmcUA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.1.3", + "@lezer/lr": "^1.3.0" + } + }, + "node_modules/@lezer/json": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.3.tgz", + "integrity": "sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.7.tgz", + "integrity": "sha512-wNIFWdSUfX9Jc6ePMzxSPVgTVB4EOfDIwLQLWASyiUdHKaMsiilj9bYiGkGQCKVodd0x6bgQCV207PILGFCF9Q==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/markdown": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.6.3.tgz", + "integrity": "sha512-jpGm5Ps+XErS+xA4urw7ogEGkeZOahVQF21Z6oECF0sj+2liwZopd2+I8uH5I/vZsRuuze3OxBREIANLf6KKUw==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.5.0", + "@lezer/highlight": "^1.0.0" + } + }, + "node_modules/@lezer/php": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@lezer/php/-/php-1.0.5.tgz", + "integrity": "sha512-W7asp9DhM6q0W6DYNwIkLSKOvxlXRrif+UXBMxzsJUuqmhE7oVU+gS3THO4S/Puh7Xzgm858UNaFi6dxTP8dJA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.1.0" + } + }, + "node_modules/@lezer/python": { + "version": "1.1.18", + "resolved": "https://registry.npmjs.org/@lezer/python/-/python-1.1.18.tgz", + "integrity": "sha512-31FiUrU7z9+d/ElGQLJFXl+dKOdx0jALlP3KEOsGTex8mvj+SoE1FgItcHWK/axkxCHGUSpqIHt6JAWfWu9Rhg==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/rust": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@lezer/rust/-/rust-1.0.2.tgz", + "integrity": 
"sha512-Lz5sIPBdF2FUXcWeCu1//ojFAZqzTQNRga0aYv6dYXqJqPfMdCAI0NzajWUd4Xijj1IKJLtjoXRPMvTKWBcqKg==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/sass": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@lezer/sass/-/sass-1.1.0.tgz", + "integrity": "sha512-3mMGdCTUZ/84ArHOuXWQr37pnf7f+Nw9ycPUeKX+wu19b7pSMcZGLbaXwvD2APMBDOGxPmpK/O6S1v1EvLoqgQ==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/xml": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@lezer/xml/-/xml-1.0.6.tgz", + "integrity": "sha512-CdDwirL0OEaStFue/66ZmFSeppuL6Dwjlk8qk153mSQwiSH/Dlri4GNymrNWnUmPl2Um7QfV1FO9KFUyX3Twww==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/yaml": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@lezer/yaml/-/yaml-1.0.3.tgz", + "integrity": "sha512-GuBLekbw9jDBDhGur82nuwkxKQ+a3W5H0GfaAthDXcAu+XdpS43VlnxA9E9hllkpSP5ellRDKjLLj7Lu9Wr6xA==", + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.4.0" + } + }, "node_modules/@mapbox/node-pre-gyp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-2.0.0.tgz", @@ -2852,33 +3517,17 @@ "node": ">=10" } }, - "node_modules/@monaco-editor/loader": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.5.0.tgz", - "integrity": "sha512-hKoGSM+7aAc7eRTRjpqAZucPmoNOC4UUbknb/VNoTkEIkCPhqV8LfbsgM1webRM7S/z21eHEx9Fkwx8Z/C/+Xw==", - "license": "MIT", - "dependencies": { - "state-local": "^1.0.6" - } - }, - "node_modules/@monaco-editor/react": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.7.0.tgz", - "integrity": "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA==", - "license": "MIT", - "dependencies": { - "@monaco-editor/loader": "^1.5.0" - }, - "peerDependencies": { - "monaco-editor": ">= 0.25.0 < 1", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } + "node_modules/@marijn/find-cluster-break": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", + "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", + "license": "MIT" }, "node_modules/@napi-rs/wasm-runtime": { "version": "0.2.12", "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, "license": "MIT", "optional": true, "dependencies": { @@ -2887,6 +3536,33 @@ "@tybys/wasm-util": "^0.10.0" } }, + "node_modules/@noble/curves": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-2.0.1.tgz", + "integrity": "sha512-vs1Az2OOTBiP4q0pwjW5aF0xp9n4MxVrmkFBxc6EKZc6ddYx5gaZiAsZoq0uRRXWbi3AT/sBqn05eRPtn1JCPw==", + "license": "MIT", + "dependencies": { + "@noble/hashes": "2.0.1" + }, + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + 
"node_modules/@noble/hashes": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-2.0.1.tgz", + "integrity": "sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==", + "license": "MIT", + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -4333,6 +5009,19 @@ "integrity": "sha512-/RVXdLvJxLg4QKvMoM5WlwNR9ViO9z8B/qPcc+C0Sa/teJY7QG7kJ441DwzOjMYEY7GmU4dj5EcGHIkKZiQZCA==", "license": "MIT" }, + "node_modules/@replit/codemirror-vim": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/@replit/codemirror-vim/-/codemirror-vim-6.3.0.tgz", + "integrity": "sha512-aTx931ULAMuJx6xLf7KQDOL7CxD+Sa05FktTDrtLaSy53uj01ll3Zf17JdKsriER248oS55GBzg0CfCTjEneAQ==", + "license": "MIT", + "peerDependencies": { + "@codemirror/commands": "6.x.x", + "@codemirror/language": "6.x.x", + "@codemirror/search": "6.x.x", + "@codemirror/state": "6.x.x", + "@codemirror/view": "6.x.x" + } + }, "node_modules/@resvg/resvg-js": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/@resvg/resvg-js/-/resvg-js-2.6.2.tgz", @@ -4727,6 +5416,24 @@ } } }, + "node_modules/@rollup/plugin-virtual": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-virtual/-/plugin-virtual-3.0.2.tgz", + "integrity": "sha512-10monEYsBp3scM4/ND4LNH5Rxvh3e/cVeL3jWTgZ2SrQ+BmUoQcopVQvnaMcOnykb1VkxUFuDAN+0FnpTFRy2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, "node_modules/@rollup/pluginutils": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", @@ -5050,6 +5757,73 @@ "dev": true, "license": "MIT" }, + "node_modules/@shikijs/core": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.21.0.tgz", + "integrity": "sha512-AXSQu/2n1UIQekY8euBJlvFYZIw0PHY63jUzGbrOma4wPxzznJXTXkri+QcHeBNaFxiiOljKxxJkVSoB3PjbyA==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.21.0", + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4", + "hast-util-to-html": "^9.0.5" + } + }, + "node_modules/@shikijs/engine-javascript": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.21.0.tgz", + "integrity": "sha512-ATwv86xlbmfD9n9gKRiwuPpWgPENAWCLwYCGz9ugTJlsO2kOzhOkvoyV/UD+tJ0uT7YRyD530x6ugNSffmvIiQ==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.21.0", + "@shikijs/vscode-textmate": "^10.0.2", + "oniguruma-to-es": "^4.3.4" + } + }, + "node_modules/@shikijs/engine-oniguruma": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.21.0.tgz", + "integrity": "sha512-OYknTCct6qiwpQDqDdf3iedRdzj6hFlOPv5hMvI+hkWfCKs5mlJ4TXziBG9nyabLwGulrUjHiCq3xCspSzErYQ==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.21.0", + "@shikijs/vscode-textmate": "^10.0.2" + } + }, + "node_modules/@shikijs/langs": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.21.0.tgz", + "integrity": "sha512-g6mn5m+Y6GBJ4wxmBYqalK9Sp0CFkUqfNzUy2pJglUginz6ZpWbaWjDB4fbQ/8SHzFjYbtU6Ddlp1pc+PPNDVA==", + "license": 
"MIT", + "dependencies": { + "@shikijs/types": "3.21.0" + } + }, + "node_modules/@shikijs/themes": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.21.0.tgz", + "integrity": "sha512-BAE4cr9EDiZyYzwIHEk7JTBJ9CzlPuM4PchfcA5ao1dWXb25nv6hYsoDiBq2aZK9E3dlt3WB78uI96UESD+8Mw==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "3.21.0" + } + }, + "node_modules/@shikijs/types": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.21.0.tgz", + "integrity": "sha512-zGrWOxZ0/+0ovPY7PvBU2gIS9tmhSUUt30jAcNV0Bq0gb2S98gwfjIs1vxlmH5zM7/4YxLamT6ChlqqAJmPPjA==", + "license": "MIT", + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vscode-textmate": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", + "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", + "license": "MIT" + }, "node_modules/@shuding/opentype.js": { "version": "1.4.0-beta.0", "resolved": "https://registry.npmjs.org/@shuding/opentype.js/-/opentype.js-1.4.0-beta.0.tgz", @@ -5096,6 +5870,7 @@ "version": "2.4.3", "resolved": "https://registry.npmjs.org/@solid-primitives/event-listener/-/event-listener-2.4.3.tgz", "integrity": "sha512-h4VqkYFv6Gf+L7SQj+Y6puigL/5DIi7x5q07VZET7AWcS+9/G3WfIE9WheniHWJs51OEkRB43w6lDys5YeFceg==", + "dev": true, "license": "MIT", "dependencies": { "@solid-primitives/utils": "^6.3.2" @@ -5108,6 +5883,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/@solid-primitives/keyboard/-/keyboard-1.3.3.tgz", "integrity": "sha512-9dQHTTgLBqyAI7aavtO+HnpTVJgWQA1ghBSrmLtMu1SMxLPDuLfuNr+Tk5udb4AL4Ojg7h9JrKOGEEDqsJXWJA==", + "dev": true, "license": "MIT", "dependencies": { "@solid-primitives/event-listener": "^2.4.3", @@ -5122,6 +5898,7 @@ "version": "1.5.2", "resolved": "https://registry.npmjs.org/@solid-primitives/rootless/-/rootless-1.5.2.tgz", "integrity": "sha512-9HULb0QAzL2r47CCad0M+NKFtQ+LrGGNHZfteX/ThdGvKIg2o2GYhBooZubTCd/RTu2l2+Nw4s+dEfiDGvdrrQ==", + "dev": true, "license": "MIT", "dependencies": { "@solid-primitives/utils": "^6.3.2" @@ -5134,6 +5911,7 @@ "version": "6.3.2", "resolved": "https://registry.npmjs.org/@solid-primitives/utils/-/utils-6.3.2.tgz", "integrity": "sha512-hZ/M/qr25QOCcwDPOHtGjxTD8w2mNyVAYvcfgwzBHq2RwNqHNdDNsMZYap20+ruRwW4A3Cdkczyoz0TSxLCAPQ==", + "dev": true, "license": "MIT", "peerDependencies": { "solid-js": "^1.6.12" @@ -5169,10 +5947,244 @@ "sourcemap-codec": "^1.4.8" } }, + "node_modules/@swc/core": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.15.8.tgz", + "integrity": "sha512-T8keoJjXaSUoVBCIjgL6wAnhADIb09GOELzKg10CjNg+vLX48P93SME6jTfte9MZIm5m+Il57H3rTSk/0kzDUw==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3", + "@swc/types": "^0.1.25" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/swc" + }, + "optionalDependencies": { + "@swc/core-darwin-arm64": "1.15.8", + "@swc/core-darwin-x64": "1.15.8", + "@swc/core-linux-arm-gnueabihf": "1.15.8", + "@swc/core-linux-arm64-gnu": "1.15.8", + "@swc/core-linux-arm64-musl": "1.15.8", + "@swc/core-linux-x64-gnu": "1.15.8", + "@swc/core-linux-x64-musl": "1.15.8", + "@swc/core-win32-arm64-msvc": "1.15.8", + "@swc/core-win32-ia32-msvc": "1.15.8", + "@swc/core-win32-x64-msvc": 
"1.15.8" + }, + "peerDependencies": { + "@swc/helpers": ">=0.5.17" + }, + "peerDependenciesMeta": { + "@swc/helpers": { + "optional": true + } + } + }, + "node_modules/@swc/core-darwin-arm64": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.8.tgz", + "integrity": "sha512-M9cK5GwyWWRkRGwwCbREuj6r8jKdES/haCZ3Xckgkl8MUQJZA3XB7IXXK1IXRNeLjg6m7cnoMICpXv1v1hlJOg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-darwin-x64": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.8.tgz", + "integrity": "sha512-j47DasuOvXl80sKJHSi2X25l44CMc3VDhlJwA7oewC1nV1VsSzwX+KOwE5tLnfORvVJJyeiXgJORNYg4jeIjYQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.8.tgz", + "integrity": "sha512-siAzDENu2rUbwr9+fayWa26r5A9fol1iORG53HWxQL1J8ym4k7xt9eME0dMPXlYZDytK5r9sW8zEA10F2U3Xwg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.8.tgz", + "integrity": "sha512-o+1y5u6k2FfPYbTRUPvurwzNt5qd0NTumCTFscCNuBksycloXY16J8L+SMW5QRX59n4Hp9EmFa3vpvNHRVv1+Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-musl": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.8.tgz", + "integrity": "sha512-koiCqL09EwOP1S2RShCI7NbsQuG6r2brTqUYE7pV7kZm9O17wZ0LSz22m6gVibpwEnw8jI3IE1yYsQTVpluALw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-gnu": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.8.tgz", + "integrity": "sha512-4p6lOMU3bC+Vd5ARtKJ/FxpIC5G8v3XLoPEZ5s7mLR8h7411HWC/LmTXDHcrSXRC55zvAVia1eldy6zDLz8iFQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.8.tgz", + "integrity": "sha512-z3XBnbrZAL+6xDGAhJoN4lOueIxC/8rGrJ9tg+fEaeqLEuAtHSW2QHDHxDwkxZMjuF/pZ6MUTjHjbp8wLbuRLA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.8.tgz", + "integrity": "sha512-djQPJ9Rh9vP8GTS/Df3hcc6XP6xnG5c8qsngWId/BLA9oX6C7UzCPAn74BG/wGb9a6j4w3RINuoaieJB3t+7iQ==", + 
"cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.8.tgz", + "integrity": "sha512-/wfAgxORg2VBaUoFdytcVBVCgf1isWZIEXB9MZEUty4wwK93M/PxAkjifOho9RN3WrM3inPLabICRCEgdHpKKQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-x64-msvc": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.8.tgz", + "integrity": "sha512-GpMePrh9Sl4d61o4KAHOOv5is5+zt6BEXCOCgs/H0FLGeii7j9bWDE8ExvKFy2GRRZVNR1ugsnzaGWHKM6kuzA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@swc/types": { + "version": "0.1.25", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.25.tgz", + "integrity": "sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3" + } + }, + "node_modules/@swc/wasm": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.15.8.tgz", + "integrity": "sha512-RG2BxGbbsjtddFCo1ghKH6A/BMXbY1eMBfpysV0lJMCpI4DZOjW1BNBnxvBt7YsYmlJtmy5UXIg9/4ekBTFFaQ==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/@tailwindcss/node": { "version": "4.1.14", "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.14.tgz", "integrity": "sha512-hpz+8vFk3Ic2xssIA3e01R6jkmsAhvkQdXlEbRTk6S10xDAtiQiM3FyvZVGsucefq764euO/b8WUW9ysLdThHw==", + "dev": true, "license": "MIT", "dependencies": { "@jridgewell/remapping": "^2.3.4", @@ -5188,6 +6200,7 @@ "version": "4.1.14", "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.14.tgz", "integrity": "sha512-23yx+VUbBwCg2x5XWdB8+1lkPajzLmALEfMb51zZUBYaYVPDQvBSD/WYDqiVyBIo2BZFa3yw1Rpy3G2Jp+K0dw==", + "dev": true, "hasInstallScript": true, "license": "MIT", "dependencies": { @@ -5219,6 +6232,7 @@ "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5235,6 +6249,7 @@ "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5251,6 +6266,7 @@ "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5267,6 +6283,7 @@ "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5283,6 +6300,7 @@ "cpu": [ "arm" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5299,6 +6317,7 @@ "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5315,6 +6334,7 @@ "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5331,6 +6351,7 @@ "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5347,6 +6368,7 @@ "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5371,6 +6393,7 @@ "cpu": [ "wasm32" 
], + "dev": true, "license": "MIT", "optional": true, "dependencies": { @@ -5392,6 +6415,7 @@ "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5408,6 +6432,7 @@ "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ @@ -5421,6 +6446,7 @@ "version": "4.1.14", "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.14.tgz", "integrity": "sha512-BoFUoU0XqgCUS1UXWhmDJroKKhNXeDzD7/XwabjkDIAbMnc4ULn5e2FuEuBbhZ6ENZoSYzKlzvZ44Yr6EUDUSA==", + "dev": true, "license": "MIT", "dependencies": { "@tailwindcss/node": "4.1.14", @@ -5435,6 +6461,7 @@ "version": "0.6.15", "resolved": "https://registry.npmjs.org/@tanstack/devtools/-/devtools-0.6.15.tgz", "integrity": "sha512-rfa1Kb0wrvsn4eYsCnYXuTzK2BEmHXCEmk3kuGdbZrHo2UTqjxFRK4E1NEGxd1yUgkzbn1VToO3GzDdt9mQTwA==", + "dev": true, "license": "MIT", "dependencies": { "@solid-primitives/keyboard": "^1.3.3", @@ -5459,6 +6486,7 @@ "version": "0.3.2", "resolved": "https://registry.npmjs.org/@tanstack/devtools-event-bus/-/devtools-event-bus-0.3.2.tgz", "integrity": "sha512-yJT2As/drc+Epu0nsqCsJaKaLcaNGufiNxSlp/+/oeTD0jsBxF9/PJBfh66XVpYXkKr97b8689mSu7QMef0Rrw==", + "dev": true, "license": "MIT", "dependencies": { "ws": "^8.18.3" @@ -5475,6 +6503,7 @@ "version": "0.3.5", "resolved": "https://registry.npmjs.org/@tanstack/devtools-ui/-/devtools-ui-0.3.5.tgz", "integrity": "sha512-DU8OfLntngnph+Tb7ivQvh4F4w+rDu6r01fXlhjq/Nmgdr0gtsOox4kdmyq5rCs+C6aPgP3M7+BE+fv4dN+VvA==", + "dev": true, "license": "MIT", "dependencies": { "clsx": "^2.1.1", @@ -5566,6 +6595,7 @@ "version": "0.7.1", "resolved": "https://registry.npmjs.org/@tanstack/react-devtools/-/react-devtools-0.7.1.tgz", "integrity": "sha512-PluCZ9ytBVKJrgaNYC9wcNfwwm6ysGZampUiXCs9A4HmXA7moAfcxbLKnQ+EFsuF4bg5Qqn/9ofhKxpvP1Fcmg==", + "dev": true, "license": "MIT", "dependencies": { "@tanstack/devtools": "0.6.15" @@ -5629,6 +6659,7 @@ "version": "1.132.31", "resolved": "https://registry.npmjs.org/@tanstack/react-router-devtools/-/react-router-devtools-1.132.31.tgz", "integrity": "sha512-EiO+t6s1K8igqqtxCO0GLG6KoJgaIsv9JAZMcJV+z/BspElGQwGDBzTtWYcHd9NOP2Yw7OCkAhM8ihwMbzWJNQ==", + "dev": true, "license": "MIT", "dependencies": { "@tanstack/router-devtools-core": "1.132.31", @@ -5828,6 +6859,7 @@ "version": "1.132.31", "resolved": "https://registry.npmjs.org/@tanstack/router-devtools-core/-/router-devtools-core-1.132.31.tgz", "integrity": "sha512-GwymJRm21hkluQMjOkXn+mBNPMyWlpzQut8mqEObh1cnF3zUsYT5YkCFV8ePA0jb/YVdjK/AfCAgSlhyIa09IA==", + "dev": true, "license": "MIT", "dependencies": { "clsx": "^2.1.1", @@ -6209,6 +7241,7 @@ "version": "0.10.1", "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, "license": "MIT", "optional": true, "dependencies": { @@ -6277,7 +7310,16 @@ "@types/deep-eql": "*" } }, - "node_modules/@types/deep-eql": { + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/deep-eql": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", @@ -6290,6 +7332,15 
@@ "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "license": "MIT" }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/@types/is-hotkey": { "version": "0.1.10", "resolved": "https://registry.npmjs.org/@types/is-hotkey/-/is-hotkey-0.1.10.tgz", @@ -6311,6 +7362,34 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/katex": { + "version": "0.16.8", + "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.8.tgz", + "integrity": "sha512-trgaNyfU+Xh2Tc+ABIb44a5AYUpicB3uwirOioeOkNPPbmgRNtcWyDeeFRzjPZENO9Vq8gvVqfhaaXWLlevVwg==", + "license": "MIT" + }, + "node_modules/@types/libsodium-wrappers": { + "version": "0.7.14", + "resolved": "https://registry.npmjs.org/@types/libsodium-wrappers/-/libsodium-wrappers-0.7.14.tgz", + "integrity": "sha512-5Kv68fXuXK0iDuUir1WPGw2R9fOZUlYlSAa0ztMcL0s0BfIDTqg9GXz8K30VJpPP3sxWhbolnQma2x+/TfkzDQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, "node_modules/@types/node": { "version": "22.18.8", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.8.tgz", @@ -6325,6 +7404,7 @@ "version": "19.2.0", "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.0.tgz", "integrity": "sha512-1LOH8xovvsKsCBq1wnT4ntDUdCJKmnEakhsuoUSy6ExlHCkGP2hqnatagYTgFk6oeL0VU31u7SNjunPN+GchtA==", + "devOptional": true, "license": "MIT", "dependencies": { "csstype": "^3.0.2" @@ -6334,6 +7414,7 @@ "version": "19.2.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.0.tgz", "integrity": "sha512-brtBs0MnE9SMx7px208g39lRmC5uHZs96caOJfTjFcYSLHNamvaSMfJNagChVNkup2SdtOxKX1FDBkRSJe1ZAg==", + "devOptional": true, "license": "MIT", "peerDependencies": { "@types/react": "^19.2.0" @@ -6353,6 +7434,18 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/@types/zxcvbn": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@types/zxcvbn/-/zxcvbn-4.4.5.tgz", + "integrity": "sha512-FZJgC5Bxuqg7Rhsm/bx6gAruHHhDQ55r+s0JhDh8CQ16fD7NsJJ+p8YMMQDhSQoIrSmjpqqYWA96oQVMNkjRyA==", + "license": "MIT" + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.45.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.45.0.tgz", @@ -6624,6 +7717,12 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "license": "ISC" + }, "node_modules/@unrs/resolver-binding-android-arm-eabi": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", @@ -7106,7 +8205,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dev": true, "license": "MIT", "dependencies": { "event-target-shim": "^5.0.0" @@ -7133,6 +8231,31 @@ "node": ">=6" } }, + "node_modules/abstract-leveldown/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/acorn": { "version": "8.15.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", @@ -7300,31 +8423,6 @@ "node": ">= 14" } }, - "node_modules/archiver-utils/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/archiver-utils/node_modules/readable-stream": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", @@ -7342,31 +8440,6 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/archiver/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/archiver/node_modules/readable-stream": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", @@ -7580,6 +8653,12 @@ "license": "MIT", "optional": true }, + "node_modules/async-lock": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", + "integrity": "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==", + "license": "MIT" + }, "node_modules/async-sema": { "version": 
"3.1.1", "resolved": "https://registry.npmjs.org/async-sema/-/async-sema-3.1.1.tgz", @@ -7601,7 +8680,6 @@ "version": "1.0.7", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", - "dev": true, "license": "MIT", "dependencies": { "possible-typed-array-names": "^1.0.0" @@ -7682,6 +8760,16 @@ "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -7700,7 +8788,6 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "devOptional": true, "funding": [ { "type": "github", @@ -7758,6 +8845,27 @@ "file-uri-to-path": "1.0.0" } }, + "node_modules/bip39": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bip39/-/bip39-3.1.0.tgz", + "integrity": "sha512-c9kiwdk45Do5GL0vJMe7tS95VjCii65mYAH7DfWl3uW8AVzXKQVUm64i3hzVybBDMp9r7j9iNxR85+ul8MdN/A==", + "license": "ISC", + "dependencies": { + "@noble/hashes": "^1.2.0" + } + }, + "node_modules/bip39/node_modules/@noble/hashes": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", + "license": "MIT", + "engines": { + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -7821,9 +8929,9 @@ } }, "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", "funding": [ { "type": "github", @@ -7839,10 +8947,9 @@ } ], "license": "MIT", - "optional": true, "dependencies": { "base64-js": "^1.3.1", - "ieee754": "^1.1.13" + "ieee754": "^1.2.1" } }, "node_modules/buffer-crc32": { @@ -7951,7 +9058,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.0", @@ -7970,7 +9076,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -7984,7 +9089,6 @@ "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.2", @@ -8036,6 +9140,25 @@ ], "license": "CC-BY-4.0" }, + "node_modules/canonicalize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/canonicalize/-/canonicalize-2.1.0.tgz", + "integrity": "sha512-F705O3xrsUtgt98j7leetNhTWPe+5S72rlL5O4jA1pKqBVQ/dT1O1D6PFxmSXvc0SUOinWS57DKx0I3CHrXJHQ==", + "license": "Apache-2.0", + "bin": { + "canonicalize": "bin/canonicalize.js" + } + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/chai": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", @@ -8070,6 +9193,36 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/check-error": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", @@ -8150,6 +9303,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=18" @@ -8183,6 +9337,12 @@ "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", "license": "MIT" }, + "node_modules/clean-git-ref": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", + "integrity": "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==", + "license": "Apache-2.0" + }, "node_modules/clipboardy": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-4.0.0.tgz", @@ -8333,6 +9493,16 @@ "color-support": "bin.js" } }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/commander": { "version": "14.0.1", "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.1.tgz", @@ -8384,31 +9554,6 @@ "node": ">= 14" } }, - "node_modules/compress-commons/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/compress-commons/node_modules/readable-stream": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", @@ -8490,14 +9635,12 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true, "license": "MIT" }, "node_modules/crc-32": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", - "dev": true, "license": "Apache-2.0", "bin": { "crc32": "bin/crc32.njs" @@ -8520,31 +9663,6 @@ "node": ">= 14" } }, - "node_modules/crc32-stream/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/crc32-stream/node_modules/readable-stream": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", @@ -8562,6 +9680,12 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, + "node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "license": "MIT" + }, "node_modules/croner": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/croner/-/croner-9.1.0.tgz", @@ -8699,6 +9823,7 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "devOptional": true, "license": "MIT" }, "node_modules/data-urls": { @@ -8828,6 +9953,34 @@ "dev": true, "license": "MIT" }, + "node_modules/decode-named-character-reference": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz", + "integrity": "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==", + "license": "MIT", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/deep-eql": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", @@ -8904,7 +10057,6 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "dev": true, "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", @@ -8977,7 +10129,6 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -8994,6 +10145,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.1.tgz", "integrity": "sha512-ecqj/sy1jcK1uWrwpR67UhYrIFQ+5WlGxth34WquCbamhFA6hkkwiu37o6J5xCHdo1oixJRfVRw+ywV+Hq/0Aw==", + "devOptional": true, "license": "Apache-2.0", "engines": { "node": ">=8" @@ -9005,15 +10157,34 @@ "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", "license": "MIT" }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/diff": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.2.tgz", - "integrity": "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg==", + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.3.tgz", + "integrity": "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ==", "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" } }, + "node_modules/diff3": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/diff3/-/diff3-0.0.3.tgz", + "integrity": "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g==", + "license": "MIT" + }, "node_modules/dnd-core": { "version": "16.0.1", "resolved": "https://registry.npmjs.org/dnd-core/-/dnd-core-16.0.1.tgz", @@ -9146,7 +10317,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, 
"license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.1", @@ -9260,6 +10430,7 @@ "version": "5.18.3", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "dev": true, "license": "MIT", "dependencies": { "graceful-fs": "^4.2.4", @@ -9377,7 +10548,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -9387,7 +10557,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -9404,7 +10573,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -9883,7 +11051,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -9893,7 +11060,6 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "dev": true, "license": "MIT", "engines": { "node": ">=0.8.x" @@ -9962,6 +11128,12 @@ "integrity": "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==", "license": "MIT" }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -10006,6 +11178,12 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-text-encoding": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz", + "integrity": "sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==", + "license": "Apache-2.0" + }, "node_modules/fast-uri": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", @@ -10169,7 +11347,6 @@ "version": "0.3.5", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", - "dev": true, "license": "MIT", "dependencies": { "is-callable": "^1.2.7" @@ -10249,7 +11426,6 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, "license": "MIT", "funding": { "url": 
"https://github.com/sponsors/ljharb" @@ -10319,7 +11495,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.2", @@ -10367,7 +11542,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, "license": "MIT", "dependencies": { "dunder-proto": "^1.0.1", @@ -10549,12 +11723,14 @@ "version": "0.1.2", "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==", + "dev": true, "license": "MIT" }, "node_modules/goober": { "version": "2.1.16", "resolved": "https://registry.npmjs.org/goober/-/goober-2.1.16.tgz", "integrity": "sha512-erjk19y1U33+XAMe1VTvIONHYoSqE4iS7BYUZfHaqeohLmnC0FdxEh7rQU+6MZ4OajItzjZFSRtVANrQwNq6/g==", + "dev": true, "license": "MIT", "peerDependencies": { "csstype": "^3.0.10" @@ -10564,7 +11740,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -10577,6 +11752,7 @@ "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, "license": "ISC" }, "node_modules/graphemer": { @@ -10603,9 +11779,9 @@ } }, "node_modules/h3": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/h3/-/h3-1.15.4.tgz", - "integrity": "sha512-z5cFQWDffyOe4vQ9xIqNfCZdV4p//vy6fBnr8Q1AWnVZ0teurKMG66rLj++TKwKPUP3u7iMUvrvKaEUiQw2QWQ==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/h3/-/h3-1.15.5.tgz", + "integrity": "sha512-xEyq3rSl+dhGX2Lm0+eFQIAzlDN6Fs0EcC4f7BNUmzaRX/PTzeuM+Tr2lHB8FoXggsQIeXLj8EDVgs5ywxyxmg==", "dev": true, "license": "MIT", "dependencies": { @@ -10614,9 +11790,9 @@ "defu": "^6.1.4", "destr": "^2.0.5", "iron-webcrypto": "^1.2.1", - "node-mock-http": "^1.0.2", + "node-mock-http": "^1.0.4", "radix3": "^1.1.2", - "ufo": "^1.6.1", + "ufo": "^1.6.3", "uncrypto": "^0.1.3" } }, @@ -10732,7 +11908,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "dev": true, "license": "MIT", "dependencies": { "es-define-property": "^1.0.0" @@ -10761,7 +11936,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -10774,7 +11948,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" @@ -10786,11 +11959,16 @@ "url": 
"https://github.com/sponsors/ljharb" } }, + "node_modules/hash-wasm": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/hash-wasm/-/hash-wasm-4.12.0.tgz", + "integrity": "sha512-+/2B2rYLb48I/evdOIhP+K/DD2ca2fgBjp6O+GBEnCDk2e4rpeXIK8GvIyRPjTezgmWn9gmKwkQjjx6BtqDHVQ==", + "license": "MIT" + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, "license": "MIT", "dependencies": { "function-bind": "^1.1.2" @@ -10799,6 +11977,185 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-from-dom": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-dom/-/hast-util-from-dom-5.0.1.tgz", + "integrity": "sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==", + "license": "ISC", + "dependencies": { + "@types/hast": "^3.0.0", + "hastscript": "^9.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-html": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz", + "integrity": "sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.1.0", + "hast-util-from-parse5": "^8.0.0", + "parse5": "^7.0.0", + "vfile": "^6.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-html-isomorphic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hast-util-from-html-isomorphic/-/hast-util-from-html-isomorphic-2.0.0.tgz", + "integrity": "sha512-zJfpXq44yff2hmE0XmwEOzdWin5xwH+QIhMLOScpX91e/NSGPsAzNCvLQDIEPyO2TXi+lBmU6hjLIhV8MwP2kw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-from-dom": "^5.0.0", + "hast-util-from-html": "^2.0.0", + "unist-util-remove-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz", + "integrity": "sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^9.0.0", + "property-information": "^7.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-is-element": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz", + "integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-sanitize": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-5.0.2.tgz", + "integrity": "sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "unist-util-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-html": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", + "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^3.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "zwitch": "^2.0.4" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-text": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz", + "integrity": "sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "hast-util-is-element": "^3.0.0", + "unist-util-find-after": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hastscript": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", + "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hex-rgb": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/hex-rgb/-/hex-rgb-4.3.0.tgz", @@ -10846,6 +12203,16 @@ "node": ">=18" } }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": 
"sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/htmlparser2": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-10.0.0.tgz", @@ -10983,7 +12350,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "devOptional": true, "funding": [ { "type": "github", @@ -11004,7 +12370,6 @@ "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, "license": "MIT", "engines": { "node": ">= 4" @@ -11066,7 +12431,6 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "devOptional": true, "license": "ISC" }, "node_modules/internal-slot": { @@ -11229,7 +12593,6 @@ "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -11466,6 +12829,18 @@ "node": ">=0.10.0" } }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", @@ -11593,7 +12968,6 @@ "version": "1.1.15", "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", - "dev": true, "license": "MIT", "dependencies": { "which-typed-array": "^1.1.16" @@ -11687,7 +13061,6 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "dev": true, "license": "MIT" }, "node_modules/isbot": { @@ -11706,6 +13079,62 @@ "dev": true, "license": "ISC" }, + "node_modules/isomorphic-git": { + "version": "1.36.1", + "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.36.1.tgz", + "integrity": "sha512-fC8SRT8MwoaXDK8G4z5biPEbqf2WyEJUb2MJ2ftSd39/UIlsnoZxLGux+lae0poLZO4AEcx6aUVOh5bV+P8zFA==", + "license": "MIT", + "dependencies": { + "async-lock": "^1.4.1", + "clean-git-ref": "^2.0.1", + "crc-32": "^1.2.0", + "diff3": "0.0.3", + "ignore": "^5.1.4", + "minimisted": "^2.0.0", + "pako": "^1.0.10", + "pify": "^4.0.1", + "readable-stream": "^4.0.0", + "sha.js": "^2.4.12", + "simple-get": "^4.0.1" + }, + "bin": { + "isogit": "cli.cjs" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/isomorphic-git/node_modules/pako": { + "version": "1.0.11", + "resolved": 
"https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "license": "(MIT AND Zlib)" + }, + "node_modules/isomorphic-git/node_modules/readable-stream": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", + "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "license": "MIT", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/isomorphic-textencoder": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-textencoder/-/isomorphic-textencoder-1.0.1.tgz", + "integrity": "sha512-676hESgHullDdHDsj469hr+7t3i/neBKU9J7q1T4RHaWwLAsaQnywC0D1dIUId0YZ+JtVrShzuBk1soo0+GVcQ==", + "license": "MIT", + "dependencies": { + "fast-text-encoding": "^1.0.0" + } + }, "node_modules/isomorphic.js": { "version": "0.2.5", "resolved": "https://registry.npmjs.org/isomorphic.js/-/isomorphic.js-0.2.5.tgz", @@ -11754,6 +13183,7 @@ "version": "2.6.1", "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "devOptional": true, "license": "MIT", "bin": { "jiti": "lib/jiti-cli.mjs" @@ -11892,6 +13322,97 @@ "node": ">=0.10.0" } }, + "node_modules/jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "license": "(MIT OR GPL-3.0-or-later)", + "dependencies": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "node_modules/jszip/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/jszip/node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "license": "(MIT AND Zlib)" + }, + "node_modules/jszip/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/jszip/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/jszip/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/just-debounce-it": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-debounce-it/-/just-debounce-it-1.1.0.tgz", + "integrity": "sha512-87Nnc0qZKgBZuhFZjYVjSraic0x7zwjhaTMrCKlj0QYKH6lh0KbFzVnfu6LHan03NO7J8ygjeBeD0epejn5Zcg==", + "license": "MIT" + }, + "node_modules/just-once": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-once/-/just-once-1.1.0.tgz", + "integrity": "sha512-+rZVpl+6VyTilK7vB/svlMPil4pxqIJZkbnN7DKZTOzyXfun6ZiFeq2Pk4EtCEHZ0VU4EkdFzG8ZK5F3PErcDw==", + "license": "MIT" + }, + "node_modules/katex": { + "version": "0.16.27", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.27.tgz", + "integrity": "sha512-aeQoDkuRWSqQN6nSvVCEFvfXdqo1OQiCmmW1kc9xSdjutPv7BGO7pqY9sQRJpMOGrEdfDgF2TfRXe5eUAD2Waw==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "license": "MIT", + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, "node_modules/keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -12015,24 +13536,49 @@ "node": ">=6" } }, - "node_modules/level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==", - "deprecated": "Superseded by abstract-level (https://github.com/Level/community#faq)", - "license": "MIT", - "optional": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/level-errors": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", - "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", - "deprecated": "Superseded by abstract-level (https://github.com/Level/community#faq)", - "license": "MIT", - "optional": true, + "node_modules/level-codec/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/level-concat-iterator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", + "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==", + "deprecated": "Superseded by abstract-level (https://github.com/Level/community#faq)", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/level-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", + "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "deprecated": "Superseded by abstract-level (https://github.com/Level/community#faq)", + "license": "MIT", + "optional": true, "dependencies": { "errno": "~0.1.1" }, @@ -12069,6 +13615,31 @@ "ltgt": "^2.1.2" } }, + "node_modules/level-js/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/level-packager": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-5.1.1.tgz", @@ -12177,10 +13748,41 @@ "url": "https://github.com/sponsors/dmonad" } }, + "node_modules/libsodium": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/libsodium/-/libsodium-0.8.0.tgz", + "integrity": "sha512-GQ4Sg0/Z0Ui6ZvKeTd8bH7VAAqk1ZHZDAo/pcuSi0uPbIN6LYAAotR0GEYb8v+y4/tSsXZPr06D6hhqKd7tnoQ==", + "license": "ISC" + }, + "node_modules/libsodium-wrappers": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/libsodium-wrappers/-/libsodium-wrappers-0.8.0.tgz", + "integrity": "sha512-PVyXAtP1nmpQrDKAVnA8pir0f7bj7vmMGs7mb+0OCSJ+BOfLNBb5hPy2GHfrx6cQ+Co9fMliR5R0WRbVuMllNA==", + "license": "ISC", + "dependencies": { + "libsodium": "^0.8.0" + } + }, + "node_modules/lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } + }, + "node_modules/lie/node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "license": "MIT" + }, "node_modules/lightningcss": { "version": "1.30.1", "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", + "devOptional": true, "license": "MPL-2.0", "dependencies": { "detect-libc": "^2.0.3" @@ -12546,6 +14148,16 @@ "dev": true, "license": "MIT" }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -12596,64 +14208,911 @@ "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", "dev": true, "license": "MIT", - "bin": { - 
"lz-string": "bin/bin.js" + "bin": { + "lz-string": "bin/bin.js" + } + }, + "node_modules/magic-string": { + "version": "0.30.19", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", + "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/magicast": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", + "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.25.4", + "@babel/types": "^7.25.4", + "source-map-js": "^1.2.0" + } + }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", + "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", + "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", + "license": "MIT", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-math": 
{ + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-math/-/mdast-util-math-3.0.0.tgz", + "integrity": "sha512-Tl9GBNeG/AhJnQM221bJR2HPvLOSnLE/T9cJI9tlc6zwQk2nPk/4f0cHkOdEixQPC/j8UtKDdITswvLAy1OZ1w==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "longest-streak": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.1.0", + "unist-util-remove-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-newline-to-break": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-newline-to-break/-/mdast-util-newline-to-break-2.0.0.tgz", + "integrity": "sha512-MbgeFca0hLYIEx/2zGsszCSEJJ1JSCdiY5xQxRcLDDGa8EPvlLPupJ4DSajbMPAnC0je8jfb9TiUATnxxrHUog==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-find-and-replace": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdn-data": { + "version": "2.12.2", + "resolved": 
"https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", + "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromark": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "license": "MIT", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + 
"micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", + "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-math": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-math/-/micromark-extension-math-3.1.0.tgz", + "integrity": "sha512-lvEqd+fHjATVs+2v/8kg9i5Q0AP2k85H0WUOwpIVvUML8BapsMvh1XAogmQjOCsLpoKRCVQqEkQBB3NhVBcsOg==", + "license": "MIT", + "dependencies": { + "@types/katex": "^0.16.0", + "devlop": "^1.0.0", + "katex": "^0.16.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": 
"sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + 
"micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/magic-string": { - "version": "0.30.19", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", - "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": 
"sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.5" + "micromark-util-types": "^2.0.0" } }, - "node_modules/magicast": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", - "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", - "dev": true, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.4", - "@babel/types": "^7.25.4", - "source-map-js": "^1.2.0" + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT", - "engines": { - "node": ">= 0.4" + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/mdn-data": { - "version": "2.12.2", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", - "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", - "dev": true, - "license": "CC0-1.0" - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "license": "MIT" }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": 
"sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" }, "node_modules/micromatch": { "version": "4.0.8", @@ -12734,6 +15193,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -12751,25 +15222,41 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/minimisted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/minimisted/-/minimisted-2.0.1.tgz", + "integrity": "sha512-1oPjfuLQa2caorJUM8HV8lGgWCc0qqAO1MNv/k05G4qslmsndV/5WdNZrqCiyqiz3wohia2Ij2B7w2Dr7/IyrA==", + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5" + } + }, "node_modules/minipass": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, "license": "ISC", "engines": { "node": ">=16 || 14 >=14.17" } }, + "node_modules/minisearch": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.2.0.tgz", + "integrity": "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==", + "license": "MIT" + }, "node_modules/minizlib": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", + "dev": true, "license": "MIT", "dependencies": { "minipass": "^7.1.2" @@ -12810,37 +15297,6 @@ "pathe": "^2.0.1" } }, - "node_modules/monaco-editor": { - "version": "0.52.2", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz", - "integrity": "sha512-GEQWEZmfkOGLdd3XK8ryrfWz3AIP8YymVXiPHEdewrUq7mh0qrKrfHLNCXcbB6sTnMLnOZ3ztSiKcciFUkIJwQ==", - "license": "MIT" - }, - "node_modules/monaco-markdown": { - "version": "0.0.12", - "resolved": "https://registry.npmjs.org/monaco-markdown/-/monaco-markdown-0.0.12.tgz", - "integrity": "sha512-KkGheL2pUazZJY2DfBqpHuMQOjILkbYGNtF5MzpT3ZWupKjunkpL7ByKTllBQGKKcVNt5nuRHl7YsKFskWlc6Q==", - "license": "MIT", - "dependencies": { - "monaco-editor": "0.30.1", - "string-similarity": "^3.0.0" - } - }, - 
"node_modules/monaco-markdown/node_modules/monaco-editor": { - "version": "0.30.1", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.30.1.tgz", - "integrity": "sha512-B/y4+b2O5G2gjuxIFtCE2EkM17R2NM7/3F8x0qcPsqy4V83bitJTIO4TIeZpYlzu/xy6INiY/+84BEm6+7Cmzg==", - "license": "MIT" - }, - "node_modules/monaco-vim": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/monaco-vim/-/monaco-vim-0.4.2.tgz", - "integrity": "sha512-rdbQC3O2rmpwX2Orzig/6gZjZfH7q7TIeB+uEl49sa+QyNm3jCKJOw5mwxBdFzTqbrPD+URfg6A2lEkuL5kymw==", - "license": "MIT", - "peerDependencies": { - "monaco-editor": "*" - } - }, "node_modules/morphdom": { "version": "2.7.7", "resolved": "https://registry.npmjs.org/morphdom/-/morphdom-2.7.7.tgz", @@ -13151,9 +15607,9 @@ } }, "node_modules/node-mock-http": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/node-mock-http/-/node-mock-http-1.0.3.tgz", - "integrity": "sha512-jN8dK25fsfnMrVsEhluUTPkBFY+6ybu7jSB1n+ri/vOGjJxU8J9CZhpSGkHXSkFjtUhbmoncG/YG9ta5Ludqog==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/node-mock-http/-/node-mock-http-1.0.4.tgz", + "integrity": "sha512-8DY+kFsDkNXy1sJglUfuODx1/opAGJGyrTuFqEoN90oRc2Vk0ZbD4K2qmKXBBEhZQzdKHIVfEJpDU8Ak2NJEvQ==", "dev": true, "license": "MIT" }, @@ -13398,7 +15854,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, "license": "ISC", "dependencies": { "wrappy": "1" @@ -13420,6 +15875,23 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/oniguruma-parser": { + "version": "0.12.1", + "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.12.1.tgz", + "integrity": "sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==", + "license": "MIT" + }, + "node_modules/oniguruma-to-es": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.3.4.tgz", + "integrity": "sha512-3VhUGN3w2eYxnTzHn+ikMI+fp/96KoRSVK9/kMTcFqj1NRDh2IhQCKvYxDnWePKRXY/AqH+Fuiyb7VHSzBjHfA==", + "license": "MIT", + "dependencies": { + "oniguruma-parser": "^0.12.1", + "regex": "^6.0.1", + "regex-recursion": "^6.0.2" + } + }, "node_modules/open": { "version": "8.4.2", "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", @@ -13745,6 +16217,15 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/pkg-types": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", @@ -13761,7 +16242,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -13871,7 +16351,6 @@ "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.6.0" @@ 
-13881,7 +16360,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true, "license": "MIT" }, "node_modules/prop-types": { @@ -13901,6 +16379,16 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", "license": "MIT" }, + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", @@ -14405,6 +16893,30 @@ "node": ">=4" } }, + "node_modules/regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/regex/-/regex-6.1.0.tgz", + "integrity": "sha512-6VwtthbV4o/7+OaAF9I5L5V3llLEsoPyq9P1JVXkedTP33c7MfCG0/5NOPcSJn0TzXcG9YUrR0gQSWioew3LDg==", + "license": "MIT", + "dependencies": { + "regex-utilities": "^2.3.0" + } + }, + "node_modules/regex-recursion": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz", + "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==", + "license": "MIT", + "dependencies": { + "regex-utilities": "^2.3.0" + } + }, + "node_modules/regex-utilities": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/regex-utilities/-/regex-utilities-2.3.0.tgz", + "integrity": "sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==", + "license": "MIT" + }, "node_modules/regexp.prototype.flags": { "version": "1.5.4", "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", @@ -14464,6 +16976,151 @@ "regjsparser": "bin/parser" } }, + "node_modules/rehype-katex": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/rehype-katex/-/rehype-katex-7.0.1.tgz", + "integrity": "sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/katex": "^0.16.0", + "hast-util-from-html-isomorphic": "^2.0.0", + "hast-util-to-text": "^4.0.0", + "katex": "^0.16.0", + "unist-util-visit-parents": "^6.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-sanitize": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/rehype-sanitize/-/rehype-sanitize-6.0.0.tgz", + "integrity": "sha512-CsnhKNsyI8Tub6L4sm5ZFsme4puGfc6pYylvXo1AeqaGbjOYyzNv3qZPwvs0oMJ39eryyeOdmxwUIo94IpEhqg==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-sanitize": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-stringify": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-10.0.1.tgz", + "integrity": "sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==", + "license": "MIT", + "dependencies": { + "@types/hast": 
"^3.0.0", + "hast-util-to-html": "^9.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-breaks": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/remark-breaks/-/remark-breaks-4.0.0.tgz", + "integrity": "sha512-IjEjJOkH4FuJvHZVIW0QCDWxcG96kCq7An/KVH2NfJe6rKZU2AsHeB3OEjPNRxi4QC34Xdx7I2KGYn6IpT7gxQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-newline-to-break": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-gfm": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", + "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + "micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-math": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/remark-math/-/remark-math-6.0.0.tgz", + "integrity": "sha512-MMqgnP74Igy+S3WwnhQ7kqGlEerTETXMvJhrUzDikVZ2/uogJCb+WHUg97hK9/jcfc0dkD73s3LN8zU49cTEtA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-math": "^3.0.0", + "micromark-extension-math": "^3.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", + "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -14681,7 +17338,6 @@ "version": "5.2.1", "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "devOptional": true, "funding": [ { "type": "github", @@ -14833,6 +17489,7 @@ "version": "1.3.2", "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.3.2.tgz", "integrity": "sha512-RbcPH1n5cfwKrru7v7+zrZvjLurgHhGyso3HTyGtRivGWgYjbOmGuivCQaORNELjNONoK35nj28EoWul9sb1zQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=10" @@ -14842,6 +17499,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.3.3.tgz", "integrity": "sha512-16OL3NnUBw8JG1jBLUoZJsLnQq0n5Ua6aHalhJK4fMQkz1lqR7Osz1sA30trBtd9VUDc2NgkuRCn8+/pBwqZ+w==", + "dev": true, "license": "MIT", "engines": { "node": ">=10" @@ -14880,7 +17538,6 @@ "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", - "dev": true, "license": "MIT", "dependencies": { "define-data-property": "^1.1.4", @@ -14925,6 +17582,12 @@ "node": ">= 0.4" } }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "license": "MIT" + }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -14932,6 +17595,26 @@ "dev": true, "license": "ISC" }, + "node_modules/sha.js": { + "version": "2.4.12", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.12.tgz", + "integrity": "sha512-8LzC5+bvI45BjpfXU8V5fdU2mfeKiQe1D1gIMn7XUlF3OTUrpdJpPPH4EMAnF0DsHHdSZqCdSss5qCmJKuiO3w==", + "license": "(MIT AND BSD-3-Clause)", + "dependencies": { + "inherits": "^2.0.4", + "safe-buffer": "^5.2.1", + "to-buffer": "^1.2.0" + }, + "bin": { + "sha.js": "bin.js" + }, + "engines": { + "node": ">= 0.10" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -14955,6 +17638,22 @@ "node": ">=8" } }, + "node_modules/shiki": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-3.21.0.tgz", + "integrity": "sha512-N65B/3bqL/TI2crrXr+4UivctrAGEjmsib5rPMMPpFp1xAx/w03v8WZ9RDDFYteXoEgY7qZ4HGgl5KBIu1153w==", + "license": "MIT", + "dependencies": { + "@shikijs/core": "3.21.0", + "@shikijs/engine-javascript": "3.21.0", + "@shikijs/engine-oniguruma": "3.21.0", + "@shikijs/langs": "3.21.0", + "@shikijs/themes": "3.21.0", + "@shikijs/types": "3.21.0", + "@shikijs/vscode-textmate": "^10.0.2", + "@types/hast": "^3.0.4" + } + }, "node_modules/side-channel": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", @@ -15051,6 +17750,51 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + 
{ + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, "node_modules/slash": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", @@ -15075,6 +17819,7 @@ "version": "1.9.9", "resolved": "https://registry.npmjs.org/solid-js/-/solid-js-1.9.9.tgz", "integrity": "sha512-A0ZBPJQldAeGCTW0YRYJmt7RCeh5rbFfPZ2aOttgYnctHE7HgKeHCBB/PVc2P7eOfmNXqMFFFoYYdm3S4dcbkA==", + "dev": true, "license": "MIT", "dependencies": { "csstype": "^3.1.0", @@ -15139,6 +17884,16 @@ "dev": true, "license": "MIT" }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/srvx": { "version": "0.9.8", "resolved": "https://registry.npmjs.org/srvx/-/srvx-0.9.8.tgz", @@ -15225,19 +17980,11 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "devOptional": true, "license": "MIT", "dependencies": { "safe-buffer": "~5.2.0" } }, - "node_modules/string-similarity": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-3.0.0.tgz", - "integrity": "sha512-7kS7LyTp56OqOI2BDWQNVnLX/rCxIQn+/5M0op1WV6P8Xx6TZNdajpuqQdiJ7Xx+p1C5CsWMvdiBp9ApMhxzEQ==", - "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", - "license": "ISC" - }, "node_modules/string-width": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", @@ -15385,6 +18132,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/stringify-object": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", @@ -15509,6 +18270,12 @@ "dev": true, "license": "MIT" }, + "node_modules/style-mod": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.3.tgz", + "integrity": "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ==", + "license": "MIT" + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -15575,6 +18342,7 @@ "version": "2.2.3", "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.3.tgz", "integrity": "sha512-ZL6DDuAlRlLGghwcfmSn9sK3Hr6ArtyudlSAiCqQ6IfE+b+HHbydbYDIG15IfS5do+7XQQBdBiubF/cV2dnDzg==", + "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -15588,6 +18356,7 @@ "version": "7.5.2", "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==", + "dev": true, "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", @@ -15616,6 +18385,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=18" @@ -15711,6 +18481,12 @@ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, + "node_modules/tiny-segmenter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/tiny-segmenter/-/tiny-segmenter-0.2.0.tgz", + "integrity": "sha512-m+aTJQ/CUBKurLaJRpLmJiwcL+Gpkzft5ZYnRU9AkuO45Y/k/2iJmuLEbN1XLrq6N3kDVyIUCCeqRzQx0feBag==", + "license": "SEE LICENSE IN http://chasen.org/~taku/software/TinySegmenter/LICENCE.txt" + }, "node_modules/tiny-warning": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", @@ -15797,6 +18573,20 @@ "dev": true, "license": "MIT" }, + "node_modules/to-buffer": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.2.2.tgz", + "integrity": "sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw==", + "license": "MIT", + "dependencies": { + "isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -15845,6 +18635,26 @@ 
"node": ">=20" } }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/ts-api-utils": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", @@ -15862,6 +18672,7 @@ "version": "3.1.6", "resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-3.1.6.tgz", "integrity": "sha512-ks6Vjr/jEw0P1gmOVwutM3B7fWxoWBL2KRDb1JfqGVawBmO5UsvmWOQFGHBPl5yxYz4eERr19E6L7NMv+Fej4w==", + "dev": true, "license": "MIT", "bin": { "tsconfck": "bin/tsconfck.js" @@ -15968,7 +18779,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", - "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.3", @@ -16046,7 +18856,7 @@ "version": "5.9.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", - "devOptional": true, + "dev": true, "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", @@ -16057,9 +18867,9 @@ } }, "node_modules/ufo": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz", - "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", "license": "MIT" }, "node_modules/uglify-js": { @@ -16123,9 +18933,9 @@ } }, "node_modules/undici": { - "version": "7.16.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz", - "integrity": "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==", + "version": "7.18.2", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.18.2.tgz", + "integrity": "sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==", "license": "MIT", "engines": { "node": ">=20.18.1" @@ -16219,6 +19029,25 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/unimport": { "version": "5.4.1", "resolved": 
"https://registry.npmjs.org/unimport/-/unimport-5.4.1.tgz", @@ -16271,6 +19100,102 @@ "node": ">=8" } }, + "node_modules/unist-util-find-after": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz", + "integrity": "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-remove-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz", + "integrity": "sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", @@ -16650,7 +19575,6 
@@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "devOptional": true, "license": "MIT" }, "node_modules/uuid": { @@ -16666,6 +19590,48 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-location": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", + "integrity": "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/vite": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", @@ -16807,10 +19773,51 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/vite-plugin-top-level-await": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vite-plugin-top-level-await/-/vite-plugin-top-level-await-1.6.0.tgz", + "integrity": "sha512-bNhUreLamTIkoulCR9aDXbTbhLk6n1YE8NJUTTxl5RYskNRtzOR0ASzSjBVRtNdjIfngDXo11qOsybGLNsrdww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/plugin-virtual": "^3.0.2", + "@swc/core": "^1.12.14", + "@swc/wasm": "^1.12.14", + "uuid": "10.0.0" + }, + "peerDependencies": { + "vite": ">=2.8" + } + }, + "node_modules/vite-plugin-top-level-await/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/vite-plugin-wasm": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/vite-plugin-wasm/-/vite-plugin-wasm-3.5.0.tgz", + "integrity": "sha512-X5VWgCnqiQEGb+omhlBVsvTfxikKtoOgAzQ95+BZ8gQ+VfMHIjSHr0wyvXFQCa0eKQ0fKyaL0kWcEnYqBac4lQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "vite": "^2 || ^3 || ^4 || ^5 || ^6 || ^7" + } + }, "node_modules/vite-tsconfig-paths": { "version": "5.1.4", "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-5.1.4.tgz", "integrity": "sha512-cYj0LRuLV2c2sMqhqhGpaO3LretdtMn/BVX4cPLanIZuwwrkVl+lK84E/miEXkCHWXuq65rhNN4rXsBcOB3S4w==", + "dev": true, "license": "MIT", "dependencies": { "debug": "^4.1.1", @@ 
-17375,6 +20382,12 @@ } } }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "license": "MIT" + }, "node_modules/w3c-xmlserializer": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", @@ -17388,6 +20401,25 @@ "node": ">=18" } }, + "node_modules/wasm-pandoc": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/wasm-pandoc/-/wasm-pandoc-0.8.0.tgz", + "integrity": "sha512-vpUEQH3ezOoGOuxGWbDLombfksrsBDTyV7aadQ2N/zVY9gtmDIS69xBEk3tRil9Qj2INQ7Xc11CZEqChLI5h8A==", + "license": "MIT", + "dependencies": { + "@bjorn3/browser_wasi_shim": "^0.4.1" + } + }, + "node_modules/web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/web-vitals": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/web-vitals/-/web-vitals-5.1.0.tgz", @@ -17533,7 +20565,6 @@ "version": "1.1.19", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", - "dev": true, "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", @@ -18153,13 +21184,13 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, "license": "ISC" }, "node_modules/ws": { "version": "8.18.3", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "dev": true, "license": "MIT", "engines": { "node": ">=10.0.0" @@ -18235,6 +21266,24 @@ "node": ">=0.4" } }, + "node_modules/y-codemirror.next": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/y-codemirror.next/-/y-codemirror.next-0.3.5.tgz", + "integrity": "sha512-VluNu3e5HfEXybnypnsGwKAj+fKLd4iAnR7JuX1Sfyydmn1jCBS5wwEL/uS04Ch2ib0DnMAOF6ZRR/8kK3wyGw==", + "license": "MIT", + "dependencies": { + "lib0": "^0.2.42" + }, + "funding": { + "type": "GitHub Sponsors ❤", + "url": "https://github.com/sponsors/dmonad" + }, + "peerDependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "yjs": "^13.5.6" + } + }, "node_modules/y-indexeddb": { "version": "9.0.12", "resolved": "https://registry.npmjs.org/y-indexeddb/-/y-indexeddb-9.0.12.tgz", @@ -18273,23 +21322,6 @@ "yjs": "^13.0.0" } }, - "node_modules/y-monaco": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/y-monaco/-/y-monaco-0.1.6.tgz", - "integrity": "sha512-sYRywMmcylt+Nupl+11AvizD2am06ST8lkVbUXuaEmrtV6Tf+TD4rsEm6u9YGGowYue+Vfg1IJ97SUP2J+PVXg==", - "license": "MIT", - "dependencies": { - "lib0": "^0.2.43" - }, - "engines": { - "node": ">=12.0.0", - "npm": ">=6.0.0" - }, - "peerDependencies": { - "monaco-editor": ">=0.20.0", - "yjs": "^13.3.1" - } - }, "node_modules/y-protocols": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/y-protocols/-/y-protocols-1.0.6.tgz", @@ -18506,31 
+21538,6 @@ "node": ">= 14" } }, - "node_modules/zip-stream/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/zip-stream/node_modules/readable-stream": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", @@ -18556,6 +21563,22 @@ "funding": { "url": "https://github.com/sponsors/colinhacks" } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/zxcvbn": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/zxcvbn/-/zxcvbn-4.4.2.tgz", + "integrity": "sha512-Bq0B+ixT/DMyG8kgX2xWcI5jUvCwqrMxSFam7m0lAf78nf04hv6lNCsyLYdyYTrCVMqNDY/206K7eExYCeSyUQ==", + "license": "MIT" } } } diff --git a/app/package.json b/app/package.json index 755cbc19..b76a1124 100644 --- a/app/package.json +++ b/app/package.json @@ -15,7 +15,19 @@ "gen:api": "npm run gen:openapi && npm run gen:client" }, "dependencies": { - "@monaco-editor/react": "^4.7.0", + "@codemirror/autocomplete": "^6.20.0", + "@codemirror/commands": "^6.10.1", + "@codemirror/lang-markdown": "^6.5.0", + "@codemirror/language": "^6.12.1", + "@codemirror/language-data": "^6.5.2", + "@codemirror/merge": "^6.11.2", + "@codemirror/search": "^6.5.11", + "@codemirror/state": "^6.5.3", + "@codemirror/view": "^6.39.9", + "@extism/extism": "2.0.0-rc13", + "@isomorphic-git/lightning-fs": "^4.6.2", + "@lezer/markdown": "^1.6.3", + "@noble/curves": "^2.0.1", "@radix-ui/react-alert-dialog": "^1.1.15", "@radix-ui/react-avatar": "^1.1.10", "@radix-ui/react-collapsible": "^1.1.12", @@ -29,40 +41,57 @@ "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-tabs": "^1.1.13", "@radix-ui/react-tooltip": "^1.2.8", + "@replit/codemirror-vim": "^6.3.0", "@resvg/resvg-js": "^2.6.2", - "@tailwindcss/vite": "^4.0.6", - "@tanstack/react-devtools": "^0.7.0", "@tanstack/react-query": "^5.51.21", "@tanstack/react-router": "^1.132.0", - "@tanstack/react-router-devtools": "^1.132.0", "@tanstack/react-router-ssr-query": "^1.131.7", "@tanstack/react-start": "^1.132.0", "@tanstack/react-virtual": "^3.10.8", - "@tanstack/router-plugin": "^1.132.0", + "@types/zxcvbn": "^4.4.5", + "bip39": "^3.1.0", + "buffer": "^6.0.3", + "canonicalize": "^2.1.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", + "hash-wasm": "^4.12.0", "is-hotkey": "^0.2.0", + "isomorphic-git": "^1.36.1", + "jszip": "^3.10.1", + "katex": "^0.16.27", + "libsodium-wrappers": "^0.8.0", "lucide-react": "^0.544.0", - "monaco-editor": "^0.52.2", - "monaco-markdown": "^0.0.12", - "monaco-vim": "^0.4.2", + "minisearch": "^7.2.0", "morphdom": "^2.7.7", "react": "^19.0.0", "react-dom": "^19.0.0", "react-mosaic-component": "^6.1.1", "react-resizable-panels": "^3.0.6", + "rehype-katex": "^7.0.1", + 
"rehype-sanitize": "^6.0.0", + "rehype-stringify": "^10.0.1", + "remark-breaks": "^4.0.0", + "remark-gfm": "^4.0.1", + "remark-math": "^6.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.1.2", "satori": "^0.18.3", + "shiki": "^3.21.0", "sonner": "^2.0.7", "state-local": "^1.0.7", "tailwind-merge": "^3.0.2", "tailwindcss": "^4.0.6", + "tiny-segmenter": "^0.2.0", "tw-animate-css": "^1.3.6", - "vite-tsconfig-paths": "^5.1.4", + "unified": "^11.0.5", + "unist-util-visit": "^5.0.0", + "wasm-pandoc": "^0.8.0", + "y-codemirror.next": "^0.3.5", "y-indexeddb": "^9.0.12", - "y-monaco": "^0.1.6", "y-websocket": "^1.5.4", - "yjs": "^13.6.27" + "yjs": "^13.6.27", + "zxcvbn": "^4.4.2" }, "overrides": { "react-dnd-multi-backend": "^9.0.0", @@ -71,11 +100,19 @@ }, "devDependencies": { "@hey-api/openapi-ts": "^0.86.10", + "@tailwindcss/vite": "^4.0.6", "@tanstack/nitro-v2-vite-plugin": "^1.132.31", + "@tanstack/react-devtools": "^0.7.0", + "@tanstack/react-router-devtools": "^1.132.0", + "@tanstack/router-plugin": "^1.132.0", "@tanstack/virtual-file-routes": "^1.132.31", "@testing-library/dom": "^10.4.0", "@testing-library/react": "^16.2.0", + "@types/hast": "^3.0.4", "@types/is-hotkey": "^0.1.10", + "@types/katex": "^0.16.8", + "@types/libsodium-wrappers": "^0.7.14", + "@types/mdast": "^4.0.4", "@types/node": "^22.18.1", "@types/react": "^19.0.8", "@types/react-dom": "^19.0.3", @@ -91,6 +128,9 @@ "typescript": "^5.7.2", "vite": "^7.1.7", "vite-plugin-pwa": "^1.1.0", + "vite-plugin-top-level-await": "^1.6.0", + "vite-plugin-wasm": "^3.5.0", + "vite-tsconfig-paths": "^5.1.4", "vitest": "^3.0.5", "web-vitals": "^5.1.0" } diff --git a/app/src/entities/document/api/index.ts b/app/src/entities/document/api/index.ts index 7894aac5..4695145a 100644 --- a/app/src/entities/document/api/index.ts +++ b/app/src/entities/document/api/index.ts @@ -4,9 +4,6 @@ import { archiveDocument as apiArchiveDocument, createDocument as apiCreateDocument, deleteDocument as apiDeleteDocument, - downloadDocument as apiDownloadDocument, - downloadDocumentSnapshot as apiDownloadDocumentSnapshot, - downloadWorkspaceArchive as apiDownloadWorkspaceArchive, duplicateDocument as apiDuplicateDocument, getBacklinks as apiGetBacklinks, getDocument as apiGetDocument, @@ -30,12 +27,10 @@ import type { SnapshotDiffResponse, SnapshotRestoreResponse, SnapshotSummary, - DownloadFormat, } from '@/shared/api' -import { ApiError } from '@/shared/api/client/core/ApiError' + type DocumentListParams = { - query?: string tag?: string state?: 'active' | 'archived' | 'all' workspaceId?: string | null @@ -49,7 +44,6 @@ export const documentKeys = { 'list', params?.workspaceId ?? 'current', params?.state ?? 'active', - params?.query ?? null, params?.tag ?? null, ] as const, byId: (id: string) => ['documents', id] as const, @@ -70,7 +64,6 @@ export const listDocumentsQuery = (params?: DocumentListParams) => { queryKey: documentKeys.list({ ...params, state }), queryFn: () => apiListDocuments({ - query: params?.query ?? null, tag: params?.tag ?? null, state, }) as Promise, @@ -147,41 +140,26 @@ export async function triggerSnapshotRestore(params: { return response.snapshot } -export async function downloadSnapshot(params: { +// Note: Snapshot download is temporarily unavailable. +// Server-side export has been removed for E2EE compliance. +// Client-side snapshot export will be implemented in a future update. 
+export async function downloadSnapshot(_params: { documentId: string snapshotId: string token?: string | null filename?: string -}) { - const blob = (await apiDownloadDocumentSnapshot({ - id: params.documentId, - snapshotId: params.snapshotId, - token: params.token ?? null, - })) as Blob - const name = params.filename ?? `snapshot-${params.snapshotId}.zip` - const url = URL.createObjectURL(blob) - try { - const link = document.createElement('a') - link.href = url - link.download = name - link.style.display = 'none' - document.body.appendChild(link) - link.click() - document.body.removeChild(link) - } finally { - URL.revokeObjectURL(url) - } - return name +}): Promise { + throw new Error('Snapshot download is not yet available. This feature is being migrated to client-side export for E2EE compliance.') } export function useCreateDocument() { const qc = useQueryClient() return useMutation({ - mutationFn: (input: { title?: string; parent_id?: string | null; type?: 'folder' | 'document' }) => + mutationFn: (input: { title?: string; parentId?: string | null; type?: 'folder' | 'document' }) => apiCreateDocument({ requestBody: { title: input.title ?? 'Untitled', - parent_id: input.parent_id ?? null, + parentId: input.parentId ?? null, type: input.type, }, }), @@ -245,9 +223,8 @@ export async function fetchDocumentContent(id: string) { return apiGetDocumentContent({ id }) } -export async function listDocuments(params?: { query?: string | null; tag?: string | null; state?: 'active' | 'archived' | 'all' }) { +export async function listDocuments(params?: { tag?: string | null; state?: 'active' | 'archived' | 'all' }) { return apiListDocuments({ - query: params?.query ?? null, tag: params?.tag ?? null, state: params?.state ?? 'active', }) @@ -288,436 +265,3 @@ export async function updateDocumentContent(params: { id: string; content: strin export async function deleteDocument(id: string) { return apiDeleteDocument({ id }) } - -export type DocumentDownloadFormat = DownloadFormat - -export type DocumentDownloadFormatCategory = 'primary' | 'other' - -export type DocumentDownloadFormatMetadata = { - label: string - description: string - extension: string - category: DocumentDownloadFormatCategory - group?: string -} - -export const DOWNLOAD_FORMAT_METADATA: Record = { - archive: { - label: 'ZIP archive', - description: 'Markdown with all attachments bundled', - extension: 'zip', - category: 'primary', - }, - markdown: { - label: 'Markdown (.md)', - description: 'Plain markdown document only', - extension: 'md', - category: 'primary', - }, - html: { - label: 'HTML (.html)', - description: 'Self-contained HTML page', - extension: 'html', - category: 'primary', - }, - html5: { - label: 'HTML5 (.html)', - description: 'HTML5 output; self-contained page', - extension: 'html', - category: 'other', - group: 'Web & Slides', - }, - pdf: { - label: 'PDF (.pdf)', - description: 'Portable Document Format export', - extension: 'pdf', - category: 'primary', - }, - docx: { - label: 'Word (.docx)', - description: 'Microsoft Word compatible document', - extension: 'docx', - category: 'primary', - }, - latex: { - label: 'LaTeX (.tex)', - description: 'LaTeX document source', - extension: 'tex', - category: 'other', - group: 'TeX & Academic', - }, - beamer: { - label: 'Beamer slides (.tex)', - description: 'LaTeX Beamer slide deck', - extension: 'tex', - category: 'other', - group: 'TeX & Academic', - }, - context: { - label: 'ConTeXt (.tex)', - description: 'ConTeXt document source', - extension: 'tex', - category: 
'other', - group: 'TeX & Academic', - }, - man: { - label: 'Man page (.man)', - description: 'Groff man page source', - extension: 'man', - category: 'other', - group: 'Manuals', - }, - mediawiki: { - label: 'MediaWiki (.mediawiki)', - description: 'MediaWiki markup', - extension: 'mediawiki', - category: 'other', - group: 'Wiki & Markup', - }, - dokuwiki: { - label: 'DokuWiki (.txt)', - description: 'DokuWiki markup', - extension: 'txt', - category: 'other', - group: 'Wiki & Markup', - }, - textile: { - label: 'Textile (.textile)', - description: 'Textile markup', - extension: 'textile', - category: 'other', - group: 'Wiki & Markup', - }, - org: { - label: 'Org-mode (.org)', - description: 'Emacs Org-mode document', - extension: 'org', - category: 'other', - group: 'Wiki & Markup', - }, - texinfo: { - label: 'Texinfo (.texi)', - description: 'GNU Texinfo document', - extension: 'texi', - category: 'other', - group: 'Wiki & Markup', - }, - opml: { - label: 'OPML (.opml)', - description: 'Outline Processor Markup Language document', - extension: 'opml', - category: 'other', - group: 'Data & Interchange', - }, - docbook: { - label: 'DocBook XML (.xml)', - description: 'DocBook XML document', - extension: 'xml', - category: 'other', - group: 'Data & Interchange', - }, - opendocument: { - label: 'OpenDocument Flat XML (.fodt)', - description: 'Flat OpenDocument Text document', - extension: 'fodt', - category: 'other', - group: 'Office & Rich Text', - }, - odt: { - label: 'ODT (.odt)', - description: 'OpenDocument Text document', - extension: 'odt', - category: 'other', - group: 'Office & Rich Text', - }, - rtf: { - label: 'RTF (.rtf)', - description: 'Rich Text Format document', - extension: 'rtf', - category: 'other', - group: 'Office & Rich Text', - }, - epub: { - label: 'EPUB 2 (.epub)', - description: 'EPUB eBook (v2)', - extension: 'epub', - category: 'other', - group: 'E-books', - }, - epub3: { - label: 'EPUB 3 (.epub)', - description: 'EPUB eBook (v3)', - extension: 'epub', - category: 'other', - group: 'E-books', - }, - fb2: { - label: 'FictionBook (.fb2)', - description: 'FictionBook eBook', - extension: 'fb2', - category: 'other', - group: 'E-books', - }, - asciidoc: { - label: 'AsciiDoc (.adoc)', - description: 'AsciiDoc markup', - extension: 'adoc', - category: 'other', - group: 'Wiki & Markup', - }, - icml: { - label: 'ICML (.icml)', - description: 'Adobe InCopy ICML document', - extension: 'icml', - category: 'other', - group: 'Office & Rich Text', - }, - slidy: { - label: 'Slidy (.html)', - description: 'Slidy HTML presentation', - extension: 'html', - category: 'other', - group: 'Web & Slides', - }, - slideous: { - label: 'Slideous (.html)', - description: 'Slideous HTML presentation', - extension: 'html', - category: 'other', - group: 'Web & Slides', - }, - dzslides: { - label: 'DZSlides (.html)', - description: 'DZSlides HTML presentation', - extension: 'html', - category: 'other', - group: 'Web & Slides', - }, - revealjs: { - label: 'reveal.js (.html)', - description: 'reveal.js HTML presentation', - extension: 'html', - category: 'other', - group: 'Web & Slides', - }, - s5: { - label: 'S5 (.html)', - description: 'S5 HTML presentation', - extension: 'html', - category: 'other', - group: 'Web & Slides', - }, - json: { - label: 'Pandoc JSON (.json)', - description: 'Pandoc JSON abstract syntax tree', - extension: 'json', - category: 'other', - group: 'Data & Interchange', - }, - plain: { - label: 'Plain text (.txt)', - description: 'Plain UTF-8 text output', - extension: 
'txt', - category: 'other', - group: 'Wiki & Markup', - }, - commonmark: { - label: 'CommonMark (.md)', - description: 'CommonMark markdown', - extension: 'md', - category: 'other', - group: 'Wiki & Markup', - }, - commonmark_x: { - label: 'CommonMark+Extensions (.md)', - description: 'CommonMark with extensions', - extension: 'md', - category: 'other', - group: 'Wiki & Markup', - }, - markdown_strict: { - label: 'Markdown (strict) (.md)', - description: 'Original markdown syntax', - extension: 'md', - category: 'other', - group: 'Wiki & Markup', - }, - markdown_phpextra: { - label: 'Markdown (PHP Extra) (.md)', - description: 'Markdown PHP Extra dialect', - extension: 'md', - category: 'other', - group: 'Wiki & Markup', - }, - markdown_github: { - label: 'GitHub Markdown (.md)', - description: 'GitHub-flavoured markdown', - extension: 'md', - category: 'other', - group: 'Wiki & Markup', - }, - rst: { - label: 'reStructuredText (.rst)', - description: 'reStructuredText document', - extension: 'rst', - category: 'other', - group: 'Wiki & Markup', - }, - native: { - label: 'Pandoc native (.hs)', - description: 'Pandoc native Haskell AST', - extension: 'hs', - category: 'other', - group: 'Data & Interchange', - }, - haddock: { - label: 'Haddock (.txt)', - description: 'Haddock markup (Haskell docs)', - extension: 'txt', - category: 'other', - group: 'Wiki & Markup', - }, -} as const - -export async function downloadDocumentFile( - id: string, - options?: { token?: string; title?: string; format?: DocumentDownloadFormat }, -) { - const format: DocumentDownloadFormat = options?.format ?? 'archive' - let payload: unknown - try { - payload = await apiDownloadDocument({ id, token: options?.token ?? null, format }) - } catch (error) { - if (error instanceof ApiError) { - const body = error.body as { message?: unknown } | undefined - if (body && typeof body === 'object' && 'message' in body) { - const messageValue = (body as { message?: unknown }).message - if (typeof messageValue === 'string') { - throw new Error(messageValue) - } - } - } - throw error - } - const mimeType = resolveMimeType(format) - const blob = - payload instanceof Blob - ? payload - : typeof payload === 'string' - ? new Blob([payload], { type: mimeType }) - : payload && typeof payload === 'object' - ? new Blob([JSON.stringify(payload, null, 2)], { - type: 'application/json; charset=utf-8', - }) - : undefined - if (!(blob instanceof Blob)) { - throw new Error('Unexpected download payload') - } - const extension = resolveExtension(format) - const filename = `${sanitizeExportName(options?.title)}.${extension}` - const blobUrl = URL.createObjectURL(blob) - try { - const link = document.createElement('a') - link.href = blobUrl - link.download = filename - link.style.display = 'none' - document.body.appendChild(link) - link.click() - document.body.removeChild(link) - } finally { - URL.revokeObjectURL(blobUrl) - } - return filename -} - -export async function downloadWorkspaceArchive(params: { - workspaceId: string - workspaceName: string - format?: DocumentDownloadFormat -}) { - const format: DocumentDownloadFormat = params.format ?? 
'archive' - let payload: unknown - try { - payload = await apiDownloadWorkspaceArchive({ id: params.workspaceId, format }) - } catch (error) { - if (error instanceof ApiError) { - const body = error.body as { message?: unknown } | undefined - if (body && typeof body === 'object' && 'message' in body) { - const messageValue = (body as { message?: unknown }).message - if (typeof messageValue === 'string') { - throw new Error(messageValue) - } - } - } - throw error - } - const mimeType = resolveMimeType(format) - const blob = - payload instanceof Blob - ? payload - : typeof payload === 'string' - ? new Blob([payload], { type: mimeType }) - : payload && typeof payload === 'object' - ? new Blob([JSON.stringify(payload, null, 2)], { - type: 'application/json; charset=utf-8', - }) - : undefined - if (!(blob instanceof Blob)) { - throw new Error('Unexpected download payload') - } - const extension = resolveExtension(format) - const filename = `${sanitizeExportName(params.workspaceName)}.${extension}` - const blobUrl = URL.createObjectURL(blob) - try { - const link = document.createElement('a') - link.href = blobUrl - link.download = filename - link.style.display = 'none' - document.body.appendChild(link) - link.click() - document.body.removeChild(link) - } finally { - URL.revokeObjectURL(blobUrl) - } - return filename -} - -function resolveExtension(format: DocumentDownloadFormat): string { - return DOWNLOAD_FORMAT_METADATA[format]?.extension ?? format -} - -function resolveMimeType(format: DocumentDownloadFormat): string { - const extension = resolveExtension(format).toLowerCase() - switch (extension) { - case 'json': - return 'application/json; charset=utf-8' - case 'xml': - case 'opml': - case 'fb2': - return 'application/xml; charset=utf-8' - case 'fodt': - return 'application/vnd.oasis.opendocument.text' - case 'html': - return 'text/html; charset=utf-8' - case 'md': - return 'text/markdown; charset=utf-8' - case 'tex': - return 'application/x-tex; charset=utf-8' - default: - return 'text/plain; charset=utf-8' - } -} - -function sanitizeExportName(input?: string) { - const invalid = new Set(['/','\\',':','*','?','"','<','>','|','\0']) - let base = (input ?? '').trim() - if (!base) base = 'document' - let sanitized = '' - for (const ch of base) { - sanitized += invalid.has(ch) ? '-' : ch - } - sanitized = sanitized.replace(/ /g, '_') - if (sanitized.length > 100) sanitized = sanitized.slice(0, 100) - if (!sanitized) sanitized = 'document' - return sanitized -} diff --git a/app/src/entities/document/wc/attachments/attachment.ts b/app/src/entities/document/wc/attachments/attachment.ts index 323b1711..3b88e0bd 100644 --- a/app/src/entities/document/wc/attachments/attachment.ts +++ b/app/src/entities/document/wc/attachments/attachment.ts @@ -1,3 +1,10 @@ +/** + * RefMD Attachment Web Component + * + * Displays file attachments with download capability and preview support. + * Integrates with E2EE for encrypted file decryption. 
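+ *
+ * For orientation, the markup this element is upgraded from looks roughly like
+ * the following (the attributes match observedAttributes below; the tag name is
+ * an assumption here, since the customElements registration is outside this hunk):
+ *
+ *   <refmd-attachment
+ *     href="./attachments/report.pdf"
+ *     label="report.pdf"
+ *     data-document-id="<document uuid>"></refmd-attachment>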
+ */ + function escapeHtml(value: string): string { return value .replace(/&/g, '&amp;') @@ -11,7 +18,10 @@ function extFromUrl(url: string): string { try { const path = url.split('?')[0] const segs = path.split('/') - return decodeURIComponent(segs[segs.length - 1] || '').split('.').pop()?.toLowerCase() || '' + // Remove .rme extension if present + let name = decodeURIComponent(segs[segs.length - 1] || '') + if (name.endsWith('.rme')) name = name.slice(0, -4) + return name.split('.').pop()?.toLowerCase() || '' } catch { return '' } @@ -21,7 +31,10 @@ function fileName(url: string): string { try { const path = url.split('?')[0] const segs = path.split('/') - return decodeURIComponent(segs[segs.length - 1] || '') + let name = decodeURIComponent(segs[segs.length - 1] || '') + // Remove .rme extension for display + if (name.endsWith('.rme')) name = name.slice(0, -4) + return name } catch { return url } @@ -43,6 +56,21 @@ function downloadSvg(): string { return '' } +function loadingSvg(): string { + return '' +} + +/** + * Get decryption bridge from global + */ +function getDecryptionBridge(): { + resolveAndDecrypt: (logicalPath: string, documentId: string) => Promise<{ blobUrl: string; filename: string; mimeType: string } | null> + revokeBlobUrl: (url: string) => void +} | null { + if (typeof window === 'undefined') return null + return (window as any).__refmd_file_decryption__ ?? null +} + const canUseCustomElements = typeof globalThis !== 'undefined' && typeof (globalThis as any).HTMLElement !== 'undefined' && @@ -51,22 +79,126 @@ if (canUseCustomElements) { class RefmdAttachment extends (globalThis as any).HTMLElement { private previewOpen = false + private decryptedBlobUrl: string | null = null + private decryptedFilename: string | null = null + private isDecrypting = false + private decryptError: string | null = null connectedCallback() { if (!this.dataset.label) this.dataset.label = (this.getAttribute('label') || '').trim() this.render() } - static get observedAttributes() { return ['href','label'] } + disconnectedCallback() { + // Cleanup blob URL when element is removed + if (this.decryptedBlobUrl) { + const bridge = getDecryptionBridge() + bridge?.revokeBlobUrl(this.decryptedBlobUrl) + this.decryptedBlobUrl = null + } + } + + static get observedAttributes() { return ['href','label','data-document-id'] } attributeChangedCallback() { this.render() } + private async handleDownload(e: MouseEvent, href: string, label: string) { + e.preventDefault() + e.stopPropagation() + + // If already decrypted, use the cached blob + if (this.decryptedBlobUrl) { + this.triggerDownload(this.decryptedBlobUrl, this.decryptedFilename || label) + return + } + + const documentId = this.getAttribute('data-document-id') + const bridge = getDecryptionBridge() + if (!bridge || !documentId) { + this.decryptError = 'Decryption not available' + this.render() + return + } + + // Show loading state + this.isDecrypting = true + this.decryptError = null + this.render() + + try { + const result = await bridge.resolveAndDecrypt(href, documentId) + if (result) { + this.decryptedBlobUrl = result.blobUrl + this.decryptedFilename = result.filename + this.triggerDownload(result.blobUrl, result.filename) + } else { + throw new Error('Decryption failed') + } + } catch (err) { + console.error('[E2EE] Download failed:', err) + this.decryptError = 'Download failed' + } finally { + this.isDecrypting = false + this.render() + } + } + + private triggerDownload(url: string, filename: string) { + const a = 
document.createElement('a') + a.href = url + a.download = filename + document.body.appendChild(a) + a.click() + document.body.removeChild(a) + } + + private async handlePreviewOpen(href: string) { + // If already decrypted, just toggle + if (this.decryptedBlobUrl) { + this.previewOpen = !this.previewOpen + this.render() + return + } + + const documentId = this.getAttribute('data-document-id') + const bridge = getDecryptionBridge() + if (!bridge || !documentId) { + this.decryptError = 'Decryption not available' + this.previewOpen = true + this.render() + return + } + + // Decrypt for preview + this.isDecrypting = true + this.decryptError = null + this.previewOpen = true + this.render() + + try { + const result = await bridge.resolveAndDecrypt(href, documentId) + if (result) { + this.decryptedBlobUrl = result.blobUrl + this.decryptedFilename = result.filename + } else { + throw new Error('Decryption failed') + } + } catch (err) { + console.error('[E2EE] Preview decryption failed:', err) + this.decryptError = 'Decryption failed' + } finally { + this.isDecrypting = false + this.render() + } + } + render() { const href = this.getAttribute('href') || '#' const labelAttr = (this.getAttribute('label') || this.dataset.label || '').trim() - const label = labelAttr || fileName(href) + const displayFilename = this.decryptedFilename || labelAttr || fileName(href) + const label = displayFilename const ext = extFromUrl(label) const isFile = href.includes('/api/uploads/') || href.startsWith('./attachments/') || href.startsWith('./') @@ -80,7 +212,8 @@ if (canUseCustomElements) { const previewable = ['mp3','wav','flac','aac','ogg','wma','mp4','avi','mov','wmv','flv','webm','mkv','pdf'].includes(ext) if (!previewable) this.previewOpen = false - const preview = this.previewOpen && previewable ? this.previewContent(ext, href) : '' + const downloadIcon = this.isDecrypting ? loadingSvg() : downloadSvg() + const preview = this.previewOpen && previewable ? this.previewContent(ext) : '' this.innerHTML = `

@@ -88,33 +221,52 @@ if (canUseCustomElements) { ${icon} ${escapeHtml(label)} ${badge} - - ${downloadSvg()} - + ${preview}
` - const download = this.querySelector('[data-refmd-attachment-download]') as HTMLAnchorElement | null - download?.addEventListener('click', (e) => e.stopPropagation()) + const download = this.querySelector('[data-refmd-attachment-download]') as HTMLButtonElement | null + download?.addEventListener('click', (e) => this.handleDownload(e, href, label)) + if (!previewable) return const card = this.querySelector('[data-refmd-attachment-card]') as HTMLElement | null - card?.addEventListener('click', () => { - this.previewOpen = !this.previewOpen - this.render() + card?.addEventListener('click', (e) => { + // Don't toggle preview if clicking download button + const target = e.target as HTMLElement + if (target.closest('[data-refmd-attachment-download]')) return + this.handlePreviewOpen(href) }) } - private previewContent(ext: string, href: string): string { + private previewContent(ext: string): string { + if (this.isDecrypting) { + return `
+ + Decrypting... +
` + } + + if (this.decryptError || !this.decryptedBlobUrl) { + return `
+ Failed to decrypt file. The file may be corrupted or you may not have access. +
` + } + + const src = this.decryptedBlobUrl + if (['mp3','wav','flac','aac','ogg','wma'].includes(ext)) { - return `
` + return `
` } if (['mp4','avi','mov','wmv','flv','webm','mkv'].includes(ext)) { - return `
` + return `
` } - return `
` + // PDF + return `
` } } diff --git a/app/src/entities/document/wc/attachments/upgrade.ts b/app/src/entities/document/wc/attachments/upgrade.ts index 69bc3b43..9df26c8e 100644 --- a/app/src/entities/document/wc/attachments/upgrade.ts +++ b/app/src/entities/document/wc/attachments/upgrade.ts @@ -1,4 +1,4 @@ -export function upgradeAttachments(root: Element) { +export function upgradeAttachments(root: Element, documentId?: string) { if (typeof document === 'undefined' || typeof customElements === 'undefined') return const anchors = Array.from(root.querySelectorAll('a.file-attachment, a[href^="/api/uploads/"], a[href^="./attachments/"], a[href^="attachments/"]')) as HTMLAnchorElement[] @@ -10,6 +10,7 @@ export function upgradeAttachments(root: Element) { el.setAttribute('href', href) const text = (a.textContent || '').trim() if (text && text !== href) el.setAttribute('label', text) + if (documentId) el.setAttribute('data-document-id', documentId) a.replaceWith(el) } } diff --git a/app/src/entities/document/wc/markdown/hydrate-all.ts b/app/src/entities/document/wc/markdown/hydrate-all.ts index 6fa553f6..7c162f67 100644 --- a/app/src/entities/document/wc/markdown/hydrate-all.ts +++ b/app/src/entities/document/wc/markdown/hydrate-all.ts @@ -5,8 +5,8 @@ import { upgradeCodeBlocks } from '../code/upgrade' import { upgradePluginHydrators } from '../placeholder/hydrate' import { upgradeWikiLinks } from '../wiki/upgrade' -export function upgradeAll(root: Element) { - upgradeAttachments(root) +export function upgradeAll(root: Element, documentId?: string) { + upgradeAttachments(root, documentId) upgradeWikiLinks(root) upgradePluginHydrators(root) return upgradeCodeBlocks(root) diff --git a/app/src/entities/file/api/index.ts b/app/src/entities/file/api/index.ts index 51bc8a41..88b8440f 100644 --- a/app/src/entities/file/api/index.ts +++ b/app/src/entities/file/api/index.ts @@ -1,11 +1,277 @@ -import { uploadFile as apiUploadFile } from '@/shared/api' +import { + uploadFile as apiUploadFile, + listFiles, + type ListFileResponse, +} from '@/shared/api' +import { encryptFile, decryptFile, isRmeFile, decryptMetadata } from '@/shared/lib/files' export const fileKeys = { all: ['files'] as const, } -export async function uploadAttachment(documentId: string, file: File) { - return apiUploadFile({ - formData: { file: file as any, document_id: documentId } as any, +export interface UploadAttachmentOptions { + /** Document encryption key */ + dek: Uint8Array + /** Existing logical paths for collision detection */ + existingPaths?: Set +} + +/** + * Upload an attachment with E2EE encryption + * + * @param documentId - Document ID + * @param file - File to upload + * @param options - Upload options including DEK + */ +export async function uploadAttachment( + documentId: string, + file: File, + options: UploadAttachmentOptions +) { + // 1. Read file content + const content = new Uint8Array(await file.arrayBuffer()) + + // 2. Use provided DEK + const { dek } = options + + // 3. Resolve logical path with collision detection + const logicalPath = options.existingPaths + ? resolveLogicalPath(file.name, options.existingPaths) + : `attachments/${file.name}` + + // 4. Encrypt file + const result = await encryptFile(content, dek, { + filename: file.name, + mimeType: file.type || 'application/octet-stream', + logicalPath, + }) + + // 5. 
Create .rme file blob + const rmeBlob = new Blob([result.rmeBytes as BlobPart], { + type: 'application/octet-stream', + }) + const rmeFile = new File([rmeBlob], `${file.name}.rme`, { + type: 'application/octet-stream', + }) + + // 6. Build metadata JSON for API + const metadata = JSON.stringify({ + encryptedMetadata: result.encryptedMetadata, + encryptedMetadataNonce: result.metadataNonce, + encryptedHash: result.encryptedHash, + }) + + // 7. Upload encrypted file + const uploadResult = await apiUploadFile({ + docId: documentId, + formData: { + file: rmeFile, + metadata, + }, }) + + // Return with logicalPath for caller to use + return { + ...uploadResult, + logicalPath, + originalFilename: file.name, + mimeType: file.type || 'application/octet-stream', + } +} + +export interface DownloadAttachmentOptions { + /** Document encryption key */ + dek: Uint8Array + /** Share token for authentication */ + token?: string +} + +export interface DownloadAttachmentResult { + /** Decrypted file content as Blob */ + blob: Blob + /** Original filename (from encrypted metadata) */ + filename: string + /** MIME type (from encrypted metadata) */ + mimeType: string +} + +/** + * Download and decrypt an attachment + * + * @param _documentId - Document ID (unused, kept for backward compatibility) + * @param url - Full URL to the attachment + * @param options - Download options including DEK + * @returns Decrypted file content and metadata + */ +export async function downloadAttachment( + _documentId: string, + url: string, + options: DownloadAttachmentOptions +): Promise { + // 1. Fetch the file + const fetchUrl = options.token + ? url.includes('?') + ? `${url}&token=${encodeURIComponent(options.token)}` + : `${url}?token=${encodeURIComponent(options.token)}` + : url + + const response = await fetch(fetchUrl, { + credentials: 'include', + }) + if (!response.ok) { + throw new Error(`Failed to download file: ${response.status}`) + } + + const bytes = new Uint8Array(await response.arrayBuffer()) + + // 2. Check if file is encrypted + if (!isRmeFile(bytes)) { + // Not encrypted, return as-is + const contentType = + response.headers.get('content-type') || 'application/octet-stream' + const filename = extractFilenameFromUrl(url) + return { + blob: new Blob([bytes], { type: contentType }), + filename, + mimeType: contentType, + } + } + + // 3. Decrypt using provided DEK + const decrypted = await decryptFile(bytes, options.dek) + + // 4. Return decrypted content + return { + blob: new Blob([decrypted.content as BlobPart], { type: decrypted.metadata.mimeType }), + filename: decrypted.metadata.filename, + mimeType: decrypted.metadata.mimeType, + } +} + +/** + * Extract document ID from attachment URL + * @param url - URL like /api/uploads/{docId}/attachments/xxx + */ +export function extractDocumentIdFromUrl(url: string): string | null { + const match = url.match(/\/api\/uploads\/([^/]+)\//) + return match?.[1] ?? 
null +} + +/** + * Extract filename from URL + */ +function extractFilenameFromUrl(url: string): string { + try { + const path = url.split('?')[0] + const segments = path.split('/') + const filename = segments[segments.length - 1] || 'download' + return decodeURIComponent(filename) + } catch { + return 'download' + } +} + +// Re-export for use in file map +export type { ListFileResponse } + +/** File map entry with decrypted metadata */ +export interface FileMapEntry { + fileId: string + logicalPath: string + filename: string + mimeType: string +} + +/** File map: logicalPath → FileMapEntry */ +export type FileMap = Map + +/** + * List files for a document (API call only) + * @param documentId - Document ID + * @param token - Optional share token for authentication + */ +export async function listDocumentFiles(documentId: string, token?: string): Promise { + const response = await listFiles({ docId: documentId, token }) + return response +} + +/** + * Build a file map for a document by fetching and decrypting file metadata. + * + * @param documentId - Document ID + * @param dek - Document encryption key + * @param token - Optional share token for authentication + * @returns FileMap with logicalPath → FileMapEntry mapping + */ +export async function buildFileMap( + documentId: string, + dek: Uint8Array, + token?: string +): Promise { + // 1. Fetch file list + const files = await listDocumentFiles(documentId, token) + + // 2. Build map by decrypting each file's metadata using provided DEK + const map: FileMap = new Map() + + for (const file of files) { + if (!file.encryptedMetadata || !file.encryptedMetadataNonce) { + // Legacy file without E2EE metadata - skip + continue + } + + try { + // Decode base64 + const metadataBytes = Uint8Array.from(atob(file.encryptedMetadata), (c) => + c.charCodeAt(0) + ) + const nonceBytes = Uint8Array.from(atob(file.encryptedMetadataNonce), (c) => + c.charCodeAt(0) + ) + + // Decrypt metadata + const metadata = await decryptMetadata(metadataBytes, nonceBytes, dek) + + map.set(metadata.logicalPath, { + fileId: file.id, + logicalPath: metadata.logicalPath, + filename: metadata.filename, + mimeType: metadata.mimeType, + }) + } catch (error) { + console.warn('[FileMap] Failed to decrypt metadata for file:', file.id, error) + } + } + + return map +} + +/** + * Resolve a logical path with collision detection. + * If a file with the same name already exists, appends a suffix. + * + * @param filename - Original filename + * @param existingPaths - Set of existing logical paths + * @returns Unique logical path + */ +export function resolveLogicalPath( + filename: string, + existingPaths: Set +): string { + const base = `attachments/${filename}` + if (!existingPaths.has(base)) { + return base + } + + const ext = filename.includes('.') ? filename.slice(filename.lastIndexOf('.')) : '' + const name = filename.includes('.') + ? filename.slice(0, filename.lastIndexOf('.')) + : filename + + let counter = 2 + while (existingPaths.has(`attachments/${name}-${counter}${ext}`)) { + counter++ + } + + return `attachments/${name}-${counter}${ext}` } diff --git a/app/src/entities/file/decryption-bridge.ts b/app/src/entities/file/decryption-bridge.ts new file mode 100644 index 00000000..67567581 --- /dev/null +++ b/app/src/entities/file/decryption-bridge.ts @@ -0,0 +1,372 @@ +/** + * Decryption Bridge for Web Components + * + * Provides a global interface for Web Components to request file decryption + * without direct access to React context. 
+ */ + +import { API_BASE_URL } from '@/shared/lib/config' + +import { + downloadAttachment, + extractDocumentIdFromUrl, + buildFileMap, + type FileMap, + type FileMapEntry, +} from './api' + +/** Decryption context set by React app */ +interface DecryptionContext { + /** Document encryption key */ + dek: Uint8Array | null + token?: string +} + +const contextRegistry = new Map() +let defaultContext: DecryptionContext | null = null + +// File map registry: documentId → FileMap +const fileMapRegistry = new Map() +const fileMapInitPromises = new Map>() +// Store DEK used for each document's file map (for fallback context) +const fileMapDekRegistry = new Map() + +// Blob URL cache: "documentId:logicalPath" → { blobUrl, filename, mimeType } +// This ensures each unique path gets a consistent blob URL across renders +const blobUrlCache = new Map() +// In-flight decryption requests to prevent duplicate downloads +const pendingDecryptions = new Map>() + +/** + * Set decryption context for a specific document + */ +export function setDecryptionContext(documentId: string, context: DecryptionContext): void { + contextRegistry.set(documentId, context) +} + +/** + * Remove decryption context for a document + */ +export function clearDecryptionContext(documentId: string): void { + contextRegistry.delete(documentId) +} + +/** + * Set default decryption context (used when documentId is not in registry) + */ +export function setDefaultDecryptionContext(context: DecryptionContext | null): void { + defaultContext = context +} + +/** + * Get decryption context for a document + */ +export function getDecryptionContext(documentId: string): DecryptionContext | null { + return contextRegistry.get(documentId) ?? defaultContext +} + +/** + * Download and decrypt a file, returning a blob URL + * + * @param url - File URL + * @param documentIdHint - Optional documentId hint for URLs that don't include it + * @returns Object with blob URL, filename, and mimeType, or null if decryption fails + */ +export async function downloadAndDecrypt( + url: string, + documentIdHint?: string +): Promise<{ blobUrl: string; filename: string; mimeType: string } | null> { + // Try to extract documentId from URL, fall back to hint + let documentId = extractDocumentIdFromUrl(url) + if (!documentId && documentIdHint) { + documentId = documentIdHint + } + if (!documentId) { + console.warn('[Decrypt] Could not extract documentId from URL:', url) + return null + } + + const context = getDecryptionContext(documentId) ?? defaultContext + // Fallback to DEK stored during initFileMap if context is not available + // This handles race conditions during SPA navigation where cleanup runs after new context is set + const dek = context?.dek ?? fileMapDekRegistry.get(documentId) + if (!dek) { + console.warn('[Decrypt] No DEK available for document:', documentId) + return null + } + + try { + const result = await downloadAttachment(documentId, url, { + dek, + token: context?.token, + }) + + const blobUrl = URL.createObjectURL(result.blob) + return { + blobUrl, + filename: result.filename, + mimeType: result.mimeType, + } + } catch (error) { + console.error('[Decrypt] Failed to decrypt file:', error) + return null + } +} + +/** + * Resolve a logical path and decrypt the file + * + * This is the unified entry point for decrypting files from logical paths. 
+ * It handles: file map lookup → API URL construction → download & decrypt + * + * @param logicalPath - Logical path (e.g., "./attachments/photo.png" or "attachments/photo.png") + * @param documentId - Document ID + * @returns Object with blob URL, filename, and mimeType, or null if failed + */ +export async function resolveAndDecrypt( + logicalPath: string, + documentId: string +): Promise<{ blobUrl: string; filename: string; mimeType: string } | null> { + // Normalize path (remove leading ./) + const normalizedPath = logicalPath.startsWith('./') ? logicalPath.slice(2) : logicalPath + const cacheKey = `${documentId}:${normalizedPath}` + + // Check cache first + const cached = blobUrlCache.get(cacheKey) + if (cached) { + return cached + } + + // Check if there's already a pending request for this path + const pending = pendingDecryptions.get(cacheKey) + if (pending) { + return pending + } + + // Create the decryption promise + const decryptionPromise = (async (): Promise<{ blobUrl: string; filename: string; mimeType: string } | null> => { + try { + // Wait for file map to be ready + let fileMap = await waitForFileMap(documentId) + + // If file map not initialized yet, try to initialize with default context + // This handles SPA navigation where the context is set but initFileMap hasn't been called yet + if (!fileMap) { + const context = getDecryptionContext(documentId) ?? defaultContext + if (context?.dek) { + fileMap = await initFileMap(documentId, context.dek) + } + } + + if (!fileMap) { + console.warn('[resolveAndDecrypt] No file map available for document:', documentId) + return null + } + + // Look up file entry directly from the returned map (not from registry again) + // This avoids race conditions where the registry might have changed + const fileEntry = fileMap.get(normalizedPath) + if (!fileEntry) { + return null + } + + // Build API URL and decrypt + const apiUrl = `${API_BASE_URL}/api/files/${fileEntry.fileId}` + const result = await downloadAndDecrypt(apiUrl, documentId) + + // Cache successful results + if (result) { + blobUrlCache.set(cacheKey, result) + } + + return result + } finally { + pendingDecryptions.delete(cacheKey) + } + })() + + pendingDecryptions.set(cacheKey, decryptionPromise) + return decryptionPromise +} + +/** + * Revoke a blob URL to free memory + */ +export function revokeBlobUrl(blobUrl: string): void { + try { + URL.revokeObjectURL(blobUrl) + } catch { + // Ignore errors + } +} + +/** + * Initialize file map for a document + * + * This fetches the file list and decrypts metadata to build a + * logicalPath → fileId mapping. 
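+ *
+ * Expected call order, as a sketch (how the DEK is obtained is app-specific;
+ * getDocumentDek() below is a hypothetical placeholder):
+ *
+ *   const dek = await getDocumentDek(documentId) // hypothetical key lookup
+ *   await initFileMap(documentId, dek, shareToken)
+ *   const entry = resolveFileByPath(documentId, 'attachments/photo.png')
+ *   // entry?.fileId is what resolveAndDecrypt feeds to /api/files/{fileId}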
+ * + * @param documentId - Document ID + * @param dek - Document encryption key + * @param token - Optional share token for authentication + */ +export async function initFileMap(documentId: string, dek: Uint8Array, token?: string): Promise { + // Check if initialization is in progress + const inProgress = fileMapInitPromises.get(documentId) + if (inProgress) { + return inProgress + } + + // Check if already initialized (and no pending uploads) + const existing = fileMapRegistry.get(documentId) + if (existing && existing.size > 0) { + return existing + } + + // Store DEK for fallback context + fileMapDekRegistry.set(documentId, dek) + + // Start initialization + const initPromise = (async () => { + try { + const fileMap = await buildFileMap(documentId, dek, token) + + // Merge with any entries added while we were fetching + // (e.g., from concurrent uploads via addFileToMap) + const currentMap = fileMapRegistry.get(documentId) + if (currentMap) { + for (const [key, value] of currentMap) { + // Only add if not already in server response + if (!fileMap.has(key)) { + fileMap.set(key, value) + } + } + } + + fileMapRegistry.set(documentId, fileMap) + return fileMap + } finally { + fileMapInitPromises.delete(documentId) + } + })() + + fileMapInitPromises.set(documentId, initPromise) + return initPromise +} + +/** + * Get file map for a document (must be initialized first) + */ +export function getFileMap(documentId: string): FileMap | undefined { + return fileMapRegistry.get(documentId) +} + +/** + * Wait for file map initialization to complete + * Returns the file map if initialized, or waits for pending initialization + */ +export async function waitForFileMap(documentId: string): Promise { + const existing = fileMapRegistry.get(documentId) + if (existing) { + return existing + } + + const pending = fileMapInitPromises.get(documentId) + if (pending) { + return pending + } + + return undefined +} + +/** + * Clear file map for a document + */ +export function clearFileMap(documentId: string): void { + fileMapRegistry.delete(documentId) + fileMapInitPromises.delete(documentId) + fileMapDekRegistry.delete(documentId) + + // Also clear blob URL cache for this document + const prefix = `${documentId}:` + for (const [key, value] of blobUrlCache) { + if (key.startsWith(prefix)) { + try { + URL.revokeObjectURL(value.blobUrl) + } catch { + // Ignore errors + } + blobUrlCache.delete(key) + } + } + + // Clear any pending decryptions + for (const key of pendingDecryptions.keys()) { + if (key.startsWith(prefix)) { + pendingDecryptions.delete(key) + } + } +} + +/** + * Resolve a logical path to a file ID + * + * @param documentId - Document ID + * @param logicalPath - Logical path (e.g., "attachments/photo.png") + * @returns FileMapEntry if found + */ +export function resolveFileByPath( + documentId: string, + logicalPath: string +): FileMapEntry | undefined { + const fileMap = fileMapRegistry.get(documentId) + if (!fileMap) { + return undefined + } + return fileMap.get(logicalPath) +} + +/** + * Add a file to the map (used after upload) + */ +export function addFileToMap(documentId: string, entry: FileMapEntry): void { + let fileMap = fileMapRegistry.get(documentId) + if (!fileMap) { + fileMap = new Map() + fileMapRegistry.set(documentId, fileMap) + } + fileMap.set(entry.logicalPath, entry) +} + +/** + * Get existing logical paths for collision detection + * Waits for file map initialization if pending + */ +export async function getExistingPaths(documentId: string): Promise> { + const fileMap = await 
waitForFileMap(documentId) + if (!fileMap) { + return new Set() + } + return new Set(fileMap.keys()) +} + +// Expose to global for Web Components +if (typeof window !== 'undefined') { + ;(window as any).__refmd_file_decryption__ = { + downloadAndDecrypt, + resolveAndDecrypt, + revokeBlobUrl, + setDecryptionContext, + clearDecryptionContext, + setDefaultDecryptionContext, + getDecryptionContext, + // File map functions + initFileMap, + getFileMap, + waitForFileMap, + clearFileMap, + resolveFileByPath, + addFileToMap, + getExistingPaths, + } +} diff --git a/app/src/entities/file/index.ts b/app/src/entities/file/index.ts index 0bf83f78..43678770 100644 --- a/app/src/entities/file/index.ts +++ b/app/src/entities/file/index.ts @@ -1,2 +1,3 @@ export * from './api' +export * from './decryption-bridge' diff --git a/app/src/entities/git/api/index.ts b/app/src/entities/git/api/index.ts index 520eeecc..2aec3922 100644 --- a/app/src/entities/git/api/index.ts +++ b/app/src/entities/git/api/index.ts @@ -1,89 +1,27 @@ +/** + * Git API layer - only exports server-side config API. + * All Git operations (status, sync, pull, history, etc.) are now handled client-side + * via @/features/git-sync for E2EE compatibility. + */ import { createOrUpdateConfig as apiCreateOrUpdateConfig, - deinitRepository as apiDeinitRepository, - getChanges as apiGetChanges, - getCommitDiff as apiGetCommitDiff, + deleteConfig as apiDeleteConfig, getConfig as apiGetConfig, - getHistory as apiGetHistory, - getStatus as apiGetStatus, - getWorkingDiff as apiGetWorkingDiff, - ignoreDocument as apiIgnoreDocument, - ignoreFolder as apiIgnoreFolder, - initRepository as apiInitRepository, - pullRepository as apiPullRepository, - startPullSession as apiStartPullSession, - getPullSession as apiGetPullSession, - resolvePullSession as apiResolvePullSession, - finalizePullSession as apiFinalizePullSession, - importRepository as apiImportRepository, - syncNow as apiSyncNow, } from '@/shared/api' import type { - GitChangesResponse, - GitHistoryResponse, - GitImportResponse, - GitPullResponse, - GitPullSessionResponse, - GitStatus, - ImportRepositoryData, - ImportRepositoryResponse, - PullRepositoryData, - TextDiffResult, + CreateGitConfigRequest, + GitConfigResponse, } from '@/shared/api' export const gitKeys = { all: ['git'] as const, - config: () => ['git','config'] as const, - status: () => ['git','status'] as const, - changes: () => ['git','changes'] as const, - history: () => ['git','history'] as const, - diffWorking: () => ['git','diff','working'] as const, - diffCommits: (from: string, to: string) => ['git','diff','commits', { from, to }] as const, -} - -// Use-case oriented helpers (thin wrappers) to decouple features from raw service signatures -export async function fetchStatus(): Promise { - return apiGetStatus() -} - -export async function fetchChanges(): Promise { - return apiGetChanges() -} - -export async function fetchHistory(): Promise { - return apiGetHistory() -} - -export async function fetchCommitDiff(from: string, to: string): Promise { - return apiGetCommitDiff({ _from: from, to }) + config: () => ['git', 'config'] as const, } export { - apiGetStatus as getStatus, apiGetConfig as getConfig, - apiGetChanges as getChanges, - apiGetHistory as getHistory, - apiGetWorkingDiff as getWorkingDiff, - apiGetCommitDiff as getCommitDiff, apiCreateOrUpdateConfig as createOrUpdateConfig, - apiDeinitRepository as deinitRepository, - apiInitRepository as initRepository, - apiPullRepository as pullRepository, - apiStartPullSession as 
startPullSession, - apiGetPullSession as getPullSession, - apiResolvePullSession as resolvePullSession, - apiFinalizePullSession as finalizePullSession, - apiImportRepository as importRepository, - apiSyncNow as syncNow, - apiIgnoreDocument as ignoreDocument, - apiIgnoreFolder as ignoreFolder, + apiDeleteConfig as deleteConfig, } -export type { - GitImportResponse, - GitPullResponse, - GitPullSessionResponse, - ImportRepositoryData, - ImportRepositoryResponse, - PullRepositoryData, -} +export type { CreateGitConfigRequest, GitConfigResponse } diff --git a/app/src/entities/markdown/index.ts b/app/src/entities/markdown/index.ts deleted file mode 100644 index ef75c852..00000000 --- a/app/src/entities/markdown/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { RenderManyRequest, RenderManyResponse, RenderRequest, RenderResponseBody } from '@/shared/api' -import { renderMarkdown as apiRenderMarkdown, renderMarkdownMany as apiRenderMarkdownMany } from '@/shared/api' - -export type { RenderRequest as MarkdownRenderRequest, RenderResponseBody as MarkdownRenderResponse } from '@/shared/api' - -export async function renderMarkdown(request: RenderRequest): Promise { - return apiRenderMarkdown({ requestBody: request }) -} - -export async function renderMarkdownMany(request: RenderManyRequest): Promise { - return apiRenderMarkdownMany({ requestBody: request }) -} diff --git a/app/src/entities/plugin/api/index.ts b/app/src/entities/plugin/api/index.ts index 4a336ad7..176d2340 100644 --- a/app/src/entities/plugin/api/index.ts +++ b/app/src/entities/plugin/api/index.ts @@ -2,7 +2,6 @@ import { listRecords as apiListRecords, pluginsCreateRecord as apiPluginsCreateRecord, pluginsDeleteRecord as apiPluginsDeleteRecord, - pluginsExecAction as apiPluginsExecAction, pluginsGetKv as apiPluginsGetKv, pluginsGetManifest as apiPluginsGetManifest, pluginsInstallFromUrl as apiPluginsInstallFromUrl, @@ -40,20 +39,7 @@ export async function getPluginManifest(token?: string): Promise apiPluginsGetManifest()) } -export async function execPluginAction( - pluginId: string, - action: string, - payload: Record | undefined, - token?: string, -) { - return withShareAuthorization(token, () => - apiPluginsExecAction({ - plugin: pluginId, - action, - requestBody: { payload }, - }), - ) -} +// execPluginAction removed: E2EE requires client-side WASM execution (see runtime.ts) export async function listPluginRecords( pluginId: string, diff --git a/app/src/entities/plugin/index.ts b/app/src/entities/plugin/index.ts index d6c108c9..ab8701c6 100644 --- a/app/src/entities/plugin/index.ts +++ b/app/src/entities/plugin/index.ts @@ -1,3 +1,4 @@ export * from './api' export * from './hooks/usePluginManifest' -export * from './hooks/usePluginExecutor' +// Re-export from features for backward compatibility +export { usePluginExecutor } from '@/features/plugins/hooks/usePluginExecutor' diff --git a/app/src/entities/public/api/index.ts b/app/src/entities/public/api/index.ts index a4707fc7..5a839526 100644 --- a/app/src/entities/public/api/index.ts +++ b/app/src/entities/public/api/index.ts @@ -3,13 +3,17 @@ import { useQuery } from '@tanstack/react-query' import { getPublicByWorkspaceAndId as apiGetPublicByWorkspaceAndId, getPublicContentByWorkspaceAndId as apiGetPublicContentByWorkspaceAndId, + getPublicFile as apiGetPublicFile, getPublishStatus as apiGetPublishStatus, getWorkspacePermissions as apiGetWorkspacePermissions, + listPublicFiles as apiListPublicFiles, listWorkspacePublicDocuments as apiListWorkspacePublicDocuments, 
publishDocument as apiPublishDocument, unpublishDocument as apiUnpublishDocument, + updatePublishSettings as apiUpdatePublishSettings, + uploadPublicFile as apiUploadPublicFile, } from '@/shared/api' -import type { PublicDocumentSummary } from '@/shared/api' +import type { PublicDocumentSummary, PublicFile } from '@/shared/api' export const publicKeys = { all: ['public'] as const, @@ -40,8 +44,21 @@ export async function getPublicContentByWorkspaceAndId(slug: string, id: string) return apiGetPublicContentByWorkspaceAndId({ slug, id }) } -export async function publishDocument(id: string) { - return apiPublishDocument({ id }) +export type PublishDocumentOptions = { + plaintextTitle?: string + plaintextContent?: string + noindex?: boolean +} + +export async function publishDocument(id: string, options?: PublishDocumentOptions) { + return apiPublishDocument({ + id, + requestBody: options ? { + plaintextTitle: options.plaintextTitle, + plaintextContent: options.plaintextContent, + noindex: options.noindex, + } : undefined, + }) } export async function unpublishDocument(id: string) { @@ -52,6 +69,44 @@ export async function getPublishStatus(id: string) { return apiGetPublishStatus({ id }) } +export async function updatePublishSettings(id: string, noindex: boolean) { + return apiUpdatePublishSettings({ + id, + requestBody: { noindex }, + }) +} + export async function getWorkspacePermissions(workspaceId: string) { return apiGetWorkspacePermissions({ id: workspaceId }) } + +// --- Public file helpers --- + +export type UploadPublicFileOptions = { + originalFilename: string + logicalFilename: string + mimeType: string + content: string // Base64 encoded +} + +export async function uploadPublicFile( + docId: string, + fileId: string, + options: UploadPublicFileOptions +) { + return apiUploadPublicFile({ + id: docId, + fileId, + requestBody: options, + }) +} + +export async function listPublicFiles(slug: string, docId: string): Promise { + return apiListPublicFiles({ slug, id: docId }) as Promise +} + +export async function getPublicFile(slug: string, docId: string, filename: string) { + return apiGetPublicFile({ slug, id: docId, filename }) +} + +export type { PublicFile } diff --git a/app/src/entities/public/index.ts b/app/src/entities/public/index.ts index d605d0d6..280a2ce1 100644 --- a/app/src/entities/public/index.ts +++ b/app/src/entities/public/index.ts @@ -1,3 +1,5 @@ export * from './api' export { buildCanonicalUrl, buildOgImageUrl } from './lib/seo' export type { OgImageUrlOptions } from './lib/seo' +export { uploadPublicFilesForDocument } from './lib/upload-public-files' +export { rewritePublicAttachmentUrls } from './lib/rewrite-attachment-urls' diff --git a/app/src/entities/public/lib/rewrite-attachment-urls.ts b/app/src/entities/public/lib/rewrite-attachment-urls.ts new file mode 100644 index 00000000..2ac44b69 --- /dev/null +++ b/app/src/entities/public/lib/rewrite-attachment-urls.ts @@ -0,0 +1,25 @@ +import { API_BASE_URL } from '@/shared/lib/config' + +/** + * Rewrites attachment URLs in markdown content for public pages. 
+ * Transforms relative paths like `./attachments/filename.ext` or `attachments/filename.ext` + * to the public files API endpoint: `/api/public/workspaces/{slug}/{id}/files/{filename}` + */ +export function rewritePublicAttachmentUrls( + content: string, + slug: string, + documentId: string +): string { + // Match patterns like: + // - ./attachments/filename.ext + // - attachments/filename.ext + // Captures the full filename including extension + const attachmentPattern = /(?:\.\/)?attachments\/([^)\s"']+)/gi + + const apiBase = API_BASE_URL || '' + + return content.replace(attachmentPattern, (_match, filename: string) => { + // Build the public files API URL with the full filename + return `${apiBase}/api/public/workspaces/${encodeURIComponent(slug)}/${encodeURIComponent(documentId)}/files/${encodeURIComponent(filename)}` + }) +} diff --git a/app/src/entities/public/lib/upload-public-files.ts b/app/src/entities/public/lib/upload-public-files.ts new file mode 100644 index 00000000..108fee8c --- /dev/null +++ b/app/src/entities/public/lib/upload-public-files.ts @@ -0,0 +1,82 @@ +import { getFile } from '@/shared/api' +import { decryptFile, isRmeFile } from '@/shared/lib/files' + +import { buildFileMap } from '@/entities/file' + +import { uploadPublicFile } from '../api' + +export interface UploadPublicFilesOptions { + documentId: string + /** Document encryption key */ + dek: Uint8Array +} + +/** + * Upload decrypted attachments for a published E2EE document. + * Downloads each file, decrypts it, and uploads to the public files API. + * + * @param options - Options including document ID and DEK + */ +export async function uploadPublicFilesForDocument( + options: UploadPublicFilesOptions +): Promise<{ uploaded: number; failed: number }> { + const { documentId, dek } = options + + // Build file map to get decrypted metadata + const fileMap = await buildFileMap(documentId, dek) + if (fileMap.size === 0) { + return { uploaded: 0, failed: 0 } + } + + let uploaded = 0 + let failed = 0 + + await Promise.all( + Array.from(fileMap.values()).map(async (fileEntry) => { + try { + // Download file using API client + const fileBlob = await getFile({ id: fileEntry.fileId }) + const encryptedBytes = new Uint8Array(await fileBlob.arrayBuffer()) + + // Decrypt file if encrypted + let decryptedContent: Uint8Array + let filename = fileEntry.filename + let mimeType = fileEntry.mimeType + + if (isRmeFile(encryptedBytes)) { + const decrypted = await decryptFile(encryptedBytes, dek) + decryptedContent = decrypted.content + filename = decrypted.metadata.filename + mimeType = decrypted.metadata.mimeType + } else { + decryptedContent = encryptedBytes + } + + // Convert to base64 + let binary = '' + for (let i = 0; i < decryptedContent.length; i++) { + binary += String.fromCharCode(decryptedContent[i]) + } + const base64Content = btoa(binary) + + // Extract logical filename from logicalPath for matching in markdown + // logicalPath is like "attachments/filename.png" + const logicalFilename = fileEntry.logicalPath.split('/').pop() || filename + + // Upload to public files API + await uploadPublicFile(documentId, fileEntry.fileId, { + originalFilename: filename, + mimeType, + content: base64Content, + logicalFilename, + }) + uploaded++ + } catch (err) { + console.error('[uploadPublicFiles] Failed:', fileEntry.fileId, err) + failed++ + } + }) + ) + + return { uploaded, failed } +} diff --git a/app/src/entities/share/api/index.ts b/app/src/entities/share/api/index.ts index 603cd086..dbc37113 100644 --- 
diff --git a/app/src/entities/share/api/index.ts b/app/src/entities/share/api/index.ts
index 603cd086..dbc37113 100644
--- a/app/src/entities/share/api/index.ts
+++ b/app/src/entities/share/api/index.ts
@@ -11,7 +11,7 @@ import {
   listShareMounts as apiListShareMounts,
   validateShareToken as apiValidateShareToken,
 } from '@/shared/api'
-import type { ActiveShareItem, ShareMountItem } from '@/shared/api'
+import type { ActiveShareItem, ShareMountItem, CreateShareRequest } from '@/shared/api'

 export const shareKeys = {
   all: ['shares'] as const,
@@ -35,8 +35,8 @@ export function useActiveShares() {
   return useQuery(activeSharesQuery())
 }

-export function useShareMounts() {
-  return useQuery(shareMountsQuery())
+export function useShareMounts(options?: { enabled?: boolean }) {
+  return useQuery({ ...shareMountsQuery(), enabled: options?.enabled ?? true })
 }

 // Use-case oriented helpers
@@ -55,8 +55,8 @@ export async function listDocumentShares(id: string) {
   return apiListDocumentShares({ id })
 }

-export async function createShare(input: { document_id: string; permission: string; expires_at?: string | null; scope?: 'document' | 'folder'; parent_share_id?: string | null }) {
-  return apiCreateShare({ requestBody: input as any })
+export async function createShare(input: CreateShareRequest) {
+  return apiCreateShare({ requestBody: input })
 }

 export async function deleteShare(token: string) {
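With the generated `CreateShareRequest` type replacing the inline type and the `as any` cast, call sites get compile-time checking. A sketch — assuming the generated type keeps the same snake_case fields as the removed inline type:

```ts
import { createShare } from '@/entities/share'

// Illustrative values only; field names follow the inline type this change replaces.
async function shareForReview(documentId: string) {
  return createShare({
    document_id: documentId,
    permission: 'view',
    scope: 'document',
    expires_at: null,
  })
}
```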
diff --git a/app/src/entities/tag/api/index.ts b/app/src/entities/tag/api/index.ts
index d3294160..c40b682e 100644
--- a/app/src/entities/tag/api/index.ts
+++ b/app/src/entities/tag/api/index.ts
@@ -1,11 +1,200 @@
-import { listTags as apiListTags } from '@/shared/api'
+/**
+ * Tag API with E2EE support
+ *
+ * All tags are deterministically encrypted using HMAC-SHA256.
+ * This allows server-side grouping while keeping tag names private.
+ */
+import {
+  listTags as apiListTags,
+  getDocumentTags as apiGetDocumentTags,
+  updateDocumentTags as apiUpdateDocumentTags,
+} from '@/shared/api'
+import { extractTags, getTagLookupManager } from '@/shared/lib/tags'
+
+// Query keys for React Query
 export const tagKeys = {
   all: ['tags'] as const,
-  list: (q?: string) => ['tags',{ q: q ?? '' }] as const,
+  list: (workspaceId: string) => ['tags', { workspaceId }] as const,
+  document: (documentId: string) => ['tags', 'document', documentId] as const,
+}
+
+// Types
+export interface DecryptedTag {
+  name: string
+  documentCount: number
+}
+
+export interface DecryptedDocumentTag {
+  id: string
+  name: string
+  createdAt: string
+}
+
+/**
+ * List all tags for a workspace (decrypted).
+ *
+ * @param kek - Workspace KEK for decryption
+ * @returns Array of decrypted tags with document counts
+ */
+export async function listDecryptedTags(kek: Uint8Array): Promise<DecryptedTag[]> {
+  const response = await apiListTags({})
+
+  if (!response.tags || response.tags.length === 0) {
+    return []
+  }
+
+  // Setup lookup manager with provided KEK
+  const lookupManager = getTagLookupManager()
+  lookupManager.setKek(kek)
+
+  // Try to decrypt each tag
+  const results: DecryptedTag[] = []
+  for (const tag of response.tags) {
+    const decrypted = await lookupManager.decrypt(tag.encryptedName)
+    results.push({
+      name: decrypted ?? tag.encryptedName, // Fallback to encrypted if unknown
+      documentCount: tag.documentCount,
+    })
+  }
+
+  return results
+}
+
+/**
+ * Get tags for a specific document (decrypted).
+ *
+ * @param documentId - Document ID
+ * @param kek - Workspace KEK for decryption
+ * @returns Array of decrypted document tags
+ */
+export async function getDecryptedDocumentTags(
+  documentId: string,
+  kek: Uint8Array
+): Promise<DecryptedDocumentTag[]> {
+  const response = await apiGetDocumentTags({ id: documentId })
+
+  if (!response.tags || response.tags.length === 0) {
+    return []
+  }
+
+  // Setup lookup manager with provided KEK
+  const lookupManager = getTagLookupManager()
+  lookupManager.setKek(kek)
+
+  // Try to decrypt each tag
+  const results: DecryptedDocumentTag[] = []
+  for (const tag of response.tags) {
+    const decrypted = await lookupManager.decrypt(tag.encryptedName)
+    results.push({
+      id: tag.id,
+      name: decrypted ?? tag.encryptedName,
+      createdAt: tag.createdAt,
+    })
+  }
+
+  return results
+}
+
+/**
+ * Update document tags with encryption.
+ *
+ * @param documentId - Document ID
+ * @param kek - Workspace KEK for encryption
+ * @param tags - Array of plaintext tag names
+ */
+export async function updateEncryptedDocumentTags(
+  documentId: string,
+  kek: Uint8Array,
+  tags: string[]
+): Promise<void> {
+  if (tags.length === 0) {
+    // Clear all tags
+    await apiUpdateDocumentTags({
+      id: documentId,
+      requestBody: { encryptedTags: [] },
+    })
+    return
+  }
+
+  // Setup lookup manager with provided KEK
+  const lookupManager = getTagLookupManager()
+  lookupManager.setKek(kek)
+
+  // Encrypt each tag and add to known tags
+  const encryptedTags = await Promise.all(
+    tags.map(async (tag) => {
+      const encrypted = await lookupManager.encrypt(tag)
+      return { encryptedName: encrypted }
+    })
+  )
+
+  await apiUpdateDocumentTags({
+    id: documentId,
+    requestBody: { encryptedTags },
+  })
+}
+
+/**
+ * Extract tags from markdown content and update document tags.
+ *
+ * This is the main function to call when a document is saved.
+ * It extracts #tags from the markdown and sends encrypted tags to the server.
+ *
+ * @param documentId - Document ID
+ * @param kek - Workspace KEK for encryption
+ * @param markdownContent - Raw markdown content
+ * @returns Array of extracted tag names
+ */
+export async function updateDocumentTagsFromContent(
+  documentId: string,
+  kek: Uint8Array,
+  markdownContent: string
+): Promise<string[]> {
+  // Extract tags from markdown
+  const tags = extractTags(markdownContent)
+
+  // Update with encrypted tags
+  await updateEncryptedDocumentTags(documentId, kek, tags)
+
+  return tags
+}
+
+/**
+ * Add known tags to the lookup manager for decryption.
+ *
+ * Call this when you know the plaintext of some tags
+ * (e.g., from document content extraction).
+ *
+ * @param tags - Array of plaintext tag names
+ */
+export function addKnownTags(tags: string[]): void {
+  const lookupManager = getTagLookupManager()
+  lookupManager.addKnownTags(tags)
+}
+
+/**
+ * Encrypt a plaintext tag for API calls.
+ *
+ * @param tag - Plaintext tag name
+ * @param kek - Workspace KEK for encryption
+ * @returns Base64-encoded encrypted tag
+ */
+export async function encryptTagForApi(
+  tag: string,
+  kek: Uint8Array
+): Promise<string> {
+  const lookupManager = getTagLookupManager()
+  lookupManager.setKek(kek)
+  return lookupManager.encrypt(tag)
 }

-// Use-case oriented helpers
+/**
+ * Legacy function for backward compatibility.
+ * Simply calls the API without decryption.
+ *
+ * @deprecated Use listDecryptedTags instead
+ */
 export async function listTags(q?: string) {
-  return apiListTags({ q: q as any })
+  return apiListTags({ q: q as string | undefined })
 }
diff --git a/app/src/entities/tag/index.ts b/app/src/entities/tag/index.ts
index 0bf83f78..72c3c7c7 100644
--- a/app/src/entities/tag/index.ts
+++ b/app/src/entities/tag/index.ts
@@ -1,2 +1,3 @@
 export * from './api'
+export type { DecryptedTag, DecryptedDocumentTag } from './api'
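The header comment pins down the scheme: deterministic HMAC-SHA256, so equal tag names produce equal ciphertexts and the server can group and count them without learning plaintext. The lookup manager's internals are not in this diff; the sketch below shows the core idea with WebCrypto — key handling, the `toLowerCase` normalization, and the base64 encoding are all assumptions:

```ts
// Deterministic tag token: HMAC-SHA256(kek, tag), base64-encoded.
// Equal tags map to equal tokens, enabling server-side grouping.
async function deriveTagToken(kek: Uint8Array, tag: string): Promise<string> {
  const key = await crypto.subtle.importKey(
    'raw', kek, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign'],
  )
  // Normalization (assumed): case-fold so '#Rust' and '#rust' collide.
  const mac = await crypto.subtle.sign('HMAC', key, new TextEncoder().encode(tag.toLowerCase()))
  return btoa(String.fromCharCode(...new Uint8Array(mac)))
}
```

A MAC cannot be inverted, which explains the design above: `decrypt` can only match a token against known plaintexts (hence `addKnownTags`) and falls back to the encrypted name for unknown tags.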
diff --git a/app/src/entities/user/api/index.ts b/app/src/entities/user/api/index.ts
index f0109259..7c73716b 100644
--- a/app/src/entities/user/api/index.ts
+++ b/app/src/entities/user/api/index.ts
@@ -13,8 +13,32 @@ import {
   oauthState as apiOauthState,
   refreshSession as apiRefreshSession,
   listOauthProviders as apiListOauthProviders,
+  // Security APIs
+  getEncryptionStatus as apiGetSecurityStatus,
+  needsMigration as apiNeedsMigration,
+  migrate as apiMigrateUserData,
+  markEncryptionSetupComplete as apiMarkSecuritySetupComplete,
+  getMyPublicKey as apiGetMyPublicKey,
+  registerPublicKey as apiRegisterPublicKey,
+  getMasterKeyBackup as apiGetMasterKeyBackup,
+  storeMasterKeyBackup as apiStoreMasterKeyBackup,
+  getEncryptedPrivateKey as apiGetEncryptedPrivateKey,
+  storeEncryptedPrivateKey as apiStoreEncryptedPrivateKey,
+} from '@/shared/api'
+import type {
+  SessionResponse,
+  AuthProvidersResponse,
+  EncryptionStatusResponse,
+  NeedsMigrationResponse,
+  MigrationResponse,
+  UserPublicKeyResponse,
+  MasterKeyBackupResponse,
+  EncryptedPrivateKeyResponse,
+  MigrateRequest,
+  RegisterPublicKeyRequest,
+  StoreMasterKeyBackupRequest,
+  StoreEncryptedPrivateKeyRequest,
 } from '@/shared/api'
-import type { SessionResponse, AuthProvidersResponse } from '@/shared/api'

 export const userKeys = {
   me: () => ['me'] as const,
@@ -126,3 +150,83 @@ export function useRevokeSession(options?: {
     },
   })
 }
+
+// ============================================
+// Security / E2EE
+// ============================================
+
+export const securityKeys = {
+  status: () => ['security', 'status'] as const,
+  needsMigration: () => ['security', 'needs-migration'] as const,
+  publicKey: () => ['security', 'public-key'] as const,
+  masterKeyBackup: () => ['security', 'master-key-backup'] as const,
+  encryptedPrivateKey: () => ['security', 'encrypted-private-key'] as const,
+}
+
+// API wrapper functions
+export async function getSecurityStatus(): Promise<EncryptionStatusResponse> {
+  return apiGetSecurityStatus()
+}
+
+export async function checkNeedsMigration(): Promise<NeedsMigrationResponse> {
+  return apiNeedsMigration()
+}
+
+export async function migrateUserData(request: MigrateRequest): Promise<MigrationResponse> {
+  return apiMigrateUserData({ requestBody: request })
+}
+
+export async function markSecuritySetupComplete(): Promise<void> {
+  await apiMarkSecuritySetupComplete()
+}
+
+export async function getMyPublicKey(): Promise<UserPublicKeyResponse> {
+  return apiGetMyPublicKey()
+}
+
+export async function registerPublicKey(request: RegisterPublicKeyRequest): Promise<UserPublicKeyResponse> {
+  return apiRegisterPublicKey({ requestBody: request })
+}
+
+export async function getMasterKeyBackup(): Promise<MasterKeyBackupResponse> {
+  return apiGetMasterKeyBackup()
+}
+
+export async function storeMasterKeyBackup(request: StoreMasterKeyBackupRequest): Promise<MasterKeyBackupResponse> {
+  return apiStoreMasterKeyBackup({ requestBody: request })
+}
+
+export async function getEncryptedPrivateKey(): Promise<EncryptedPrivateKeyResponse> {
+  return apiGetEncryptedPrivateKey()
+}
+
+export async function storeEncryptedPrivateKey(request: StoreEncryptedPrivateKeyRequest): Promise<EncryptedPrivateKeyResponse> {
+  return apiStoreEncryptedPrivateKey({ requestBody: request })
+}
+
+// Query definitions
+export const securityStatusQuery = () => ({
+  queryKey: securityKeys.status(),
+  queryFn: () => getSecurityStatus(),
+  staleTime: 30_000,
+})
+
+export const needsMigrationQuery = () => ({
+  queryKey: securityKeys.needsMigration(),
+  queryFn: () => checkNeedsMigration(),
+  staleTime: 30_000,
+})
+
+// Re-export types
+export type {
+  EncryptionStatusResponse as SecurityStatusResponse,
+  NeedsMigrationResponse,
+  MigrationResponse,
+  UserPublicKeyResponse,
+  MasterKeyBackupResponse,
+  EncryptedPrivateKeyResponse,
+  MigrateRequest,
+  RegisterPublicKeyRequest,
+  StoreMasterKeyBackupRequest,
+  StoreEncryptedPrivateKeyRequest,
+}
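The query definitions pair with React Query in the usual way; a sketch of a consumer (the hook name and gating logic are illustrative, not part of the diff):

```ts
import { useQuery } from '@tanstack/react-query'

import { securityStatusQuery } from '@/entities/user'

// Hypothetical consumer: gate E2EE-dependent UI on the setup status.
// `isSetupCompleted` is the field the auth guard below also reads.
function useIsEncryptionReady(): boolean {
  const { data } = useQuery(securityStatusQuery())
  return data?.isSetupCompleted ?? false
}
```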
diff --git a/app/src/entities/workspace/api.ts b/app/src/entities/workspace/api.ts
index d6366112..67aa810e 100644
--- a/app/src/entities/workspace/api.ts
+++ b/app/src/entities/workspace/api.ts
@@ -1,17 +1,21 @@
 import {
-  OpenAPI,
   acceptInvitation as apiAcceptWorkspaceInvitation,
   createInvitation as apiCreateWorkspaceInvitation,
   createRole as apiCreateWorkspaceRole,
   createWorkspace as apiCreateWorkspace,
   deleteRole as apiDeleteWorkspaceRole,
+  deleteWorkspace as apiDeleteWorkspace,
+  getWorkspaceDetail as apiGetWorkspaceDetail,
+  leaveWorkspace as apiLeaveWorkspace,
   listInvitations as apiListWorkspaceInvitations,
   listMembers as apiListWorkspaceMembers,
   listRoles as apiListWorkspaceRoles,
   removeMember as apiRemoveWorkspaceMember,
   revokeInvitation as apiRevokeWorkspaceInvitation,
+  updateInvitationKek as apiUpdateInvitationKek,
   updateMemberRole as apiUpdateWorkspaceMemberRole,
   updateRole as apiUpdateWorkspaceRole,
+  updateWorkspace as apiUpdateWorkspace,
 } from '@/shared/api'
 import type {
   PermissionOverridePayload as ApiPermissionOverridePayload,
@@ -20,7 +24,6 @@ import type {
   WorkspaceResponse,
   WorkspaceRoleResponse as ApiWorkspaceRoleResponse,
 } from '@/shared/api'
-import { request as __request } from '@/shared/api/client/core/request'

 export const workspaceKeys = {
   members: (workspaceId?: string | null) => ['workspace-members', workspaceId] as const,
@@ -46,37 +49,19 @@ export type WorkspaceMemberResponse = ApiWorkspaceMemberResponse
 export type WorkspaceRoleResponse = ApiWorkspaceRoleResponse

 export function getWorkspace(id: string): Promise<WorkspaceResponse> {
-  return __request(OpenAPI, {
-    method: 'GET',
-    url: '/api/workspaces/{id}',
-    path: { id },
-  }) as Promise<WorkspaceResponse>
+  return apiGetWorkspaceDetail({ id }) as Promise<WorkspaceResponse>
 }

 export function updateWorkspace(id: string, body: UpdateWorkspacePayload): Promise<WorkspaceResponse> {
-  return __request(OpenAPI, {
-    method: 'PUT',
-    url: '/api/workspaces/{id}',
-    path: { id },
-    body,
-    mediaType: 'application/json',
-  }) as Promise<WorkspaceResponse>
+  return apiUpdateWorkspace({ id, requestBody: body }) as Promise<WorkspaceResponse>
 }

 export function deleteWorkspace(id: string): Promise<void> {
-  return __request(OpenAPI, {
-    method: 'DELETE',
-    url: '/api/workspaces/{id}',
-    path: { id },
-  }) as Promise<void>
+  return apiDeleteWorkspace({ id }) as Promise<void>
 }

 export function leaveWorkspace(id: string): Promise<void> {
-  return __request(OpenAPI, {
-    method: 'POST',
-    url: '/api/workspaces/{id}/leave',
-    path: { id },
-  }) as Promise<void>
+  return apiLeaveWorkspace({ id }) as Promise<void>
 }

 export function createWorkspace(body: CreateWorkspacePayload) {
@@ -164,3 +149,26 @@ export function updateWorkspaceRole(

 export function deleteWorkspaceRole(workspaceId: string, roleId: string) {
   return apiDeleteWorkspaceRole({ id: workspaceId, roleId })
 }
+
+/**
+ * Update invitation with encrypted KEK.
+ * Called after creating an invitation to attach the encrypted workspace KEK.
+ *
+ * @param workspaceId - Workspace ID
+ * @param invitationId - Invitation ID
+ * @param payload - Encrypted KEK data
+ */
+export async function updateInvitationKek(
+  workspaceId: string,
+  invitationId: string,
+  payload: {
+    encryptedKekForInvite: string
+    kekVersion: number
+  }
+): Promise<void> {
+  await apiUpdateInvitationKek({
+    id: workspaceId,
+    invitationId,
+    requestBody: payload,
+  })
+}
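`updateInvitationKek` is the second half of a two-step invite: create the invitation, then attach the workspace KEK wrapped for the invitee. A sketch of the call order — the create and wrap steps are passed in as functions because their concrete implementations are not part of this diff, and the import path and initial `kekVersion` are assumptions:

```ts
import { updateInvitationKek } from '@/entities/workspace/api' // path assumed

// Sketch of the two-step invite flow with hypothetical stand-ins for the
// invitation-creation and KEK-wrapping steps.
async function inviteWithKek(
  workspaceId: string,
  kek: Uint8Array,
  createInvitation: () => Promise<{ id: string }>,
  wrapKekForInvite: (kek: Uint8Array) => Promise<string>, // base64 ciphertext
) {
  const invitation = await createInvitation()
  const encryptedKekForInvite = await wrapKekForInvite(kek)
  await updateInvitationKek(workspaceId, invitation.id, {
    encryptedKekForInvite,
    kekVersion: 1, // assumed initial version
  })
}
```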
diff --git a/app/src/features/auth/lib/guards.ts b/app/src/features/auth/lib/guards.ts
index 86cc69d4..d32deabc 100644
--- a/app/src/features/auth/lib/guards.ts
+++ b/app/src/features/auth/lib/guards.ts
@@ -4,7 +4,7 @@ import { redirect } from '@tanstack/react-router'
 import type { UserResponse } from '@/shared/api'

 import { validateShareToken } from '@/entities/share'
-import { me as fetchCurrentUser, userKeys } from '@/entities/user'
+import { me as fetchCurrentUser, userKeys, getSecurityStatus, securityKeys } from '@/entities/user'

 import { getRuntimeAuthContext } from './runtime-context'
 import type { AuthMiddlewareContext, AuthRedirectTarget, AuthResolution } from './types'
@@ -135,6 +135,26 @@ function isAuthRoute(pathname: string) {
   return pathname.startsWith('/auth/')
 }

+async function needsSecuritySetup(ctx?: any): Promise<boolean> {
+  // Check cached status first
+  const queryClient = ctx?.context?.queryClient as QueryClient | undefined
+  if (queryClient) {
+    const cachedStatus = queryClient.getQueryData(securityKeys.status())
+    if (cachedStatus && typeof cachedStatus === 'object' && 'isSetupCompleted' in cachedStatus) {
+      return !(cachedStatus as { isSetupCompleted: boolean }).isSetupCompleted
+    }
+  }
+
+  // Fetch status from API
+  try {
+    const status = await getSecurityStatus()
+    return !status.isSetupCompleted
+  } catch {
+    // If we can't determine, assume setup is not needed to avoid blocking
+    return false
+  }
+}
+
 function getCachedUser(ctx?: any): UserResponse | null {
   const queryClient = ctx?.context?.queryClient as QueryClient | undefined
   if (!queryClient) return null
@@ -217,10 +237,20 @@ export async function resolveAuthRedirect(ctx?: any): Promise<AuthResolution> {
 }

 export async function appBeforeLoadGuard(ctx?: any) {
-  const { redirect: target } = await resolveAuthRedirect(ctx)
+  const { pathname } = resolveLocation(ctx)
+  const { redirect: target, authenticated } = await resolveAuthRedirect(ctx)
   if (target) {
     throw redirect(target)
   }
+
+  // Check if E2EE setup is needed for authenticated users
+  // Skip for auth routes (/auth/*) to prevent redirect loops
+  if (authenticated && !isAuthRoute(pathname)) {
+    const setupNeeded = await needsSecuritySetup(ctx)
+    if (setupNeeded) {
+      throw redirect({ to: '/auth/setup' as '/dashboard' })
+    }
+  }
 }

 export const requireAuthGuard = appBeforeLoadGuard
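The guard now runs in two stages: the auth redirect first, then the E2EE setup check for authenticated users. A sketch of how a route might wire it — the route path and TanStack Router usage here are illustrative:

```ts
import { createFileRoute } from '@tanstack/react-router'

import { appBeforeLoadGuard } from '@/features/auth/lib/guards'

// Illustrative route wiring: unauthenticated users are redirected first;
// authenticated users without completed E2EE setup land on /auth/setup.
export const Route = createFileRoute('/dashboard')({
  beforeLoad: appBeforeLoadGuard,
})
```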
diff --git a/app/src/features/auth/model/auth-context.tsx b/app/src/features/auth/model/auth-context.tsx
index 331a32b9..14032f67 100644
--- a/app/src/features/auth/model/auth-context.tsx
+++ b/app/src/features/auth/model/auth-context.tsx
@@ -19,8 +19,10 @@ import {

 import { updateRuntimeAuthContext } from '@/features/auth/lib/runtime-context'
 import type { AuthMiddlewareContext } from '@/features/auth/lib/types'
+import { getKeyManager } from '@/features/security/lib/keys'

 const WORKSPACE_STORAGE_KEY = 'refmd.activeWorkspaceId'
+const REMEMBER_ME_STORAGE_KEY = 'refmd.rememberMe'

 type AuthState = {
   user: UserResponse | null
@@ -29,6 +31,8 @@ type AuthState = {
   activeWorkspace: UserResponse['workspaces'][number] | null
   permissions: string[]
   loading: boolean
+  /** Whether "Remember Me" was selected during login */
+  rememberMe: boolean
   signIn: (email: string, password: string, options?: SignInOptions) => Promise<void>
   signInWithProvider: (provider: string, payload: OAuthPayload) => Promise<void>
   signUp: (email: string, name: string, password: string) => Promise<void>
@@ -65,6 +69,28 @@ function readStoredWorkspaceId() {
   }
 }

+function readStoredRememberMe(): boolean {
+  if (typeof window === 'undefined') return false
+  try {
+    return window.localStorage.getItem(REMEMBER_ME_STORAGE_KEY) === 'true'
+  } catch {
+    return false
+  }
+}
+
+function storeRememberMe(value: boolean): void {
+  if (typeof window === 'undefined') return
+  try {
+    if (value) {
+      window.localStorage.setItem(REMEMBER_ME_STORAGE_KEY, 'true')
+    } else {
+      window.localStorage.removeItem(REMEMBER_ME_STORAGE_KEY)
+    }
+  } catch {
+    // Ignore storage errors
+  }
+}
+
 export function AuthProvider({ children }: { children: React.ReactNode }) {
   const navigate = useNavigate()
   const queryClient = useQueryClient()
@@ -114,6 +140,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
     }
     return stored
   })
+  const [rememberMe, setRememberMe] = useState(() => readStoredRememberMe())

   useEffect(() => {
     if (hasInitialData) {
@@ -257,6 +284,10 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
       setUser(res.user)
       setRuntimeHasRefreshToken(true)
       runtimeRefreshRef.current = true
+      // Store rememberMe preference for E2EE
+      const remember = options?.remember ?? false
+      setRememberMe(remember)
+      storeRememberMe(remember)
       updateAuthContext({
         user: res.user,
         isAuthenticated: true,
@@ -277,6 +308,10 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
       setUser(res.user)
       setRuntimeHasRefreshToken(true)
       runtimeRefreshRef.current = true
+      // Store rememberMe preference for E2EE
+      const remember = payload.remember_me ?? false
+      setRememberMe(remember)
+      storeRememberMe(remember)
       updateAuthContext({
         user: res.user,
         isAuthenticated: true,
@@ -299,10 +334,20 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
     } catch (error) {
       console.warn('[auth] logout failed', error)
     }
+    // Clear E2EE keys from memory and storage
+    try {
+      const km = getKeyManager()
+      await km.logout()
+    } catch (error) {
+      console.warn('[auth] e2ee logout failed', error)
+    }
     queryClient.clear()
     setUser(null)
     setRuntimeHasRefreshToken(false)
     runtimeRefreshRef.current = false
+    // Clear rememberMe preference
+    setRememberMe(false)
+    storeRememberMe(false)
     updateAuthContext({
       user: null,
       isAuthenticated: false,
@@ -315,10 +360,20 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {

   const deleteAccount = useCallback(async () => {
     await deleteAccountApi()
+    // Clear E2EE keys from memory and storage
+    try {
+      const km = getKeyManager()
+      await km.logout()
+    } catch (error) {
+      console.warn('[auth] e2ee logout failed', error)
+    }
     queryClient.clear()
     setUser(null)
     setRuntimeHasRefreshToken(false)
     runtimeRefreshRef.current = false
+    // Clear rememberMe preference
+    setRememberMe(false)
+    storeRememberMe(false)
     updateAuthContext({
       user: null,
       isAuthenticated: false,
@@ -410,6 +465,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
     activeWorkspace,
     permissions,
     loading,
+    rememberMe,
     signIn,
     signInWithProvider,
     signUp,
@@ -424,6 +480,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
     activeWorkspace,
     permissions,
     loading,
+    rememberMe,
     signIn,
     signInWithProvider,
     signUp,
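The context now exposes the stored "Remember Me" choice so E2EE code can decide how long unlocked keys may live. A hypothetical consumer — how the key manager actually uses this flag is not part of this diff:

```ts
import { useAuthContext } from '@/features/auth'

// Hypothetical: derive a key-persistence policy from the login choice.
function useKeyPersistencePolicy(): 'persistent' | 'session-only' {
  const { rememberMe } = useAuthContext()
  return rememberMe ? 'persistent' : 'session-only'
}
```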
diff --git a/app/src/features/document-download/model/options.ts b/app/src/features/document-download/model/options.ts
index 9438c55f..6fa2a9cc 100644
--- a/app/src/features/document-download/model/options.ts
+++ b/app/src/features/document-download/model/options.ts
@@ -1,11 +1,11 @@
 import {
-  DOWNLOAD_FORMAT_METADATA,
-  type DocumentDownloadFormat,
-  type DocumentDownloadFormatMetadata,
-} from '@/entities/document'
+  EXPORT_FORMATS,
+  type ExportFormat,
+  type ExportFormatMetadata,
+} from '@/features/export'

 export type DownloadOption = {
-  format: DocumentDownloadFormat
+  format: ExportFormat
   label: string
   description: string
 }
@@ -16,10 +16,10 @@ export type DownloadOptionGroup = {
   items: DownloadOption[]
 }

-const PRIMARY_FORMATS: DocumentDownloadFormat[] = ['archive', 'markdown', 'html', 'pdf', 'docx']
+const PRIMARY_FORMATS: ExportFormat[] = ['archive', 'markdown', 'html', 'pdf', 'docx']

 export const PRIMARY_DOWNLOAD_OPTIONS: DownloadOption[] = PRIMARY_FORMATS.map((format) => {
-  const meta = DOWNLOAD_FORMAT_METADATA[format]
+  const meta = EXPORT_FORMATS[format]
   return { format, label: meta.label, description: meta.description }
 })
@@ -43,8 +43,8 @@ const GROUP_DESCRIPTIONS: Record<string, string> = {
   Manuals: 'Formats suited for manuals and reference pages.',
 }

-const METADATA_ENTRIES = Object.entries(DOWNLOAD_FORMAT_METADATA) as Array<
-  [DocumentDownloadFormat, DocumentDownloadFormatMetadata]
+const METADATA_ENTRIES = Object.entries(EXPORT_FORMATS) as Array<
+  [ExportFormat, ExportFormatMetadata]
 >

 export const OTHER_DOWNLOAD_FORMAT_GROUPS: DownloadOptionGroup[] = (() => {
diff --git a/app/src/features/document-download/ui/DocumentDownloadDialog.tsx b/app/src/features/document-download/ui/DocumentDownloadDialog.tsx
index c6d7b59b..ee129c7a 100644
--- a/app/src/features/document-download/ui/DocumentDownloadDialog.tsx
+++ b/app/src/features/document-download/ui/DocumentDownloadDialog.tsx
@@ -1,4 +1,4 @@
-import { Archive, Book, ChevronLeft, ChevronRight, FileDigit, FileText, FileType, Globe, Loader2 } from 'lucide-react'
+import { Archive, ChevronLeft, ChevronRight, FileDigit, FileText, FileType, Globe, Loader2 } from 'lucide-react'

 import { useCallback, useEffect, useState } from 'react'

@@ -8,51 +8,17 @@ import { Button } from '@/shared/ui/button'
 import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle } from '@/shared/ui/dialog'
 import { ScrollArea } from '@/shared/ui/scroll-area'

-import type { DocumentDownloadFormat } from '@/entities/document'
+import type { ExportFormat } from '@/features/export'

 import type { DownloadOption, DownloadOptionGroup } from '../model/options'

-const formatIcons: Partial<Record<DocumentDownloadFormat, React.ComponentType<{ className?: string }>>> = {
+// Icons for supported client-side export formats
+const formatIcons: Partial<Record<ExportFormat, React.ComponentType<{ className?: string }>>> = {
   archive: Archive,
   markdown: FileText,
   html: Globe,
-  html5: Globe,
   pdf: FileDigit,
   docx: FileType,
-  latex: FileDigit,
-  beamer: FileDigit,
-  context: FileDigit,
-  man: FileText,
-  mediawiki: FileText,
-  dokuwiki: FileText,
-  textile: FileText,
-  org: FileText,
-  texinfo: FileText,
-  opml: FileDigit,
-  docbook: FileDigit,
-  opendocument: FileType,
-  odt: FileType,
-  rtf: FileType,
-  epub: Book,
-  epub3: Book,
-  fb2: Book,
-  asciidoc: FileText,
-  icml: FileType,
-  slidy: Globe,
-  slideous: Globe,
-  dzslides: Globe,
-  revealjs: Globe,
-  s5: Globe,
-  json: FileDigit,
-  plain: FileText,
-  commonmark: FileText,
-  commonmark_x: FileText,
-  markdown_strict: FileText,
-  markdown_phpextra: FileText,
-  markdown_github: FileText,
-  rst: FileText,
-  native: FileDigit,
-  haddock: FileText,
 }

 type DocumentDownloadDialogProps = {
@@ -60,7 +26,7 @@ type DocumentDownloadDialogProps = {
   onOpenChange: (value: boolean) => void
   primaryOptions: DownloadOption[]
   otherGroups: DownloadOptionGroup[]
-  onSelect: (format: DocumentDownloadFormat) => void | Promise<void>
+  onSelect: (format: ExportFormat) => void | Promise<void>
   isPending: boolean
 }
diff --git a/app/src/features/edit-document/hooks/useAwarenessStyles.ts b/app/src/features/edit-document/hooks/useAwarenessStyles.ts
deleted file mode 100644
index 2269a6b7..00000000
--- a/app/src/features/edit-document/hooks/useAwarenessStyles.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-import { useEffect } from 'react'
-import type { Awareness } from 'y-protocols/awareness'
-
-type Options = {
-  userId?: string
-  userName?: string
-}
-
-/**
- * Syncs local user metadata to Yjs awareness and injects remote cursor styles.
- */
-export function useAwarenessStyles(awareness: Awareness | null | undefined, { userId, userName }: Options) {
-  useEffect(() => {
-    if (!awareness || (awareness as any)?._destroyed) return
-
-    const info = {
-      name: userName || `User-${awareness.clientID}`,
-      color: generateUserColor(userId),
-      colorLight: generateUserColor(userId, true),
-      id: userId || String(awareness.clientID),
-    }
-    awareness.setLocalStateField('user', info)
-
-    const style = document.createElement('style')
-    style.id = 'y-remote-cursor-styles'
-    document.head.appendChild(style)
-
-    const update = () => {
-      const states = awareness.getStates()
-      let css = ''
-      states.forEach((state: any, clientId: number) => {
-        if (state?.user && clientId !== awareness.clientID) {
-          const c = state.user.color || '#000'
-          const cl = state.user.colorLight || c
-          css += `
-            .yRemoteSelection-${clientId} { background-color: ${cl}; opacity: .5; }
-            .yRemoteSelectionHead-${clientId} { border-color: ${c}; border-width: 2px; }
-            .yRemoteSelectionHead-${clientId}::after {
-              content: '';
-              position: absolute;
-              left: -1px;
-              top: 0;
-              bottom: 0;
-              border-left: 2px solid ${c};
-            }
-            .yRemoteCursorLabel-${clientId} {
-              background-color: ${c};
-              color: #fff;
-              opacity: 1;
-              padding: 2px 4px;
-              border-radius: 2px;
-              font-size: 11px;
-              position: absolute;
-              z-index: 100;
-            }
-          `
-        }
-      })
-      style.textContent = css
-    }
-
-    update()
-    const handler = () => update()
-    awareness.on('update', handler)
-
-    return () => {
-      try { awareness.off('update', handler) } catch {}
-      style.remove()
-    }
-  }, [awareness, userId, userName])
-}
-
-function generateUserColor(userId?: string, light = false): string {
-  let hash = 0
-  const str = userId || Math.random().toString()
-  for (let i = 0; i < str.length; i++) {
-    const char = str.charCodeAt(i)
-    hash = ((hash << 5) - hash) + char
-    hash = hash & hash
-  }
-  const hue = Math.abs(hash) % 360
-  const saturation = light ? 30 : 70
-  const lightness = light ? 80 : 50
-  return `hsl(${hue}, ${saturation}%, ${lightness}%)`
-}
-
-export default useAwarenessStyles
diff --git a/app/src/features/edit-document/hooks/useCollaborativeDocument.ts b/app/src/features/edit-document/hooks/useCollaborativeDocument.ts
index 647ef65e..64a070bc 100644
--- a/app/src/features/edit-document/hooks/useCollaborativeDocument.ts
+++ b/app/src/features/edit-document/hooks/useCollaborativeDocument.ts
@@ -1,15 +1,24 @@
 import { useQueryClient } from '@tanstack/react-query'
 import * as React from 'react'
 import { toast } from 'sonner'
+import type { Awareness } from 'y-protocols/awareness'
+import type * as Y from 'yjs'

 import { useRealtime } from '@/shared/contexts/realtime-context'
 import { createYjsConnection, destroyYjsConnection } from '@/shared/lib/yjsConnection'
-import type { YjsConnection } from '@/shared/lib/yjsConnection'
+import type { YjsConnection, YjsConnectionOptions } from '@/shared/lib/yjsConnection'

 import { fetchDocumentMeta } from '@/entities/document'
 import { validateShareToken } from '@/entities/share'

 import { useAuthContext } from '@/features/auth'
+import {
+  extractShareKeyFromFragment,
+  decryptDekWithShareKey,
+} from '@/features/security'
+import { useShareContextOptional, type ShareContextValue } from '@/features/sharing'
+
+import { useKeyVaultStatus } from './useKeyVaultStatus'

 export type RealtimeStatus = 'connecting' | 'connected' | 'disconnected'
@@ -21,7 +30,6 @@ export type UseCollaborativeDocumentOptions = {
   validateShareToken?: boolean
   loadMeta?: boolean
   trackAwareness?: boolean
-  disablePersistence?: boolean
 }

 type ConnectionCacheEntry = {
@@ -38,29 +46,103 @@ const DOCUMENT_META_STALE_MS = 60 * 1000
 function buildCollaborativeDocumentConnectionCacheKey(args: {
   documentId: string
   token: string | undefined
-  disablePersistence: boolean
   workspaceId: string | null | undefined
 }) {
   const workspaceScope = typeof args.workspaceId === 'string' ? args.workspaceId.trim() : ''
-  return `${args.documentId}::${args.token ?? ''}::ws:${workspaceScope}::p:${args.disablePersistence ? '0' : '1'}`
+  return `${args.documentId}::${args.token ?? ''}::ws:${workspaceScope}`
 }

 function buildCacheKey(
   documentId: string,
   token: string | undefined,
-  disablePersistence: boolean,
   workspaceId: string | null | undefined,
 ) {
-  return buildCollaborativeDocumentConnectionCacheKey({ documentId, token, disablePersistence, workspaceId })
+  return buildCollaborativeDocumentConnectionCacheKey({ documentId, token, workspaceId })
+}
+
+/** Resolve share mode options by extracting share key from URL and decrypting DEK */
+async function resolveShareMode(
+  token: string,
+  queryClient: ReturnType<typeof useQueryClient>,
+  documentId?: string,
+  shareCtx?: ShareContextValue | null,
+): Promise<YjsConnectionOptions['shareMode'] | null> {
+  // Try ShareContext first (available when navigating from folder share page)
+  let shareKey: Uint8Array | null = shareCtx?.shareKey ?? null
+  let encryptedDekBase64: string | null = null
+
+  // Try to get encrypted DEK from ShareContext (folder share navigation)
+  if (shareCtx?.encryptedDeks && documentId) {
+    encryptedDekBase64 = shareCtx.encryptedDeks.get(documentId) ?? null
+  }
+
+  // Fallback: extract share key from URL fragment (direct document share links)
+  if (!shareKey) {
+    const fragment = typeof window !== 'undefined' ? window.location.hash : ''
+    if (!fragment) {
+      return null
+    }
+    shareKey = await extractShareKeyFromFragment(fragment)
+  }
+
+  if (!shareKey) {
+    return null
+  }
+
+  // Fallback: fetch encrypted DEK from API if not in context
+  if (!encryptedDekBase64) {
+    const shareInfo = await queryClient.fetchQuery({
+      queryKey: ['share-token', token],
+      queryFn: () => validateShareToken(token),
+      staleTime: SHARE_TOKEN_VALIDATION_STALE_MS,
+    })
+    encryptedDekBase64 = shareInfo?.encryptedDek ?? null
+  }
+
+  if (!encryptedDekBase64) {
+    // Document might not be encrypted or share key not stored
+    return null
+  }
+
+  // For password-protected shares, salt/kdfParams would be present
+  // For URL fragment mode, we just have encryptedDek
+  // The nonce is stored together with the encrypted DEK (first 24 bytes)
+  // Try to decrypt - assume nonce is prepended to ciphertext (common pattern)
+  try {
+    const { getSodium } = await import('@/features/security')
+    const sodium = await getSodium()
+    const combined = sodium.from_base64(encryptedDekBase64, sodium.base64_variants.ORIGINAL)
+
+    // XChaCha20-Poly1305 nonce is 24 bytes
+    const NONCE_LENGTH = 24
+    if (combined.length <= NONCE_LENGTH) {
+      console.warn('[share] Encrypted DEK too short')
+      return null
+    }
+
+    const nonce = combined.slice(0, NONCE_LENGTH)
+    const ciphertext = combined.slice(NONCE_LENGTH)
+
+    const nonceBase64 = sodium.to_base64(nonce, sodium.base64_variants.ORIGINAL)
+    const ciphertextBase64 = sodium.to_base64(ciphertext, sodium.base64_variants.ORIGINAL)
+
+    const dek = await decryptDekWithShareKey(ciphertextBase64, nonceBase64, shareKey)

+    return { dek }
+  } catch (err) {
+    console.warn('[share] Failed to decrypt DEK with share key:', err)
+    return null
+  }
 }
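`resolveShareMode` assumes the share-encrypted DEK is laid out as `nonce || ciphertext` in a single base64 blob. For reference, the matching wrap side would look roughly like the sketch below with libsodium — the real wrapping code is not in this diff, so treat this purely as documentation of the assumed layout:

```ts
import { getSodium } from '@/features/security'

// Sketch of the assumed layout: 24-byte XChaCha20-Poly1305 nonce, then the
// ciphertext, base64-encoded as one blob. Mirrors the split in resolveShareMode.
async function wrapDekForShare(dek: Uint8Array, shareKey: Uint8Array): Promise<string> {
  const sodium = await getSodium()
  const nonce = sodium.randombytes_buf(sodium.crypto_aead_xchacha20poly1305_ietf_NPUBBYTES) // 24 bytes
  const ciphertext = sodium.crypto_aead_xchacha20poly1305_ietf_encrypt(dek, null, null, nonce, shareKey)
  const combined = new Uint8Array(nonce.length + ciphertext.length)
  combined.set(nonce, 0)
  combined.set(ciphertext, nonce.length)
  return sodium.to_base64(combined, sodium.base64_variants.ORIGINAL)
}
```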

 async function acquireConnection(
   documentId: string,
   token: string | undefined,
-  disablePersistence: boolean,
   workspaceId: string | null | undefined,
+  queryClient?: ReturnType<typeof useQueryClient>,
+  shareCtx?: ShareContextValue | null,
 ) {
-  const cacheKey = buildCacheKey(documentId, token, disablePersistence, workspaceId)
+  const cacheKey = buildCacheKey(documentId, token, workspaceId)
   const existing = connectionCache.get(cacheKey)
   if (existing) {
     existing.refs += 1
@@ -68,11 +150,22 @@ async function acquireConnection(
     if (existing.promise) return { cacheKey, connection: await existing.promise }
   }

+  // For share token access, try to resolve share mode
+  let shareMode: YjsConnectionOptions['shareMode'] = undefined
+  if (token && queryClient) {
+    try {
+      shareMode = await resolveShareMode(token, queryClient, documentId, shareCtx) ?? undefined
+    } catch (err) {
+      console.warn('[share] Failed to resolve share mode:', err)
+    }
+  }
+
   const entry: ConnectionCacheEntry = { refs: 1, connection: null, promise: null }
   entry.promise = createYjsConnection(documentId, {
     token: token ?? null,
     connect: false,
-    disablePersistence,
+    workspaceId: workspaceId ?? undefined,
+    shareMode,
   })
   connectionCache.set(cacheKey, entry)
   try {
@@ -80,9 +173,9 @@ async function acquireConnection(
     entry.connection = connection
     entry.promise = null
     return { cacheKey, connection }
-  } catch (error) {
+  } catch {
     connectionCache.delete(cacheKey)
-    throw error
+    throw new Error('Failed to create Yjs connection')
   }
 }

@@ -102,13 +195,13 @@ export function useCollaborativeDocument(
 ) {
   const queryClient = useQueryClient()
   const { permissions, loading: authLoading, activeWorkspaceId } = useAuthContext()
+  const shareCtx = useShareContextOptional()
   const enabled = options.enabled ?? true
   const contributeToRealtimeContext = options.contributeToRealtimeContext ?? true
   const useUrlShareTokenFallback = options.useUrlShareTokenFallback ?? true
   const shouldValidateShareToken = options.validateShareToken ?? true
   const shouldLoadMeta = options.loadMeta ?? true
   const trackAwareness = options.trackAwareness ?? true
-  const disablePersistence = options.disablePersistence ?? !contributeToRealtimeContext

   const {
     setDocumentId: setRealtimeDocumentId,
     setDocumentTitle,
@@ -123,11 +216,36 @@ export function useCollaborativeDocument(
     setOnlineUsers,
     userCount,
   } = useRealtime()

+  // KeyVault status check
+  const { keyVaultUnlocked, needsKeyVaultUnlock, retryKeyVaultCheck } = useKeyVaultStatus({
+    enabled,
+    shareToken,
+    useUrlShareTokenFallback,
+  })
+
+  // Track if KeyVault is ready - once true, stays true (one-way transition)
+  // This prevents effect re-runs when keyVaultUnlocked changes from null to true
+  const [keyVaultReady, setKeyVaultReady] = React.useState(false)
+  React.useEffect(() => {
+    if (keyVaultUnlocked === true && !keyVaultReady) {
+      setKeyVaultReady(true)
+    }
+  }, [keyVaultUnlocked, keyVaultReady])
+
+  // Handle KeyVault lock state separately (show error if locked)
+  React.useEffect(() => {
+    if (needsKeyVaultUnlock) {
+      setError('Session locked. Please unlock to continue.')
+    }
+  }, [needsKeyVaultUnlock])
+
   const [status, setStatus] = React.useState<RealtimeStatus>('connecting')
   const [isReadOnly, setIsReadOnly] = React.useState(false)
   const [archived, setArchived] = React.useState(false)
   const [shareReadOnly, setShareReadOnly] = React.useState(false)
   const [error, setError] = React.useState<string | null>(null)
+  const [doc, setDoc] = React.useState<Y.Doc | null>(null)
+  const [awareness, setAwareness] = React.useState<Awareness | null>(null)

   const connectionRef = React.useRef<YjsConnection | null>(null)
   const cacheKeyRef = React.useRef<string | null>(null)
@@ -241,11 +359,19 @@ export function useCollaborativeDocument(
     if (!enabled) {
       setStatus('disconnected')
       setError(null)
-      if (cacheKeyRef.current) {
-        releaseConnection(cacheKeyRef.current)
-        cacheKeyRef.current = null
-      }
-      connectionRef.current = null
+      return () => {}
+    }
+
+    // Wait for KeyVault to be ready (keyVaultReady transitions from false to true only once)
+    if (!keyVaultReady) {
+      setStatus('connecting')
+      return () => {}
+    }
+
+    // For authenticated access (no share token), require activeWorkspaceId
+    const urlShareToken = resolveShareToken(shareToken, useUrlShareTokenFallback)
+    if (!urlShareToken && !activeWorkspaceId) {
+      setStatus('connecting')
       return () => {}
     }

@@ -265,9 +391,7 @@ export function useCollaborativeDocument(

     ;(async () => {
       try {
-        const urlShareToken = resolveShareToken(shareToken, useUrlShareTokenFallback)
-
-        const acquired = await acquireConnection(id, urlShareToken ?? undefined, disablePersistence, activeWorkspaceId)
+        const acquired = await acquireConnection(id, urlShareToken ?? undefined, activeWorkspaceId, queryClient, shareCtx)
         if (cancelled) {
           releaseConnection(acquired.cacheKey)
           return
@@ -277,6 +401,10 @@ export function useCollaborativeDocument(
         connectionRef.current = connection
         cleanupCacheKey = cacheKey

+        // Set doc and awareness state to trigger re-render
+        setDoc(connection.doc)
+        setAwareness(connection.provider.awareness)
+
         const { provider } = connection
         cleanupProvider = provider
@@ -306,17 +434,9 @@ export function useCollaborativeDocument(
         const isOnline = typeof navigator === 'undefined' ? true : navigator.onLine
         provider.shouldConnect = isOnline
-        const isProviderConnected = (() => {
-          const anyProvider = provider as any
-          if (typeof anyProvider?.wsconnected === 'boolean') return anyProvider.wsconnected
-          const ws = anyProvider?.ws
-          return Boolean(ws && typeof ws.readyState === 'number' && ws.readyState === 1)
-        })()

         if (!isOnline) {
           updateStatus('disconnected')
-        } else if (isProviderConnected) {
-          updateStatus('connected')
         } else {
           updateStatus('connecting')
           provider.connect()
         }
@@ -385,8 +505,7 @@ export function useCollaborativeDocument(
       }

       await loadMeta()
-    } catch (err) {
-      console.error('[collaboration] failed to initialise realtime session', id, err)
+    } catch {
       if (!cancelled) {
         setStatus('disconnected')
         setError('Failed to establish realtime connection. Please reload.')
@@ -448,17 +567,20 @@ export function useCollaborativeDocument(
       setShareReadOnly(false)
       setIsReadOnly(false)
       setError(null)
+      setDoc(null)
+      setAwareness(null)
     }
   }, [
     id,
     shareToken,
     loadMeta,
     contributeToRealtimeContext,
-    disablePersistence,
     enabled,
     useUrlShareTokenFallback,
     trackAwareness,
     activeWorkspaceId,
+    keyVaultReady,
+    shareCtx,
   ])

   React.useEffect(() => {
@@ -480,10 +602,12 @@ export function useCollaborativeDocument(
     status,
     isReadOnly,
     setIsReadOnly,
-    doc: connectionRef.current?.doc ?? null,
-    awareness: connectionRef.current?.provider.awareness ?? null,
+    doc,
+    awareness,
     error,
     archived,
+    needsKeyVaultUnlock,
+    retryKeyVaultCheck,
   }
 }
diff --git a/app/src/features/edit-document/hooks/useEditorBinding.ts b/app/src/features/edit-document/hooks/useEditorBinding.ts
new file mode 100644
index 00000000..5cbbeb85
--- /dev/null
+++ b/app/src/features/edit-document/hooks/useEditorBinding.ts
@@ -0,0 +1,129 @@
+import { Extension } from '@codemirror/state'
+import { EditorView, ViewUpdate } from '@codemirror/view'
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
+import { yCollab } from 'y-codemirror.next'
+import type { Awareness } from 'y-protocols/awareness'
+import type * as Y from 'yjs'
+
+export type UseEditorBindingParams = {
+  doc: Y.Doc
+  awareness: Awareness
+  onTextChange?: (text: string) => void
+  onCaretAtEnd?: (isAtEnd: boolean) => void
+}
+
+export function useEditorBinding(params: UseEditorBindingParams) {
+  const { doc, awareness, onTextChange, onCaretAtEnd } = params
+
+  const editorRef = useRef<EditorView | null>(null)
+  const [text, setText] = useState('')
+  const onTextChangeRef = useRef(onTextChange)
+  const onCaretAtEndRef = useRef(onCaretAtEnd)
+
+  // Keep refs up to date
+  useEffect(() => {
+    onTextChangeRef.current = onTextChange
+    onCaretAtEndRef.current = onCaretAtEnd
+  }, [onTextChange, onCaretAtEnd])
+
+  // EOL normalization
+  useEffect(() => {
+    const metaMap = doc.getMap('__refmd_internal')
+    if (metaMap.get('eol') === 'lf') return
+    const ytext = doc.getText('content')
+    const current = ytext.toString()
+    const hasCR = current.includes('\r')
+    doc.transact(() => {
+      if (hasCR) {
+        const normalized = current.replace(/\r\n?/g, '\n')
+        ytext.delete(0, ytext.length)
+        ytext.insert(0, normalized)
+      }
+      metaMap.set('eol', 'lf')
+    })
+  }, [doc])
+
+  // Observe Y.Text changes for external updates
+  useEffect(() => {
+    const ytext = doc.getText('content')
+    const update = () => {
+      const value = ytext.toString()
+      setText(value)
+      try {
+        onTextChangeRef.current?.(value)
+      } catch {}
+    }
+    update()
+    const observer = () => update()
+    ytext.observe(observer)
+    return () => {
+      try {
+        ytext.unobserve(observer)
+      } catch {}
+    }
+  }, [doc])
+
+  // Create update listener extension for tracking changes
+  const updateListenerExtension = useMemo((): Extension => {
+    return EditorView.updateListener.of((update: ViewUpdate) => {
+      if (update.docChanged) {
+        const value = update.state.doc.toString()
+        setText(value)
+        try {
+          onTextChangeRef.current?.(value)
+        } catch {}
+
+        // Check if caret is at end of document
+        try {
+          const { head } = update.state.selection.main
+          const docLength = update.state.doc.length
+          const isAtEnd = head >= docLength - 1
+          onCaretAtEndRef.current?.(isAtEnd)
+        } catch {}
+      }
+    })
+  }, [])
+
+  // Create yCollab extension
+  const collabExtension = useMemo((): Extension => {
+    const ytext = doc.getText('content')
+    return yCollab(ytext, awareness, { undoManager: false })
+  }, [doc, awareness])
+
+  // Combined extensions for binding
+  const bindingExtensions = useMemo((): Extension[] => {
+    return [collabExtension, updateListenerExtension]
+  }, [collabExtension, updateListenerExtension])
+
+  // Set editor ref callback
+  const setEditorView = useCallback((view: EditorView | null) => {
+    editorRef.current = view
+    if (view) {
+      const value = view.state.doc.toString()
+      setText(value)
+    }
+  }, [])
+
+  // Cleanup
+  useEffect(() => {
+    return () => {
+      editorRef.current = null
+    }
+  }, [])
+
+  // Get initial Y.Text content for editor initialization
+  const getInitialContent = useCallback(() => {
+    const ytext = doc.getText('content')
+    return ytext.toString()
+  }, [doc])
+
+  return {
+    text,
+    editorRef,
+    bindingExtensions,
+    setEditorView,
+    getInitialContent,
+  }
+}
+
+export type UseEditorBindingReturn = ReturnType<typeof useEditorBinding>
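A sketch of how a caller might wire the hook's extensions into a CodeMirror view — the mount function and its shape are illustrative, not part of this diff:

```ts
import { EditorState } from '@codemirror/state'
import { EditorView } from '@codemirror/view'

import type { UseEditorBindingReturn } from '@/features/edit-document/hooks/useEditorBinding'

// Illustrative wiring: create a view from the hook's extensions and hand it
// back via setEditorView so the hook can track it.
function mountEditor(parent: HTMLElement, binding: UseEditorBindingReturn): EditorView {
  const view = new EditorView({
    state: EditorState.create({
      // Seeding the view from the Y.Text keeps yCollab's positions consistent.
      doc: binding.getInitialContent(),
      extensions: binding.bindingExtensions,
    }),
    parent,
  })
  binding.setEditorView(view)
  return view
}
```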
diff --git a/app/src/features/edit-document/hooks/useEditorUploads.ts b/app/src/features/edit-document/hooks/useEditorUploads.ts
index 6b695986..e71befdb 100644
--- a/app/src/features/edit-document/hooks/useEditorUploads.ts
+++ b/app/src/features/edit-document/hooks/useEditorUploads.ts
@@ -1,8 +1,11 @@
-import type * as monacoNs from 'monaco-editor'
+import { EditorView } from '@codemirror/view'
 import { useCallback, useEffect, useRef, useState } from 'react'
 import { toast } from 'sonner'

+import { addFileToMap, getExistingPaths } from '@/entities/file'
+
 import { useEditorContext } from '@/features/edit-document/model/editor-context'
+import { fetchDocumentKeys } from '@/features/security'

 export type UploadStatus =
   | { state: 'idle'; total: 0; completed: 0 }
@@ -10,7 +13,12 @@ export type UploadStatus =
   | { state: 'success'; total: number; completed: number }
   | { state: 'error'; total: number; completed: number; failed: number }

-export function useEditorUploads(documentId: string, readOnly?: boolean, onReadOnlyAttempt?: () => void) {
+export function useEditorUploads(
+  documentId: string,
+  workspaceId: string | null | undefined,
+  readOnly?: boolean,
+  onReadOnlyAttempt?: () => void
+) {
   const { editor } = useEditorContext()
   const [uploadStatus, setUploadStatus] = useState<UploadStatus>({ state: 'idle', total: 0, completed: 0 })
   const resetTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
@@ -38,7 +46,15 @@
       return
     }
     if (!files?.length) return
+    if (!workspaceId) {
+      toast.error('No workspace selected')
+      return
+    }
     const { uploadAttachment } = await import('@/entities/file')
+
+    // Fetch DEK first
+    const { dek } = await fetchDocumentKeys(documentId, workspaceId)
+
     let completed = 0
     let failed = 0
     setUploadStatus({ state: 'uploading', total: files.length, completed: 0, currentFile: files[0]?.name })
@@ -50,32 +66,49 @@ export function useEditorUploads(documentId: string, readOnly?: boolean, onReadOnlyAttempt?: () => void) {
         currentFile: f.name,
       })
       try {
-        const resp = await uploadAttachment(documentId, f)
-        const name: string = (resp as any).filename || f.name
-        const ed = editor
-        if (ed) {
-          const selection = ed.getSelection() as monacoNs.Selection | null
-          let targetRange: monacoNs.IRange | null = selection || null
-          if (!targetRange) {
+        // Get existing paths for collision detection (await for file map init)
+        const existingPaths = await getExistingPaths(documentId)
+
+        const resp = await uploadAttachment(documentId, f, {
+          dek,
+          existingPaths,
+        })
+
+        // Use logicalPath from response (handles collision with -2, -3 suffix)
+        const logicalPath = resp.logicalPath
+
+        // Add to file map for rendering
+        addFileToMap(documentId, {
+          fileId: resp.id,
+          logicalPath,
+          filename: resp.originalFilename,
+          mimeType: resp.mimeType,
+        })
+
+        const view = editor as EditorView | null
+        if (view) {
+          const { from, to } = view.state.selection.main
+          let targetFrom = from
+          let targetTo = to
+
+          // If no selection, insert at end of document
+          if (from === to && from === 0) {
             try {
-              const model = ed.getModel()
-              if (model) {
-                const lastLine = model.getLineCount()
-                const lastCol = model.getLineMaxColumn(lastLine)
-                targetRange = {
-                  startLineNumber: lastLine,
-                  startColumn: lastCol,
-                  endLineNumber: lastLine,
-                  endColumn: lastCol,
-                }
-              }
+              const docLength = view.state.doc.length
+              targetFrom = docLength
+              targetTo = docLength
             } catch {}
           }
-          if (!targetRange) continue
-          const rel = `./attachments/${(resp as any).filename || f.name}`
-          const text = f.type.startsWith('image/') ? `![${name}](${rel})` : `[${name}](${rel})`
-          ed.executeEdits('insertUpload', [{ range: targetRange, text, forceMoveMarkers: true }])
-          ed.focus()
+
+          // Use logicalPath (original filename based) for Markdown
+          const rel = `./${logicalPath}`
+          const displayName = resp.originalFilename
+          const text = f.type.startsWith('image/') ? `![${displayName}](${rel})` : `[${displayName}](${rel})`
+          view.dispatch({
+            changes: { from: targetFrom, to: targetTo, insert: text },
+            selection: { anchor: targetFrom + text.length },
+          })
+          view.focus()
         }
         completed += 1
       } catch {
@@ -93,7 +126,7 @@
     } else {
       setUploadStatus({ state: 'idle', total: 0, completed: 0 })
     }
-  }, [documentId, editor, onReadOnlyAttempt, readOnly, scheduleReset])
+  }, [documentId, workspaceId, editor, onReadOnlyAttempt, readOnly, scheduleReset])

   return { uploadFiles, uploadStatus }
 }
diff --git a/app/src/features/edit-document/hooks/useKeyVaultStatus.ts b/app/src/features/edit-document/hooks/useKeyVaultStatus.ts
new file mode 100644
index 00000000..d7bddaac
--- /dev/null
+++ b/app/src/features/edit-document/hooks/useKeyVaultStatus.ts
@@ -0,0 +1,160 @@
+import * as React from 'react'
+
+import { useAuthContext } from '@/features/auth'
+import { getKeyManager, SessionLockedError, useKeyVault } from '@/features/security'
+
+function normalizeShareToken(token?: string | null): string | undefined {
+  if (typeof token !== 'string') return undefined
+  const trimmed = token.trim()
+  return trimmed.length > 0 ? trimmed : undefined
+}
+
+function resolveShareToken(explicitToken: string | undefined, useUrlFallback: boolean): string | undefined {
+  const normalized = normalizeShareToken(explicitToken)
+  if (normalized) return normalized
+
+  if (typeof window === 'undefined') return undefined
+  if (!useUrlFallback) return undefined
+
+  try {
+    const candidate = new URLSearchParams(window.location.search).get('token')
+    return normalizeShareToken(candidate)
+  } catch {
+    return undefined
+  }
+}
+
+export type UseKeyVaultStatusOptions = {
+  enabled: boolean
+  shareToken?: string
+  useUrlShareTokenFallback: boolean
+}
+
+export type UseKeyVaultStatusReturn = {
+  /** null = checking, true = unlocked or not needed, false = locked */
+  keyVaultUnlocked: boolean | null
+  /** true if user needs to unlock KeyVault to continue */
+  needsKeyVaultUnlock: boolean
+  /** Call after user unlocks to retry the check */
+  retryKeyVaultCheck: () => void
+}
+
+/**
+ * Hook to check KeyVault unlock status before establishing connections.
+ * Returns whether KeyVault is unlocked or if unlock is required.
+ */
+export function useKeyVaultStatus(options: UseKeyVaultStatusOptions): UseKeyVaultStatusReturn {
+  const { enabled, shareToken, useUrlShareTokenFallback } = options
+  const { user, loading: authLoading } = useAuthContext()
+  const { needsRestore, loading: keyVaultLoading } = useKeyVault()
+
+  const [keyVaultUnlocked, setKeyVaultUnlocked] = React.useState<boolean | null>(null)
+  const [needsKeyVaultUnlock, setNeedsKeyVaultUnlock] = React.useState(false)
+  const [keyVaultCheckKey, setKeyVaultCheckKey] = React.useState(0)
+
+  const retryKeyVaultCheck = React.useCallback(() => {
+    setKeyVaultUnlocked(null)
+    setNeedsKeyVaultUnlock(false)
+    setKeyVaultCheckKey((k) => k + 1)
+  }, [])
+
+  React.useEffect(() => {
+    if (!enabled) {
+      setKeyVaultUnlocked(null)
+      setNeedsKeyVaultUnlock(false)
+      return
+    }
+
+    // Share token access doesn't require KeyVault unlock (share keys handle decryption)
+    const token = resolveShareToken(shareToken, useUrlShareTokenFallback)
+    if (token) {
+      setKeyVaultUnlocked(true)
+      setNeedsKeyVaultUnlock(false)
+      return
+    }
+
+    // Wait for auth and KeyVault context to load
+    if (authLoading || keyVaultLoading) {
+      setKeyVaultUnlocked(null)
+      setNeedsKeyVaultUnlock(false)
+      return
+    }
+
+    // User not authenticated - don't attempt connection
+    if (!user) {
+      setKeyVaultUnlocked(null)
+      setNeedsKeyVaultUnlock(false)
+      return
+    }
+
+    // Reset state to pending before async check
+    setKeyVaultUnlocked(null)
+    setNeedsKeyVaultUnlock(false)
+
+    let cancelled = false
+    ;(async () => {
+      try {
+        const keyManager = getKeyManager()
+        await keyManager.initialize()
+
+        // Check if encryption is set up for this user
+        const hasKeys = await keyManager.hasKeys()
+
+        if (!hasKeys) {
+          // No local keys - check if we need to restore from server
+          // Skip needsRestore check if this is a retry (keyVaultCheckKey > 0)
+          // because the user just completed restore and context might not be updated yet
+          if (keyVaultCheckKey === 0 && needsRestore) {
+            // First check and needs restore - show unlock prompt
+            if (!cancelled) {
+              setKeyVaultUnlocked(false)
+              setNeedsKeyVaultUnlock(true)
+            }
+            return
+          }
+          // Encryption not set up or just restored - allow connection
+          if (!cancelled) {
+            setKeyVaultUnlocked(true)
+            setNeedsKeyVaultUnlock(false)
+          }
+          return
+        }
+
+        // Encryption is set up - check if session is unlocked
+        if (keyManager.isUnlocked) {
+          if (!cancelled) {
+            setKeyVaultUnlocked(true)
+            setNeedsKeyVaultUnlock(false)
+          }
+        } else {
+          // Session locked - need unlock
+          if (!cancelled) {
+            setKeyVaultUnlocked(false)
+            setNeedsKeyVaultUnlock(true)
+          }
+        }
+      } catch (err) {
+        if (cancelled) return
+        if (err instanceof SessionLockedError) {
+          setKeyVaultUnlocked(false)
+          setNeedsKeyVaultUnlock(true)
+        } else {
+          // Other errors - allow connection attempt
+          setKeyVaultUnlocked(true)
+          setNeedsKeyVaultUnlock(false)
+        }
+      }
+    })()
+
+    return () => {
+      cancelled = true
+    }
+  }, [enabled, shareToken, useUrlShareTokenFallback, authLoading, keyVaultLoading, user, needsRestore, keyVaultCheckKey])
+
+  return {
+    keyVaultUnlocked,
+    needsKeyVaultUnlock,
+    retryKeyVaultCheck,
+  }
+}
+
diff --git a/app/src/features/edit-document/hooks/useMarkdownCommands.ts b/app/src/features/edit-document/hooks/useMarkdownCommands.ts
index 932ccf33..7e3b1183 100644
--- a/app/src/features/edit-document/hooks/useMarkdownCommands.ts
+++ b/app/src/features/edit-document/hooks/useMarkdownCommands.ts
@@ -1,4 +1,4 @@
-import type * as monacoNs from 'monaco-editor'
+import { EditorView } from '@codemirror/view'
 import { useCallback } from 'react'

 export type MarkdownCommand =
@@ -16,49 +16,46 @@ export type MarkdownCommand =
   | 'link'

 export function useMarkdownCommands(
-  editorRef: React.MutableRefObject<monacoNs.editor.IStandaloneCodeEditor | null>,
+  editorRef: React.MutableRefObject<EditorView | null>,
 ) {
   const applyEdit = useCallback(
-    (fn: (editor: monacoNs.editor.IStandaloneCodeEditor) => void) => {
-      const editor = editorRef.current
-      if (!editor) return
-      fn(editor)
+    (fn: (view: EditorView) => void) => {
+      const view = editorRef.current
+      if (!view) return
+      fn(view)
     },
     [editorRef],
   )

   const insertAround = useCallback(
     (start: string, end: string = start) =>
-      applyEdit((editor) => {
-        const selection = editor.getSelection()
-        if (!selection) return
-        const model = editor.getModel()
-        if (!model) return
-        const selected = model.getValueInRange(selection)
-        editor.executeEdits('insertAround', [
-          { range: selection, text: `${start}${selected}${end}`, forceMoveMarkers: true },
-        ])
-        editor.focus()
+      applyEdit((view) => {
+        const { from, to } = view.state.selection.main
+        const selected = view.state.sliceDoc(from, to)
+        view.dispatch({
+          changes: { from, to, insert: `${start}${selected}${end}` },
+          selection: { anchor: from + start.length, head: to + start.length },
+        })
+        view.focus()
       }),
     [applyEdit],
   )

   const prefixLines = useCallback(
     (prefix: string) =>
-      applyEdit((editor) => {
-        const selection = editor.getSelection()
-        if (!selection) return
-        const model = editor.getModel()
-        if (!model) return
-        const startLine = selection.startLineNumber
-        const endLine = selection.endLineNumber
-        const edits: monacoNs.editor.IIdentifiedSingleEditOperation[] = []
-        for (let line = startLine; line <= endLine; line += 1) {
-          const range = new (window as any).monaco.Range(line, 1, line, 1)
-          edits.push({ range, text: prefix })
+      applyEdit((view) => {
+        const { from, to } = view.state.selection.main
+        const startLine = view.state.doc.lineAt(from)
+        const endLine = view.state.doc.lineAt(to)
+        const changes: { from: number; insert: string }[] = []
+
+        for (let lineNum = startLine.number; lineNum <= endLine.number; lineNum++) {
+          const line = view.state.doc.line(lineNum)
+          changes.push({ from: line.from, insert: prefix })
         }
-        editor.executeEdits('prefixLines', edits)
-        editor.focus()
+
+        view.dispatch({ changes })
+        view.focus()
       }),
     [applyEdit],
   )
@@ -83,54 +80,47 @@ export function useMarkdownCommands(
     case 'quote':
       return prefixLines('> ')
     case 'code':
-      return applyEdit((editor) => {
-        const selection = editor.getSelection()
-        if (!selection) return
-        const model = editor.getModel()
-        if (!model) return
-        const text = model.getValueInRange(selection)
+      return applyEdit((view) => {
+        const { from, to } = view.state.selection.main
+        const text = view.state.sliceDoc(from, to)
         if (!text.includes('\n')) {
-          editor.executeEdits('codeInline', [
-            { range: selection, text: `\`${text}\``, forceMoveMarkers: true },
-          ])
+          view.dispatch({
+            changes: { from, to, insert: `\`${text}\`` },
+            selection: { anchor: from + 1, head: to + 1 },
+          })
         } else {
-          editor.executeEdits('codeBlock', [
-            {
-              range: selection,
-              text: `\n\n\`\`\`\n${text}\n\`\`\`\n\n`,
-              forceMoveMarkers: true,
-            },
-          ])
+          view.dispatch({
+            changes: { from, to, insert: `\n\n\`\`\`\n${text}\n\`\`\`\n\n` },
+          })
         }
+        view.focus()
       })
     case 'table':
-      return applyEdit((editor) => {
-        const selection = editor.getSelection()
-        if (!selection) return
+      return applyEdit((view) => {
+        const { from, to } = view.state.selection.main
         const snippet = '\n\n| Header 1 | Header 2 |\n| --- | --- |\n| Cell 1 | Cell 2 |\n\n'
-        editor.executeEdits('table', [
-          { range: selection, text: snippet, forceMoveMarkers: true },
-        ])
+        view.dispatch({
+          changes: { from, to, insert: snippet },
+        })
+        view.focus()
      })
     case 'horizontal-rule':
-      return applyEdit((editor) => {
-        const selection = editor.getSelection()
-        if (!selection) return
-        editor.executeEdits('hr', [
-          { range: selection, text: '\n\n---\n\n', forceMoveMarkers: true },
-        ])
+      return applyEdit((view) => {
+        const { from, to } = view.state.selection.main
+        view.dispatch({
+          changes: { from, to, insert: '\n\n---\n\n' },
+        })
+        view.focus()
       })
     case 'link':
-      return applyEdit((editor) => {
-        const selection = editor.getSelection()
-        if (!selection) return
-        const model = editor.getModel()
-        if (!model) return
-        const text = model.getValueInRange(selection) || 'text'
+      return applyEdit((view) => {
+        const { from, to } = view.state.selection.main
+        const text = view.state.sliceDoc(from, to) || 'text'
         const url = prompt('URL?') || 'https://'
-        editor.executeEdits('link', [
-          { range: selection, text: `[${text}](${url})`, forceMoveMarkers: true },
-        ])
+        view.dispatch({
+          changes: { from, to, insert: `[${text}](${url})` },
+        })
+        view.focus()
       })
     default:
       return undefined
diff --git a/app/src/features/edit-document/hooks/useMonacoBinding.ts b/app/src/features/edit-document/hooks/useMonacoBinding.ts
deleted file mode 100644
index 71973245..00000000
--- a/app/src/features/edit-document/hooks/useMonacoBinding.ts
+++ /dev/null
@@ -1,106 +0,0 @@
-import type { OnMount } from '@monaco-editor/react'
-import type * as monacoNs from 'monaco-editor'
-import { useCallback, useEffect, useRef, useState } from 'react'
-import { MonacoBinding } from 'y-monaco'
-import type { Awareness } from 'y-protocols/awareness'
-import type * as Y from 'yjs'
-
-export type UseMonacoBindingParams = {
-  doc: Y.Doc
-  awareness: Awareness
-  language?: string
-  onTextChange?: (text: string) => void
-}
-
-export function useMonacoBinding(params: UseMonacoBindingParams) {
-  const { doc, awareness, language = 'markdown', onTextChange } = params
-
-  const editorRef = useRef<monacoNs.editor.IStandaloneCodeEditor | null>(null)
-  const modelRef = useRef<monacoNs.editor.ITextModel | null>(null)
-  const bindingRef = useRef<MonacoBinding | null>(null)
-  const [text, setText] = useState('')
-
-  useEffect(() => {
-    const metaMap = doc.getMap('__refmd_internal')
-    if (metaMap.get('eol') === 'lf') return
-    const ytext = doc.getText('content')
-    const current = ytext.toString()
-    const hasCR = current.includes('\r')
-    doc.transact(() => {
-      if (hasCR) {
-        const normalized = current.replace(/\r\n?/g, '\n')
current.replace(/\r\n?/g, '\n') - ytext.delete(0, ytext.length) - ytext.insert(0, normalized) - } - metaMap.set('eol', 'lf') - }) - }, [doc]) - - const onMount: OnMount = useCallback((editor, monaco) => { - editorRef.current = editor - const model = monaco.editor.createModel('', language) - model.setEOL(monaco.editor.EndOfLineSequence.LF) - editor.setModel(model) - modelRef.current = model - - const ytext = doc.getText('content') - const editors = new Set([editor]) - bindingRef.current = new MonacoBinding(ytext, model, editors, awareness) - - const sub = model.onDidChangeContent(() => { - const v = model.getValue() - setText(v) - try { - if (typeof onTextChange === 'function') onTextChange(v) - // Support wiring after hook init (Editor side) - const anyMount = onMount as any - if (typeof anyMount._onTextChange === 'function') anyMount._onTextChange(v) - // Notify caret-at-end status for scroll lock logic - try { - const ed = editorRef.current - const pos = ed?.getPosition?.() - const lineCount = model.getLineCount() - const isAtEnd = !!pos && pos.lineNumber >= lineCount - if (typeof anyMount._onCaretAtEnd === 'function') anyMount._onCaretAtEnd(isAtEnd) - } catch {} - } catch {} - }) - setText(model.getValue()) - - ;(editor as any).__disposeChange = () => { - try { sub.dispose() } catch {} - } - }, [doc, awareness, language, onTextChange]) - - useEffect(() => { - const ytext = doc.getText('content') - const update = () => { - const value = ytext.toString() - setText(value) - try { onTextChange?.(value) } catch {} - } - update() - const observer = () => update() - ytext.observe(observer) - return () => { try { ytext.unobserve(observer) } catch {} } - }, [doc, onTextChange]) - - useEffect(() => { - return () => { - try { bindingRef.current?.destroy?.() } catch {} - try { modelRef.current?.dispose?.() } catch {} - bindingRef.current = null - modelRef.current = null - editorRef.current = null - } - }, []) - - return { - onMount, - text, - editorRef, - modelRef, - bindingRef, - } -} - -export type UseMonacoBindingReturn = ReturnType diff --git a/app/src/features/edit-document/hooks/useScrollSync.ts b/app/src/features/edit-document/hooks/useScrollSync.ts index bc604775..764498b9 100644 --- a/app/src/features/edit-document/hooks/useScrollSync.ts +++ b/app/src/features/edit-document/hooks/useScrollSync.ts @@ -1,11 +1,7 @@ -import type * as monacoNs from 'monaco-editor' +import { EditorView } from '@codemirror/view' import { useCallback, useRef, useState } from 'react' -function isMonacoDisposedError(error: unknown) { - return error instanceof Error && /InstantiationService has been disposed/i.test(error.message) -} - -export function useScrollSync(editorRef: React.MutableRefObject) { +export function useScrollSync(editorRef: React.MutableRefObject) { const isSyncingRef = useRef(false) const [previewScrollPct, setPreviewScrollPct] = useState(undefined) const [previewAnchorLine, setPreviewAnchorLine] = useState(undefined) @@ -17,85 +13,88 @@ export function useScrollSync(editorRef: React.MutableRefObject(null) - const handleEditorScroll = useCallback((e: any) => { - const ed = editorRef.current - if (!ed) return + const handleEditorScroll = useCallback(() => { + const view = editorRef.current + if (!view) return if (isSyncingRef.current) return if (rafRef.current != null) cancelAnimationFrame(rafRef.current) + rafRef.current = requestAnimationFrame(() => { try { + const scrollDOM = view.scrollDOM + const scrollTop = scrollDOM.scrollTop + const scrollHeight = scrollDOM.scrollHeight + const 
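
The deleted `useMonacoBinding` hook above also carried the one-shot CRLF cleanup. Condensed from the removed code (map and field names are taken from it), the whole normalization is one Yjs transaction so remote peers receive a single update:

```ts
// Condensed from the deleted hook: normalize CRLF/CR to LF once, inside
// a single Yjs transaction, and stamp a flag so it never reruns.
import * as Y from 'yjs'

function normalizeEol(doc: Y.Doc): void {
  const meta = doc.getMap('__refmd_internal')
  if (meta.get('eol') === 'lf') return // already normalized
  const ytext = doc.getText('content')
  const current = ytext.toString()
  doc.transact(() => {
    if (current.includes('\r')) {
      // Replace the whole text; one transaction means one update message.
      ytext.delete(0, ytext.length)
      ytext.insert(0, current.replace(/\r\n?/g, '\n'))
    }
    meta.set('eol', 'lf')
  })
}
```
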
clientHeight = scrollDOM.clientHeight + const denom = Math.max(1, scrollHeight - clientHeight) + const top = scrollTop + + const prevDenom = prevDenomRef.current || denom + const prevTop = prevTopRef.current || 0 + + // Heuristic: if content height grew but scrollTop barely changed, + // treat this as content insertion (not user scroll) and anchor + // preview percentage to previous denominator to avoid upward drift. + const denomIncreased = denom > prevDenom + 0.5 + const topUnchanged = Math.abs(top - prevTop) <= 2 + const baselineDenom = denomIncreased && topUnchanged ? prevDenom : denom + + // Determine if editor was pinned to bottom as of previous metrics. + const distFromBottomPrev = Math.max(0, prevDenom - prevTop) + const pinnedPrev = distFromBottomPrev <= 4 + pinnedEditorBottomRef.current = pinnedPrev + + const now = Date.now() + const locked = lockUntilRef.current > now + + // Visible top line for source-anchored sync + let topLine: number | undefined try { - const height = ed.getScrollHeight?.() ?? 0 - const viewHeight = ed.getLayoutInfo?.().height ?? 0 - const denom = Math.max(1, height - viewHeight) - const top = e?.scrollTop ?? ed.getScrollTop() - const prevDenom = prevDenomRef.current || denom - const prevTop = prevTopRef.current || 0 - - // Heuristic: if content height grew but scrollTop barely changed, - // treat this as content insertion (not user scroll) and anchor - // preview percentage to previous denominator to avoid upward drift. - const denomIncreased = denom > prevDenom + 0.5 - const topUnchanged = Math.abs(top - prevTop) <= 2 - const baselineDenom = (denomIncreased && topUnchanged) ? prevDenom : denom - - // Determine if editor was pinned to bottom as of previous metrics. - const distFromBottomPrev = Math.max(0, prevDenom - prevTop) - const pinnedPrev = distFromBottomPrev <= 4 - pinnedEditorBottomRef.current = pinnedPrev - const now = Date.now() - const locked = lockUntilRef.current > now - // Visible top line for source-anchored sync - let topLine: number | undefined - try { - const vrs = (ed as any).getVisibleRanges?.() || [] - if (vrs && vrs.length > 0) topLine = vrs[0].startLineNumber - else topLine = (ed as any).getPosition?.()?.lineNumber - } catch {} - - const pct = (pinnedPrev || locked) - ? 1 - : Math.min(1, Math.max(0, top / baselineDenom)) - - // Prefer anchor-line when not pinned/locked; else rely on bottom lock - if (pinnedPrev || locked) setPreviewAnchorLine(undefined) - else if (typeof topLine === 'number' && Number.isFinite(topLine)) setPreviewAnchorLine(topLine) - else setPreviewAnchorLine(undefined) - prevDenomRef.current = denom - prevTopRef.current = top - isSyncingRef.current = true - setPreviewScrollPct(pct) - } catch (error) { - if (isMonacoDisposedError(error)) return - throw error - } + const block = view.lineBlockAtHeight(scrollTop) + topLine = view.state.doc.lineAt(block.from).number + } catch {} + + const pct = + pinnedPrev || locked ? 
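
The insertion heuristic above is easiest to sanity-check with numbers; note also that the visible top line now comes from `view.lineBlockAtHeight(scrollTop)` rather than Monaco's `getVisibleRanges`. A dependency-free rerun of the same arithmetic (values invented):

```ts
// Illustrative numbers only: content grows by 200px while the user
// does not scroll (scrollTop moves 1px because of a remote insert).
const prev = { denom: 1000, top: 400 }
const next = { denom: 1200, top: 401 }

const denomIncreased = next.denom > prev.denom + 0.5    // true
const topUnchanged = Math.abs(next.top - prev.top) <= 2 // true
const baselineDenom = denomIncreased && topUnchanged ? prev.denom : next.denom

// Anchoring to the previous denominator keeps the preview near 40%
// instead of drifting down to ~33% when text is appended below the fold.
console.log(next.top / baselineDenom) // 0.401
console.log(next.top / next.denom)    // ~0.334
```
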
1 : Math.min(1, Math.max(0, top / baselineDenom)) + + // Prefer anchor-line when not pinned/locked; else rely on bottom lock + if (pinnedPrev || locked) setPreviewAnchorLine(undefined) + else if (typeof topLine === 'number' && Number.isFinite(topLine)) + setPreviewAnchorLine(topLine) + else setPreviewAnchorLine(undefined) + + prevDenomRef.current = denom + prevTopRef.current = top + isSyncingRef.current = true + setPreviewScrollPct(pct) } finally { - setTimeout(() => { isSyncingRef.current = false }, 0) + setTimeout(() => { + isSyncingRef.current = false + }, 0) rafRef.current = null } }) }, [editorRef]) - const handlePreviewScroll = useCallback((pct: number) => { - const ed = editorRef.current - if (!ed) return - if (isSyncingRef.current) return - try { - isSyncingRef.current = true - const height = ed.getScrollHeight?.() ?? 0 - const viewHeight = ed.getLayoutInfo?.().height ?? 0 - const denom = Math.max(1, height - viewHeight) - const target = pct >= 0.999 ? denom : Math.round(denom * pct) + const handlePreviewScroll = useCallback( + (pct: number) => { + const view = editorRef.current + if (!view) return + if (isSyncingRef.current) return + try { - ed.setScrollTop(target) - } catch (error) { - if (isMonacoDisposedError(error)) return - throw error + isSyncingRef.current = true + const scrollDOM = view.scrollDOM + const scrollHeight = scrollDOM.scrollHeight + const clientHeight = scrollDOM.clientHeight + const denom = Math.max(1, scrollHeight - clientHeight) + const target = pct >= 0.999 ? denom : Math.round(denom * pct) + scrollDOM.scrollTop = target + } finally { + isSyncingRef.current = false } - } finally { - isSyncingRef.current = false - } - }, [editorRef]) + }, + [editorRef], + ) const onEditorContentChange = useCallback(() => { if (pinnedEditorBottomRef.current) { @@ -119,5 +118,13 @@ export function useScrollSync(editorRef: React.MutableRefObject() + const decorations: Array<{ from: number; to: number; decoration: Decoration }> = [] + + awareness.getStates().forEach((state: AwarenessState, clientId: number) => { + if (clientId === localClientId) return + if (!state.cursor) return + + const { anchor, head } = state.cursor + const { light } = getColorForClient(clientId) + const userLight = state.user?.colorLight || light + + // Selection decoration + const from = Math.min(anchor, head) + const to = Math.max(anchor, head) + + if (from !== to) { + decorations.push({ + from, + to, + decoration: Decoration.mark({ + class: 'cm-yjs-selection', + attributes: { + style: `background-color: ${userLight};`, + }, + }), + }) + } + + // Cursor widget at head position + decorations.push({ + from: head, + to: head, + decoration: Decoration.widget({ + widget: new CursorWidget(state.user || {}, clientId), + side: 1, + }), + }) + }) + + // Sort by position for RangeSetBuilder + decorations.sort((a, b) => a.from - b.from || a.to - b.to) + + for (const { from, to, decoration } of decorations) { + builder.add(from, to, decoration) + } + + return builder.finish() +} + +export function awarenessExtension(awareness: Awareness): Extension { + const localClientId = awareness.clientID + + return ViewPlugin.fromClass( + class { + decorations: DecorationSet + + constructor(_view: EditorView) { + this.decorations = createCursorDecorations(awareness, localClientId) + } + + update(_update: ViewUpdate) { + this.decorations = createCursorDecorations(awareness, localClientId) + } + }, + { + decorations: (v) => v.decorations, + } + ) +} + +export function awarenessStyles(): Extension { + return EditorView.baseTheme({ 
+ '.cm-yjs-cursor': { + position: 'relative', + }, + '.cm-yjs-cursor-label': { + fontFamily: 'system-ui, -apple-system, sans-serif', + }, + '.cm-yjs-selection': { + mixBlendMode: 'multiply', + }, + '.dark .cm-yjs-selection': { + mixBlendMode: 'screen', + }, + }) +} diff --git a/app/src/features/edit-document/lib/editor/index.ts b/app/src/features/edit-document/lib/editor/index.ts new file mode 100644 index 00000000..ff724f55 --- /dev/null +++ b/app/src/features/edit-document/lib/editor/index.ts @@ -0,0 +1,84 @@ +import { closeBrackets, closeBracketsKeymap, completionKeymap } from '@codemirror/autocomplete' +import { defaultKeymap, history, historyKeymap, indentWithTab } from '@codemirror/commands' +import { markdown, markdownLanguage } from '@codemirror/lang-markdown' +import { bracketMatching, indentOnInput, foldGutter, foldKeymap } from '@codemirror/language' +import { languages } from '@codemirror/language-data' +import { searchKeymap, highlightSelectionMatches } from '@codemirror/search' +import { Extension, EditorState, Compartment } from '@codemirror/state' +import { EditorView, keymap, lineNumbers, highlightActiveLine, highlightActiveLineGutter, drawSelection, dropCursor, rectangularSelection, crosshairCursor, highlightSpecialChars } from '@codemirror/view' + +import { refmdLightTheme, refmdDarkTheme } from './theme' +import { getVimPlaceholder } from './vim' +import { wikiLinkExtension } from './wiki-link' + +export interface EditorConfig { + isDarkMode: boolean + readOnly: boolean + vimMode: boolean + isMobile: boolean + lineWrapping?: boolean +} + +export const themeCompartment = new Compartment() +export const readOnlyCompartment = new Compartment() +export const collabCompartment = new Compartment() + +export function createBaseExtensions(config: EditorConfig): Extension[] { + const theme = config.isDarkMode ? refmdDarkTheme : refmdLightTheme + + const extensions: Extension[] = [ + highlightSpecialChars(), + history(), + drawSelection(), + dropCursor(), + EditorState.allowMultipleSelections.of(true), + indentOnInput(), + bracketMatching(), + closeBrackets(), + rectangularSelection(), + crosshairCursor(), + highlightActiveLine(), + highlightActiveLineGutter(), + highlightSelectionMatches(), + keymap.of([ + ...closeBracketsKeymap, + ...defaultKeymap, + ...searchKeymap, + ...historyKeymap, + ...foldKeymap, + ...completionKeymap, + indentWithTab, + ]), + markdown({ + base: markdownLanguage, + codeLanguages: languages, + }), + wikiLinkExtension(), + themeCompartment.of(theme), + readOnlyCompartment.of(EditorState.readOnly.of(config.readOnly)), + getVimPlaceholder(), + ] + + if (!config.isMobile) { + extensions.push(lineNumbers()) + extensions.push(foldGutter()) + } + + if (config.lineWrapping !== false) { + extensions.push(EditorView.lineWrapping) + } + + return extensions +} + +export function createEditorExtensions(config: EditorConfig): Extension[] { + return createBaseExtensions(config) +} + +export function getThemeExtension(isDarkMode: boolean): Extension { + return isDarkMode ? 
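
For reviewers trying the awareness extension in isolation, a minimal wiring sketch (provider setup elided). One thing worth verifying: the `ViewPlugin` only rebuilds decorations inside `update()`, so awareness changes that arrive without any editor transaction may need a nudge; the `'change'` listener below is an assumption, not part of this diff:

```ts
// Wiring sketch; provider setup is elided and the 'change' nudge is an
// assumption, not part of this diff.
import { EditorView } from '@codemirror/view'
import { Awareness } from 'y-protocols/awareness'
import * as Y from 'yjs'
import { awarenessExtension, awarenessStyles } from './awareness'

const ydoc = new Y.Doc()
const awareness = new Awareness(ydoc)

const view = new EditorView({
  parent: document.body,
  extensions: [awarenessExtension(awareness), awarenessStyles()],
})

// Remote peers publish { cursor: { anchor, head }, user: {...} }.
// The plugin rebuilds decorations on editor updates, so force one when
// only awareness changed:
awareness.on('change', () => view.dispatch({}))
```
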
refmdDarkTheme : refmdLightTheme +} + +export { refmdLightTheme, refmdDarkTheme } from './theme' +export { vimCompartment, enableVimMode, disableVimMode, toggleVimMode } from './vim' +export { wikiLinkExtension } from './wiki-link' diff --git a/app/src/features/edit-document/lib/editor/keymaps.ts b/app/src/features/edit-document/lib/editor/keymaps.ts new file mode 100644 index 00000000..acf9ac2a --- /dev/null +++ b/app/src/features/edit-document/lib/editor/keymaps.ts @@ -0,0 +1,175 @@ +import { Extension } from '@codemirror/state' +import { keymap, KeyBinding } from '@codemirror/view' +import { EditorView } from '@codemirror/view' + +export type MarkdownAction = + | 'bold' + | 'italic' + | 'strikethrough' + | 'code' + | 'link' + | 'heading1' + | 'heading2' + | 'heading3' + | 'bulletList' + | 'numberedList' + | 'taskList' + | 'blockquote' + | 'codeBlock' + | 'horizontalRule' + +export interface MarkdownKeymapConfig { + onAction?: (action: MarkdownAction) => void +} + +function wrapSelection(view: EditorView, before: string, after: string): boolean { + const { from, to } = view.state.selection.main + const selectedText = view.state.sliceDoc(from, to) + + view.dispatch({ + changes: { + from, + to, + insert: `${before}${selectedText}${after}`, + }, + selection: { + anchor: from + before.length, + head: to + before.length, + }, + }) + view.focus() + return true +} + +function insertAtLineStart(view: EditorView, prefix: string): boolean { + const line = view.state.doc.lineAt(view.state.selection.main.head) + const content = view.state.sliceDoc(line.from, line.to) + + view.dispatch({ + changes: { + from: line.from, + to: line.to, + insert: `${prefix}${content}`, + }, + selection: { anchor: line.from + prefix.length + content.length }, + }) + view.focus() + return true +} + +export function createMarkdownKeymap(config?: MarkdownKeymapConfig): Extension { + const bindings: KeyBinding[] = [ + { + key: 'Mod-b', + run: (view) => { + config?.onAction?.('bold') + return wrapSelection(view, '**', '**') + }, + }, + { + key: 'Mod-i', + run: (view) => { + config?.onAction?.('italic') + return wrapSelection(view, '_', '_') + }, + }, + { + key: 'Mod-Shift-s', + run: (view) => { + config?.onAction?.('strikethrough') + return wrapSelection(view, '~~', '~~') + }, + }, + { + key: 'Mod-e', + run: (view) => { + config?.onAction?.('code') + return wrapSelection(view, '`', '`') + }, + }, + { + key: 'Mod-k', + run: (view) => { + config?.onAction?.('link') + const { from, to } = view.state.selection.main + const selectedText = view.state.sliceDoc(from, to) + + if (selectedText) { + view.dispatch({ + changes: { + from, + to, + insert: `[${selectedText}](url)`, + }, + selection: { anchor: from + selectedText.length + 3, head: from + selectedText.length + 6 }, + }) + } else { + view.dispatch({ + changes: { + from, + insert: '[](url)', + }, + selection: { anchor: from + 1 }, + }) + } + view.focus() + return true + }, + }, + { + key: 'Mod-1', + run: (view) => { + config?.onAction?.('heading1') + return insertAtLineStart(view, '# ') + }, + }, + { + key: 'Mod-2', + run: (view) => { + config?.onAction?.('heading2') + return insertAtLineStart(view, '## ') + }, + }, + { + key: 'Mod-3', + run: (view) => { + config?.onAction?.('heading3') + return insertAtLineStart(view, '### ') + }, + }, + { + key: 'Mod-Shift-8', + run: (view) => { + config?.onAction?.('bulletList') + return insertAtLineStart(view, '- ') + }, + }, + { + key: 'Mod-Shift-7', + run: (view) => { + config?.onAction?.('numberedList') + return insertAtLineStart(view, 
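
Since `index.ts` above exports `themeCompartment` and `readOnlyCompartment`, callers are expected to retune a live editor rather than rebuild it. A usage sketch (the `view` instance is assumed to exist):

```ts
// Usage sketch for the compartments exported above; `view` is assumed
// to be a live editor built from createEditorExtensions(...).
import { EditorState } from '@codemirror/state'
import { EditorView } from '@codemirror/view'
import { themeCompartment, readOnlyCompartment } from './index'
import { refmdDarkTheme } from './theme'

declare const view: EditorView

// Swap the theme in place, no editor rebuild:
view.dispatch({ effects: themeCompartment.reconfigure(refmdDarkTheme) })

// Flip read-only the same way:
view.dispatch({ effects: readOnlyCompartment.reconfigure(EditorState.readOnly.of(true)) })
```
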
'1. ') + }, + }, + { + key: 'Mod-Shift-9', + run: (view) => { + config?.onAction?.('taskList') + return insertAtLineStart(view, '- [ ] ') + }, + }, + { + key: 'Mod-Shift-.', + run: (view) => { + config?.onAction?.('blockquote') + return insertAtLineStart(view, '> ') + }, + }, + ] + + return keymap.of(bindings) +} + +export function createCustomKeymap(bindings: KeyBinding[]): Extension { + return keymap.of(bindings) +} diff --git a/app/src/features/edit-document/lib/editor/theme.ts b/app/src/features/edit-document/lib/editor/theme.ts new file mode 100644 index 00000000..23d63628 --- /dev/null +++ b/app/src/features/edit-document/lib/editor/theme.ts @@ -0,0 +1,203 @@ +import { HighlightStyle, syntaxHighlighting } from '@codemirror/language' +import { Extension } from '@codemirror/state' +import { EditorView } from '@codemirror/view' +import { tags } from '@lezer/highlight' + +type Palette = { + primary: string + background: string + foreground: string + mutedForeground: string + codeBlockBg: string + codeBlockFg: string +} + +const LIGHT_PALETTE: Palette = { + primary: '#6e63d6', + background: '#ffffff', + foreground: '#252a33', + mutedForeground: '#596272', + codeBlockBg: '#fafafa', + codeBlockFg: '#24292e', +} + +const DARK_PALETTE: Palette = { + primary: '#8f86e8', + background: '#1e1e1e', + foreground: '#e4e7eb', + mutedForeground: '#9aa1b0', + codeBlockBg: '#242424', + codeBlockFg: '#f3f4f6', +} + +const hexToRgb = (hex: string) => { + const h = hex.replace('#', '') + const bigint = parseInt(h, 16) + const r = (bigint >> 16) & 255 + const g = (bigint >> 8) & 255 + const b = bigint & 255 + return { r, g, b } +} + +const hexWithAlpha = (hex: string, alpha: number) => { + const { r, g, b } = hexToRgb(hex) + return `rgba(${r}, ${g}, ${b}, ${alpha})` +} + +const mixHexWithWhite = (hex: string, weight: number) => { + const { r, g, b } = hexToRgb(hex) + const w = Math.min(Math.max(weight, 0), 1) + const mix = (c: number) => Math.round(c + (255 - c) * w) + const pad = (n: number) => n.toString(16).padStart(2, '0') + return `#${pad(mix(r))}${pad(mix(g))}${pad(mix(b))}` +} + +function buildTheme(palette: Palette, isDark: boolean): Extension { + const softPrimary = mixHexWithWhite(palette.primary, isDark ? 0.3 : 0.55) + const linkColor = mixHexWithWhite(palette.primary, isDark ? 0.2 : 0.35) + + const theme = EditorView.theme( + { + '&': { + color: palette.foreground, + backgroundColor: palette.background, + height: '100%', + }, + '.cm-content': { + caretColor: palette.primary, + fontFamily: 'ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace', + fontSize: '14px', + lineHeight: '1.6', + }, + '.cm-cursor, .cm-dropCursor': { + borderLeftColor: palette.primary, + borderLeftWidth: '2px', + }, + '&.cm-focused .cm-selectionBackground, .cm-selectionBackground, .cm-content ::selection': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.22 : 0.16), + }, + '.cm-panels': { + backgroundColor: palette.background, + color: palette.foreground, + }, + '.cm-panels.cm-panels-top': { + borderBottom: `1px solid ${hexWithAlpha(palette.foreground, 0.1)}`, + }, + '.cm-panels.cm-panels-bottom': { + borderTop: `1px solid ${hexWithAlpha(palette.foreground, 0.1)}`, + }, + '.cm-searchMatch': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.2 : 0.14), + outline: `1px solid ${hexWithAlpha(palette.primary, 0.3)}`, + }, + '.cm-searchMatch.cm-searchMatch-selected': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 
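
A quick worked check of the `Mod-k` selection arithmetic above: after inserting `[docs](url)` around a selection, the anchor/head offsets should land exactly on the `url` placeholder. Plain string math, no editor needed:

```ts
// Plain string math mirroring the Mod-k binding above.
const from = 10
const selectedText = 'docs'
const inserted = `[${selectedText}](url)`

const urlStart = from + selectedText.length + 3 // past "[docs]("
const urlEnd = from + selectedText.length + 6   // end of "url"

console.log(inserted.slice(urlStart - from, urlEnd - from)) // "url"
```
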
0.35 : 0.25), + }, + '.cm-activeLine': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.08 : 0.06), + }, + '.cm-selectionMatch': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.16 : 0.12), + }, + '&.cm-focused .cm-matchingBracket, &.cm-focused .cm-nonmatchingBracket': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.16 : 0.1), + outline: `1px solid ${hexWithAlpha(palette.primary, isDark ? 0.55 : 0.42)}`, + }, + '.cm-gutters': { + backgroundColor: palette.background, + color: hexWithAlpha(palette.foreground, 0.45), + border: 'none', + }, + '.cm-activeLineGutter': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.08 : 0.06), + color: palette.primary, + }, + '.cm-foldPlaceholder': { + backgroundColor: 'transparent', + border: 'none', + color: palette.mutedForeground, + }, + '.cm-tooltip': { + border: `1px solid ${hexWithAlpha(palette.foreground, 0.15)}`, + backgroundColor: palette.background, + color: palette.foreground, + borderRadius: '6px', + boxShadow: isDark + ? '0 4px 12px rgba(0, 0, 0, 0.4)' + : '0 4px 12px rgba(0, 0, 0, 0.1)', + }, + '.cm-tooltip .cm-tooltip-arrow:before': { + borderTopColor: 'transparent', + borderBottomColor: 'transparent', + }, + '.cm-tooltip .cm-tooltip-arrow:after': { + borderTopColor: palette.background, + borderBottomColor: palette.background, + }, + '.cm-tooltip-autocomplete': { + '& > ul > li[aria-selected]': { + backgroundColor: hexWithAlpha(palette.primary, isDark ? 0.2 : 0.12), + color: palette.foreground, + }, + }, + '.cm-completionLabel': { + color: palette.foreground, + }, + '.cm-completionDetail': { + color: palette.mutedForeground, + }, + '.cm-completionMatchedText': { + color: palette.primary, + fontWeight: '600', + textDecoration: 'none', + }, + '.cm-line': { + padding: '0 4px', + }, + '.cm-scroller': { + overflow: 'auto', + }, + }, + { dark: isDark } + ) + + const highlightStyle = HighlightStyle.define([ + { tag: tags.heading, fontWeight: 'bold', color: palette.primary }, + { tag: tags.strong, fontWeight: 'bold', color: palette.primary }, + { tag: tags.emphasis, fontStyle: 'italic', color: palette.primary }, + { tag: tags.strikethrough, textDecoration: 'line-through' }, + { tag: tags.link, color: linkColor, textDecoration: 'underline' }, + { tag: tags.url, color: linkColor }, + { tag: tags.monospace, fontFamily: 'monospace', color: palette.codeBlockFg, backgroundColor: palette.codeBlockBg }, + { tag: tags.quote, color: palette.mutedForeground, fontStyle: 'italic' }, + { tag: tags.list, color: palette.foreground }, + { tag: tags.meta, color: palette.mutedForeground }, + { tag: tags.processingInstruction, color: softPrimary }, + { tag: tags.comment, color: palette.mutedForeground }, + { tag: tags.keyword, color: palette.primary, fontWeight: 'bold' }, + { tag: tags.string, color: softPrimary }, + { tag: tags.number, color: softPrimary }, + { tag: tags.operator, color: palette.foreground }, + { tag: tags.punctuation, color: palette.mutedForeground }, + { tag: tags.bracket, color: palette.foreground }, + { tag: tags.variableName, color: palette.foreground }, + { tag: tags.propertyName, color: palette.primary }, + { tag: tags.function(tags.variableName), color: palette.primary }, + { tag: tags.typeName, color: softPrimary }, + { tag: tags.className, color: softPrimary }, + { tag: tags.labelName, color: palette.primary }, + { tag: tags.attributeName, color: palette.primary }, + { tag: tags.attributeValue, color: softPrimary }, + { tag: tags.tagName, color: palette.primary }, + { tag: tags.angleBracket, 
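
The palette helpers above are small enough to verify by hand. A standalone rerun with the same formulas, printing what `softPrimary` resolves to for the light palette (`#6e63d6` pulled 55% toward white):

```ts
// Same formulas as the helpers above, runnable standalone.
const hexToRgb = (hex: string) => {
  const n = parseInt(hex.replace('#', ''), 16)
  return { r: (n >> 16) & 255, g: (n >> 8) & 255, b: n & 255 }
}

const mixHexWithWhite = (hex: string, weight: number) => {
  const { r, g, b } = hexToRgb(hex)
  const w = Math.min(Math.max(weight, 0), 1)
  const mix = (c: number) => Math.round(c + (255 - c) * w)
  const pad = (n: number) => n.toString(16).padStart(2, '0')
  return `#${pad(mix(r))}${pad(mix(g))}${pad(mix(b))}`
}

// softPrimary for the light palette:
console.log(mixHexWithWhite('#6e63d6', 0.55)) // "#beb9ed"
```
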
color: palette.mutedForeground }, + { tag: tags.contentSeparator, color: palette.mutedForeground }, + ]) + + return [theme, syntaxHighlighting(highlightStyle)] +} + +export const refmdLightTheme: Extension = buildTheme(LIGHT_PALETTE, false) +export const refmdDarkTheme: Extension = buildTheme(DARK_PALETTE, true) + +export { LIGHT_PALETTE, DARK_PALETTE } +export type { Palette } diff --git a/app/src/features/edit-document/lib/editor/vim.ts b/app/src/features/edit-document/lib/editor/vim.ts new file mode 100644 index 00000000..437ff75a --- /dev/null +++ b/app/src/features/edit-document/lib/editor/vim.ts @@ -0,0 +1,41 @@ +import { Extension, Compartment } from '@codemirror/state' +import { EditorView } from '@codemirror/view' +import { vim } from '@replit/codemirror-vim' + +export const vimCompartment = new Compartment() + +export interface VimModeState { + enabled: boolean + statusBarElement?: HTMLElement | null +} + +export function createVimExtension(): Extension { + return vim() +} + +export function enableVimMode(view: EditorView): void { + const vimExt = createVimExtension() + view.dispatch({ + effects: vimCompartment.reconfigure(vimExt), + }) +} + +export function disableVimMode(view: EditorView): void { + view.dispatch({ + effects: vimCompartment.reconfigure([]), + }) +} + +export function toggleVimMode(view: EditorView, enabled: boolean): void { + if (enabled) { + enableVimMode(view) + } else { + disableVimMode(view) + } +} + +export function getVimPlaceholder(): Extension { + return vimCompartment.of([]) +} + +export { vim } diff --git a/app/src/features/edit-document/lib/editor/wiki-link.ts b/app/src/features/edit-document/lib/editor/wiki-link.ts new file mode 100644 index 00000000..29ade9b2 --- /dev/null +++ b/app/src/features/edit-document/lib/editor/wiki-link.ts @@ -0,0 +1,156 @@ +import { + autocompletion, + CompletionContext, + CompletionResult, + Completion, +} from '@codemirror/autocomplete' +import { Extension } from '@codemirror/state' +import { EditorView } from '@codemirror/view' + +import { listDocuments } from '@/entities/document' + +type SearchResult = { + id: string + title: string + document_type: string + path?: string | null + updated_at?: string +} + +async function wikiLinkCompletionSource( + context: CompletionContext +): Promise { + // Match [[, ![[, or @[[ patterns + const wiki = context.matchBefore(/\[\[[^\]]*$/) + const embed = context.matchBefore(/!\[\[[^\]]*$/) + const mention = context.matchBefore(/@\[\[[^\]]*$/) + const match = wiki || embed || mention + if (!match) return null + + // Extract the prefix and query + const text = match.text + let prefix: string + let query: string + + if (text.startsWith('![[')) { + prefix = '![[' + query = text.slice(3) + } else if (text.startsWith('@[[')) { + prefix = '@[[' + query = text.slice(3) + } else { + prefix = '[[' + query = text.slice(2) + } + + // Check if ]] already exists after cursor + const line = context.state.doc.lineAt(context.pos) + const after = context.state.sliceDoc(context.pos, line.to) + const hasClosing = after.startsWith(']]') + + // Fetch documents + let items: SearchResult[] = [] + try { + const resp = await listDocuments({}) + const docs = Array.isArray((resp as any)?.items) + ? 
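
The vim module above keeps everything behind `vimCompartment`, so toggling is a reconfigure, not a remount. A sketch of wiring the persisted preference back in (`setVim` is a hypothetical helper; the `editorVimMode` localStorage key matches the one `Editor.tsx` reads later in this diff):

```ts
// setVim is a hypothetical helper; the 'editorVimMode' key matches the
// one Editor.tsx reads later in this diff.
import { EditorView } from '@codemirror/view'
import { toggleVimMode } from './vim'

function setVim(view: EditorView, enabled: boolean): void {
  localStorage.setItem('editorVimMode', String(enabled))
  toggleVimMode(view, enabled) // reconfigures vimCompartment in place
}
```
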
((resp as any).items as Array<{ + id: string + title: string + type: string + path?: string + updated_at?: string + }>) + : [] + items = docs.map((d) => ({ + id: d.id, + title: d.title, + document_type: d.type, + path: (d as any).path, + updated_at: (d as any).updated_at, + })) + } catch {} + + // Deduplicate + const seen = new Set() + const uniq: SearchResult[] = [] + for (const it of items) { + if (it && it.id && !seen.has(it.id)) { + seen.add(it.id) + uniq.push(it) + } + } + + // Track duplicates by title + const titleCounts = new Map() + for (const it of uniq) { + const t = (it.title || '').toLowerCase() + if (!t) continue + titleCounts.set(t, (titleCounts.get(t) || 0) + 1) + } + const duplicates = new Set() + titleCounts.forEach((c, t) => { + if (c > 1) duplicates.add(t) + }) + + // Build completions + const options: Completion[] = [] + + // Add "Create new" option if query is not empty + if (query && query.length > 0) { + options.push({ + label: `Create "${query}"`, + detail: 'Create a new document', + info: 'Create a new document with this title (link will use document ID)', + apply: (view: EditorView, _completion: Completion, _from: number, _to: number) => { + const insertText = hasClosing ? query : `${query}]]` + view.dispatch({ + changes: { from: match.from + prefix.length, to: context.pos, insert: insertText }, + selection: { anchor: match.from + prefix.length + insertText.length }, + }) + }, + boost: 99, + }) + } + + // Add document completions + for (const doc of uniq) { + const isDup = duplicates.has((doc.title || '').toLowerCase()) + const typeLower = (doc.document_type || '').toLowerCase() + const typeDisplay = + typeLower === 'folder' ? 'Folder' : typeLower === 'scrap' ? 'Scrap' : 'Document' + + options.push({ + label: doc.title || 'Untitled', + detail: isDup ? doc.path || '' : typeDisplay, + info: () => { + const el = document.createElement('div') + el.innerHTML = `${doc.title || 'Untitled'}
<br/>${ + isDup ? `Path: ${doc.path || ''}<br/>` : '' + }Type: ${typeDisplay}<br/>ID: ${doc.id}${doc.updated_at ? `<br/>
Updated: ${doc.updated_at}` : ''}` + el.style.cssText = 'font-size: 12px; line-height: 1.5;' + return el + }, + apply: (view: EditorView, _completion: Completion, _from: number, _to: number) => { + const insertText = hasClosing ? doc.id : `${doc.id}]]` + view.dispatch({ + changes: { from: match.from + prefix.length, to: context.pos, insert: insertText }, + selection: { anchor: match.from + prefix.length + insertText.length + (hasClosing ? 2 : 0) }, + }) + }, + }) + } + + return { + from: match.from + prefix.length, + options, + validFor: /^[^\]]*$/, + } +} + +export function wikiLinkExtension(): Extension { + return autocompletion({ + override: [wikiLinkCompletionSource], + activateOnTyping: true, + closeOnBlur: true, + }) +} diff --git a/app/src/features/edit-document/lib/monaco/theme.ts b/app/src/features/edit-document/lib/monaco/theme.ts deleted file mode 100644 index 3ef832d2..00000000 --- a/app/src/features/edit-document/lib/monaco/theme.ts +++ /dev/null @@ -1,142 +0,0 @@ -import type * as monacoNs from 'monaco-editor' - -export const REFMD_LIGHT_THEME = 'refmd-light' -export const REFMD_DARK_THEME = 'refmd-dark' - -const stripHash = (hex: string) => hex.replace('#', '') - -const pad = (n: number) => n.toString(16).padStart(2, '0') - -const hexToRgb = (hex: string) => { - const h = stripHash(hex) - const bigint = parseInt(h, 16) - const r = (bigint >> 16) & 255 - const g = (bigint >> 8) & 255 - const b = bigint & 255 - return { r, g, b } -} - -const hexWithAlpha = (hex: string, alpha: number) => { - const a = Math.min(Math.max(alpha, 0), 1) - const { r, g, b } = hexToRgb(hex) - const alphaHex = pad(Math.round(a * 255)) - return `#${pad(r)}${pad(g)}${pad(b)}${alphaHex}` -} - -const mixHexWithWhite = (hex: string, weight: number) => { - const { r, g, b } = hexToRgb(hex) - const w = Math.min(Math.max(weight, 0), 1) - const mix = (c: number) => Math.round(c + (255 - c) * w) - return `#${pad(mix(r))}${pad(mix(g))}${pad(mix(b))}` -} - -type Palette = { - primary: string - background: string - foreground: string - mutedForeground: string - codeBlockBg: string - codeBlockFg: string -} - -const LIGHT_PALETTE: Palette = { - primary: '#6e63d6', - background: '#ffffff', - foreground: '#252a33', - mutedForeground: '#596272', - codeBlockBg: '#fafafa', - codeBlockFg: '#24292e', -} - -const DARK_PALETTE: Palette = { - primary: '#8f86e8', - background: '#1e1e1e', - foreground: '#e4e7eb', - mutedForeground: '#9aa1b0', - codeBlockBg: '#242424', - codeBlockFg: '#f3f4f6', -} - -type ThemeDefinition = { - name: string - data: monacoNs.editor.IStandaloneThemeData -} - -const buildTheme = (name: string, palette: Palette, isDark: boolean): ThemeDefinition => { - const softPrimary = mixHexWithWhite(palette.primary, isDark ? 0.3 : 0.55) - const linkColor = mixHexWithWhite(palette.primary, isDark ? 0.2 : 0.35) - const keywordColor = stripHash(palette.primary) - const defaultFg = stripHash(palette.foreground) - - return { - name, - data: { - base: isDark ? 
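
Back in `wiki-link.ts` above: `matchBefore` anchors each regex at the cursor, so typing `see [[He` yields the query `He`. One ordering nit worth noting: the plain `[[` pattern also matches the tail of `![[` and `@[[`, so with `wiki` checked first the embed/mention branches rarely classify; behavior is unaffected because only the prefix length feeds the insert position. A sketch with the more specific patterns tried first (`queryAt` is a hypothetical helper):

```ts
// queryAt is a hypothetical helper; it tries the more specific prefixes
// first so classification is exact, though the insert position comes out
// the same either way.
import { CompletionContext } from '@codemirror/autocomplete'

function queryAt(context: CompletionContext): string | null {
  const match =
    context.matchBefore(/!\[\[[^\]]*$/) ??
    context.matchBefore(/@\[\[[^\]]*$/) ??
    context.matchBefore(/\[\[[^\]]*$/)
  if (!match) return null
  return match.text.replace(/^(?:!|@)?\[\[/, '') // "see [[He" -> "He"
}
```
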
'vs-dark' : 'vs', - inherit: false, - rules: [ - { token: '', foreground: defaultFg }, - { token: 'keyword', foreground: keywordColor, fontStyle: 'bold' }, - { token: 'keyword.table.header', foreground: keywordColor, fontStyle: 'bold' }, - { token: 'keyword.table.left', foreground: keywordColor, fontStyle: 'bold' }, - { token: 'keyword.table.middle', foreground: keywordColor, fontStyle: 'bold' }, - { token: 'keyword.table.right', foreground: keywordColor, fontStyle: 'bold' }, - { token: 'strong', foreground: keywordColor, fontStyle: 'bold' }, - { token: 'emphasis', foreground: keywordColor, fontStyle: 'italic' }, - { token: 'string', foreground: stripHash(softPrimary) }, - { token: 'string.link', foreground: stripHash(linkColor), fontStyle: 'underline' }, - { token: 'string.target', foreground: stripHash(linkColor) }, - { token: 'variable', foreground: stripHash(mixHexWithWhite(palette.foreground, isDark ? 0.08 : 0.12)) }, - { token: 'delimiter', foreground: defaultFg }, - { token: 'delimiter.parenthesis', foreground: defaultFg }, - { token: 'delimiter.bracket', foreground: defaultFg }, - { token: 'delimiter.curly', foreground: defaultFg }, - { token: 'operator', foreground: defaultFg }, - { - token: 'variable.source', - foreground: stripHash(palette.codeBlockFg), - background: stripHash(palette.codeBlockBg), - }, - ], - colors: { - 'editor.background': palette.background, - 'editor.foreground': palette.foreground, - 'editorLineNumber.foreground': hexWithAlpha(palette.foreground, 0.45), - 'editorLineNumber.activeForeground': palette.primary, - 'editor.selectionBackground': hexWithAlpha(palette.primary, isDark ? 0.22 : 0.16), - 'editor.selectionHighlightBackground': hexWithAlpha(palette.primary, isDark ? 0.16 : 0.12), - 'editor.wordHighlightBackground': hexWithAlpha(palette.primary, isDark ? 0.16 : 0.12), - 'editor.findMatchHighlightBackground': hexWithAlpha(palette.primary, isDark ? 0.2 : 0.14), - 'editorBracketMatch.background': hexWithAlpha(palette.primary, isDark ? 0.16 : 0.1), - 'editorBracketMatch.border': hexWithAlpha(palette.primary, isDark ? 0.55 : 0.42), - 'editorBracketHighlight.foreground1': palette.foreground, - 'editorBracketHighlight.foreground2': palette.foreground, - 'editorBracketHighlight.foreground3': palette.foreground, - 'editorBracketHighlight.foreground4': palette.foreground, - 'editorBracketHighlight.foreground5': palette.foreground, - 'editorBracketHighlight.foreground6': palette.foreground, - 'editorBracketHighlight.unexpectedBracket.foreground': palette.foreground, - 'editorCursor.foreground': palette.primary, - 'editorIndentGuide.background': hexWithAlpha(palette.foreground, 0.12), - 'editorIndentGuide.activeBackground': hexWithAlpha(palette.primary, isDark ? 0.3 : 0.24), - 'editor.lineHighlightBackground': hexWithAlpha(palette.primary, isDark ? 0.08 : 0.06), - 'editor.selectionHighlightBorder': hexWithAlpha(palette.foreground, 0.16), - // Diff: align with Git History/Changes/Snapshot viewer colors - 'diffEditor.insertedTextBackground': isDark ? hexWithAlpha('#052e16', 0.4) : '#f0fdf4', - 'diffEditor.removedTextBackground': isDark ? 
hexWithAlpha('#450a0a', 0.4) : '#fef2f2', - }, - }, - } -} - -const themeDefinitions: ThemeDefinition[] = [ - buildTheme(REFMD_LIGHT_THEME, LIGHT_PALETTE, false), - buildTheme(REFMD_DARK_THEME, DARK_PALETTE, true), -] - -type MonacoNamespace = typeof import('monaco-editor') - -export function ensureRefmdThemes(monaco: MonacoNamespace) { - themeDefinitions.forEach(({ name, data }) => { - monaco.editor.defineTheme(name, data) - }) -} diff --git a/app/src/features/edit-document/lib/monaco/vim-loader.ts b/app/src/features/edit-document/lib/monaco/vim-loader.ts deleted file mode 100644 index fb5251a8..00000000 --- a/app/src/features/edit-document/lib/monaco/vim-loader.ts +++ /dev/null @@ -1,227 +0,0 @@ -import type * as monacoNs from 'monaco-editor' -import type { CodeMirrorShim, RegisterController } from 'monaco-vim' - -type MonacoVimModule = typeof import('monaco-vim') - -let cachedModulePromise: Promise | null = null -let vimPatched = false - -const clamp = (value: number, min: number, max: number) => Math.max(min, Math.min(max, value)) -type ScrollPosition = 'top' | 'center' | 'bottom' - -type ScrollToCursorArgs = { - position?: ScrollPosition -} - -type CmAdapter = { - editor?: monacoNs.editor.IStandaloneCodeEditor - clipPos: (pos: { line: number; ch: number }) => { line: number; ch: number } - moveCurrentLineTo?: (viewPosition: ScrollPosition) => void -} - -type VimState = { - lastMotion?: unknown - lastHPos?: number - lastHSPos?: number -} - -type MotionArgs = { - forward: boolean - repeat: number -} - -type MotionHandler = ( - cm: CmAdapter, - head: { line: number; ch: number }, - motionArgs: MotionArgs, - vim: VimState, -) => { line: number; ch: number } - -type MotionsShape = Record - -const getViewModel = (editor: monacoNs.editor.IStandaloneCodeEditor) => (editor as any)?._getViewModel?.() - -const toModelPosition = (pos: { line: number; ch: number }) => ({ - lineNumber: pos.line + 1, - column: pos.ch + 1, -}) - -type PatchedRegisterController = RegisterController & { __clipboardHooked?: boolean } -type ResetFn = ((...args: unknown[]) => void) & { __clipboardWrapped?: boolean } - -const maybeCopyToSystemClipboard = (operator?: string, text?: string) => { - if (operator !== 'yank') return - if (!text) return - if (typeof navigator === 'undefined') return - const clipboard = navigator.clipboard - if (!clipboard?.writeText) return - void clipboard.writeText(text).catch(() => {}) -} - -const patchRegisterClipboardSync = (vimApi: CodeMirrorShim['Vim']) => { - if (!vimApi?.getRegisterController) return - const controller = vimApi.getRegisterController() as PatchedRegisterController | undefined - if (!controller || controller.__clipboardHooked) return - const originalPushText = controller.pushText.bind(controller) - controller.pushText = function patchedPushText(registerName, operator, text, linewise, blockwise) { - maybeCopyToSystemClipboard(operator, text) - return originalPushText(registerName, operator, text, linewise, blockwise) - } - controller.__clipboardHooked = true -} - -const patchResetForClipboard = (vimApi: CodeMirrorShim['Vim']) => { - if (!vimApi) return - const api = vimApi as NonNullable - const reset = api.resetVimGlobalState_ as ResetFn | undefined - if (!reset || reset.__clipboardWrapped) return - const wrapped: ResetFn = function wrappedReset(this: unknown, ...args: unknown[]) { - reset.apply(this, args) - patchRegisterClipboardSync(api) - } - wrapped.__clipboardWrapped = true - api.resetVimGlobalState_ = wrapped as NonNullable['resetVimGlobalState_'] -} - -const 
setupClipboardSync = (vimApi: CodeMirrorShim['Vim']) => { - if (!vimApi) return - patchRegisterClipboardSync(vimApi) - patchResetForClipboard(vimApi) -} - -const resolveScrollPosition = (position?: ScrollPosition): ScrollPosition => position ?? 'center' - -const getViewHeight = (editor: monacoNs.editor.IStandaloneCodeEditor) => { - const layoutHeight = editor.getLayoutInfo?.()?.height - if (typeof layoutHeight === 'number' && layoutHeight > 0) { - return layoutHeight - } - const domHeight = editor.getDomNode?.()?.clientHeight - return typeof domHeight === 'number' ? domHeight : 0 -} - -const scrollLineToPosition = ( - editor: monacoNs.editor.IStandaloneCodeEditor, - lineNumber: number, - position: ScrollPosition, -) => { - const viewHeight = getViewHeight(editor) - const lineTop = editor.getTopForLineNumber?.(lineNumber) ?? 0 - const lineBottom = editor.getBottomForLineNumber?.(lineNumber) ?? lineTop - const lineHeight = Math.max(1, lineBottom - lineTop) - - let targetTop = lineTop - switch (position) { - case 'center': - targetTop = lineTop - Math.max(0, viewHeight - lineHeight) / 2 - break - case 'bottom': - targetTop = lineBottom - viewHeight - break - case 'top': - default: - targetTop = lineTop - } - - const maxScrollTop = typeof editor.getScrollHeight === 'function' && viewHeight - ? editor.getScrollHeight() - viewHeight - : undefined - const clampedTop = typeof maxScrollTop === 'number' ? clamp(targetTop, 0, Math.max(0, maxScrollTop)) : Math.max(0, targetTop) - editor.setScrollTop(Math.max(0, clampedTop)) -} - -const patchScrollToCursorAction = (vimApi: CodeMirrorShim['Vim']) => { - if (!vimApi?.defineAction) return - - const defineAction = vimApi.defineAction.bind(vimApi) - defineAction('scrollToCursor', (cm: CmAdapter, actionArgs?: ScrollToCursorArgs) => { - const editor = cm?.editor - if (!editor) { - return - } - - const position = resolveScrollPosition(actionArgs?.position) - const lineNumber = editor.getPosition?.()?.lineNumber - if (!lineNumber) { - return - } - - scrollLineToPosition(editor, lineNumber, position) - }) -} - -const patchDisplayLineMotion = (module: MonacoVimModule) => { - if (vimPatched) return - - const codeMirror = module.VimMode - const vimApi = codeMirror?.Vim - if (!vimApi?.defineMotion) return - - const defineMotion = vimApi.defineMotion.bind(vimApi) - - defineMotion('moveByDisplayLines', function moveByDisplayLines(this: MotionsShape, cm, head, motionArgs, vim) { - const editor = cm.editor - const viewModel = editor ? getViewModel(editor) : null - const fallback = () => (typeof this.moveByLines === 'function' ? this.moveByLines(cm, head, motionArgs, vim) : head) - - if (!editor || !viewModel) { - return fallback() - } - - const converter = viewModel.coordinatesConverter - const modelStart = toModelPosition(head) - const viewStart = converter.convertModelPositionToViewPosition(modelStart) - const viewLineCount = typeof viewModel.getLineCount === 'function' ? viewModel.getLineCount() : 0 - if (!viewLineCount) { - return fallback() - } - - const repeat = Math.max(1, motionArgs.repeat || 1) - const direction = motionArgs.forward ? 1 : -1 - - const startZeroColumn = Math.max(0, viewStart.column - 1) - let goalColumn = typeof vim.lastHSPos === 'number' ? 
vim.lastHSPos : startZeroColumn - switch (vim.lastMotion) { - case this.moveByDisplayLines: - case this.moveByScroll: - case this.moveByLines: - case this.moveToColumn: - case this.moveToEol: - break - default: - goalColumn = startZeroColumn - vim.lastHSPos = goalColumn - } - - const rawTargetLine = viewStart.lineNumber + direction * repeat - const targetLine = clamp(rawTargetLine, 1, viewLineCount) - const maxColumn = Math.max(1, viewModel.getLineMaxColumn(targetLine) ?? 1) - const maxZeroColumn = Math.max(0, maxColumn - 1) - const resolvedColumn = clamp(goalColumn, 0, maxZeroColumn) - - const targetViewPos = { lineNumber: targetLine, column: resolvedColumn + 1 } - const targetModelPos = converter.convertViewPositionToModelPosition(targetViewPos) - const candidate = cm.clipPos({ - line: targetModelPos.lineNumber - 1, - ch: targetModelPos.column - 1, - }) - - vim.lastHSPos = goalColumn - vim.lastHPos = candidate.ch - return candidate - }) - - patchScrollToCursorAction(vimApi) - setupClipboardSync(vimApi) - vimPatched = true -} - -export async function loadMonacoVim() { - if (!cachedModulePromise) { - cachedModulePromise = import('monaco-vim').then((module) => { - patchDisplayLineMotion(module) - return module - }) - } - return cachedModulePromise -} diff --git a/app/src/features/edit-document/lib/monaco/wiki-link-provider.ts b/app/src/features/edit-document/lib/monaco/wiki-link-provider.ts deleted file mode 100644 index 46091f4c..00000000 --- a/app/src/features/edit-document/lib/monaco/wiki-link-provider.ts +++ /dev/null @@ -1,76 +0,0 @@ -import type * as monacoNs from 'monaco-editor' - -import { listDocuments } from '@/entities/document' - -type SearchResult = { - id: string - title: string - document_type: string - path?: string | null - updated_at?: string -} - -export function registerWikiLinkCompletion(monaco: typeof monacoNs) { - const provider: monacoNs.languages.CompletionItemProvider = { - triggerCharacters: ['[', '!', '@', '|', ' ', '-'], - async provideCompletionItems(model, position) { - const before = model.getValueInRange({ startLineNumber: position.lineNumber, startColumn: 1, endLineNumber: position.lineNumber, endColumn: position.column }) - const after = model.getValueInRange({ startLineNumber: position.lineNumber, startColumn: position.column, endLineNumber: position.lineNumber, endColumn: model.getLineMaxColumn(position.lineNumber) }) - const wiki = before.match(/(\[\[)([^\]]*?)$/) - const embed = before.match(/(!\[\[)([^\]]*?)$/) - const mention = before.match(/(@\[\[)([^\]]*?)$/) - const match = wiki || embed || mention - if (!match) return { suggestions: [] } - const q = match[2] || '' - const hasClosing = after.startsWith(']]') - let items: SearchResult[] = [] - try { - const resp = await listDocuments({ query: q || null }) - const docs = Array.isArray((resp as any)?.items) ? 
(resp as any).items as Array<{ id: string; title: string; type: string; path?: string; updated_at?: string }> : [] - items = docs.map(d => ({ id: d.id, title: d.title, document_type: d.type, path: (d as any).path, updated_at: (d as any).updated_at })) - } catch {} - const seen = new Set() - const uniq: SearchResult[] = [] - for (const it of items) { if (it && it.id && !seen.has(it.id)) { seen.add(it.id); uniq.push(it) } } - const titleCounts = new Map() - for (const it of uniq) { const t = (it.title || '').toLowerCase(); if (!t) continue; titleCounts.set(t, (titleCounts.get(t) || 0) + 1) } - const duplicates = new Set() - titleCounts.forEach((c, t) => { if (c > 1) duplicates.add(t) }) - - const range: monacoNs.IRange = { startLineNumber: position.lineNumber, startColumn: position.column - q.length, endLineNumber: position.lineNumber, endColumn: position.column } - const suggestions: monacoNs.languages.CompletionItem[] = uniq.map((doc) => { - const isDup = duplicates.has((doc.title || '').toLowerCase()) - const insertText = hasClosing ? (doc.id || '') : `${doc.id}]]` - const typeLower = (doc.document_type || '').toLowerCase() - const typeDisplay = typeLower === 'folder' ? 'Folder' : typeLower === 'scrap' ? 'Scrap' : 'Document' - const updated = doc.updated_at || '' - const path = doc.path || '' - const documentation = `**${doc.title || 'Untitled'}**\n\n${isDup ? `Path: ${path}\n\n` : ''}Type: ${typeDisplay}\nID: ${doc.id}\n${updated ? `Updated: ${updated}` : ''}` - return { - label: doc.title || 'Untitled', - kind: monaco.languages.CompletionItemKind.File, - detail: isDup ? (path || '') : typeDisplay, - documentation: { value: documentation }, - insertText, - range, - command: hasClosing - ? { id: 'cursorMove', title: 'Move cursor', arguments: [{ to: 'right', by: 'character', value: 2 }] } - : { id: 'editor.action.triggerSuggest', title: 'Re-trigger suggestions' }, - } - }) - if (q && q.length > 0) { - suggestions.unshift({ - label: `Create "${q}"`, - kind: monaco.languages.CompletionItemKind.Constant, - detail: 'Create a new document', - documentation: 'Create a new document with this title (link will use document ID)', - insertText: hasClosing ? 
q : `${q}]]`, - range, - }) - } - return { suggestions } - }, - } - const disp = monaco.languages.registerCompletionItemProvider('markdown', provider) - return disp -} diff --git a/app/src/features/edit-document/model/editor-context.tsx b/app/src/features/edit-document/model/editor-context.tsx index db7ae6d3..a6a8c9cc 100644 --- a/app/src/features/edit-document/model/editor-context.tsx +++ b/app/src/features/edit-document/model/editor-context.tsx @@ -1,19 +1,19 @@ -import type * as monaco from 'monaco-editor' +import { EditorView } from '@codemirror/view' import React, { createContext, useCallback, useContext, useMemo, useRef, useState } from 'react' type Ctx = { - editor: monaco.editor.IStandaloneCodeEditor | null - setEditor: (ed: monaco.editor.IStandaloneCodeEditor | null) => void - registerEditor: (ed: monaco.editor.IStandaloneCodeEditor) => () => void + editor: EditorView | null + setEditor: (ed: EditorView | null) => void + registerEditor: (ed: EditorView) => () => void } const EditorCtx = createContext(null) export function EditorProvider({ children }: { children: React.ReactNode }) { - const [editor, setEditor] = useState(null) - const editorsRef = useRef>(new Set()) + const [editor, setEditor] = useState(null) + const editorsRef = useRef>(new Set()) - const registerEditor = useCallback((ed: monaco.editor.IStandaloneCodeEditor) => { + const registerEditor = useCallback((ed: EditorView) => { editorsRef.current.add(ed) setEditor((current) => current ?? ed) let released = false diff --git a/app/src/features/edit-document/ui/Editor.tsx b/app/src/features/edit-document/ui/Editor.tsx index 864fc976..d65e7e6a 100644 --- a/app/src/features/edit-document/ui/Editor.tsx +++ b/app/src/features/edit-document/ui/Editor.tsx @@ -1,6 +1,5 @@ -import type { OnMount } from '@monaco-editor/react' +import { EditorView } from '@codemirror/view' import { useNavigate, useRouterState } from '@tanstack/react-router' -import type * as monacoNs from 'monaco-editor' import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { toast } from 'sonner' import type { Awareness } from 'y-protocols/awareness' @@ -15,27 +14,21 @@ import type { ViewMode } from '@/shared/types/view-mode' import { listDocuments } from '@/entities/document' -import { useAwarenessStyles } from '@/features/edit-document/hooks/useAwarenessStyles' +import { useEditorBinding } from '@/features/edit-document/hooks/useEditorBinding' import { useEditorUploads } from '@/features/edit-document/hooks/useEditorUploads' import { useMarkdownCommands, type MarkdownCommand } from '@/features/edit-document/hooks/useMarkdownCommands' -import { useMonacoBinding } from '@/features/edit-document/hooks/useMonacoBinding' import { useScrollSync } from '@/features/edit-document/hooks/useScrollSync' -import { ensureRefmdThemes, REFMD_DARK_THEME, REFMD_LIGHT_THEME } from '@/features/edit-document/lib/monaco/theme' -import { registerWikiLinkCompletion } from '@/features/edit-document/lib/monaco/wiki-link-provider' +import { awarenessExtension, awarenessStyles } from '@/features/edit-document/lib/editor/awareness' +import { enableVimMode, disableVimMode } from '@/features/edit-document/lib/editor/vim' import { useEditorContext } from '@/features/edit-document/model/editor-context' import { useViewContext } from '@/features/edit-document/model/view-context' -import { loadMonacoVim } from '../lib/monaco/vim-loader' - import CursorDisplay from './CursorDisplay' import EditorLayout from './EditorLayout' import type { PreviewPaneProps } from 
'./PreviewPane' import EditorToolbar from './Toolbar' const logEditorError = (scope: string, error: unknown) => { - if (error instanceof Error && /InstantiationService has been disposed/i.test(error.message)) { - return - } if (error instanceof Error) { console.error(`[editor] ${scope}:`, error) } else { @@ -62,6 +55,7 @@ export type MarkdownEditorProps = { userName?: string userId?: string documentId: string + workspaceId?: string | null readOnly?: boolean extraRight?: React.ReactNode conflictControls?: React.ReactNode @@ -89,7 +83,6 @@ export type MarkdownEditorProps = { renderPreview?: (props: PreviewPaneProps) => React.ReactNode } - export function MarkdownEditor(props: MarkdownEditorProps) { const { doc, @@ -101,6 +94,7 @@ export function MarkdownEditor(props: MarkdownEditorProps) { userId, userName, documentId, + workspaceId, readOnly = false, extraRight, conflictControls, @@ -110,12 +104,14 @@ export function MarkdownEditor(props: MarkdownEditorProps) { previewOverride, renderPreview, } = props + const { isDarkMode } = useTheme() const isMobile = useIsMobile() const { editor: activeEditor, setEditor, registerEditor } = useEditorContext() const { viewMode, setViewMode, viewModeHydrated, hasPersistentViewMode } = useViewContext() const navigate = useNavigate() const shareToken = useShareToken() + const shareScope = useRouterState({ select: (state) => { const raw = (state.location?.search as any)?.shareScope @@ -123,6 +119,7 @@ export function MarkdownEditor(props: MarkdownEditorProps) { return scope === 'folder' || scope === 'document' ? scope : null }, }) + const isShareMount = useRouterState({ select: (state) => { const search = (state.location?.search ?? {}) as Record @@ -136,14 +133,14 @@ export function MarkdownEditor(props: MarkdownEditorProps) { return Boolean(raw) }, }) + const isShareLink = Boolean(shareToken && !isShareMount) - const brandedMonacoTheme = isDarkMode ? REFMD_DARK_THEME : REFMD_LIGHT_THEME - const monacoTheme = brandedMonacoTheme const view = forcedView ?? viewMode + const [isVimMode, setIsVimMode] = useState(() => typeof window !== 'undefined' && localStorage.getItem('editorVimMode') === 'true') const [syncScroll, setSyncScroll] = useState(true) const [toolbarOpen, setToolbarOpen] = useState(false) - const [editorMountNonce, setEditorMountNonce] = useState(0) + const readOnlyWarningRef = useRef(0) const emitReadOnlyWarning = useCallback(() => { if (!readOnly) return @@ -152,77 +149,84 @@ export function MarkdownEditor(props: MarkdownEditorProps) { readOnlyWarningRef.current = now toast.info('Document is read-only') }, [readOnly]) + const syncScrollRef = useRef(true) - useEffect(() => { syncScrollRef.current = syncScroll }, [syncScroll]) - const vimModeRef = useRef<{ dispose: () => void } | null>(null) + useEffect(() => { + syncScrollRef.current = syncScroll + }, [syncScroll]) + const vimStatusBarRef = useRef(null) const fileInputRef = useRef(null) const viewRef = useRef(forcedView ?? 
initialViewProp) + const unregisterEditorRef = useRef void)>(null) + useEffect(() => { viewRef.current = view as ViewMode }, [view]) - const { onMount: onMonacoMount, text: boundText, editorRef } = useMonacoBinding({ - doc, - awareness, - language: 'markdown', - onTextChange: () => {}, - }) + const mosaicGroupIdRef = useRef(scrollSyncGroupId) useEffect(() => { mosaicGroupIdRef.current = scrollSyncGroupId }, [scrollSyncGroupId]) + const mosaicScrollRafRef = useRef(null) const suppressMosaicEmitRef = useRef(false) const suppressMosaicTimeoutRef = useRef(null) - const unregisterEditorRef = useRef void)>(null) - const focusDisposableRef = useRef void }>(null) - const blurDisposableRef = useRef void }>(null) - const isThisEditorActive = useCallback(() => { - const ed = editorRef.current - if (!ed) return false - return activeEditor === ed - }, [activeEditor, editorRef]) - - const ensureThisEditorActive = useCallback(() => { - const ed = editorRef.current as monacoNs.editor.IStandaloneCodeEditor | null - if (!ed) return - if (activeEditor !== ed) setEditor(ed as any) - }, [activeEditor, editorRef, setEditor]) - const disableVimMode = useCallback(() => { - if (vimModeRef.current) { - safeExecute('disable vim mode', () => vimModeRef.current?.dispose()) - vimModeRef.current = null - } - if (vimStatusBarRef.current) { - vimStatusBarRef.current.textContent = '' + // Set up awareness user info + useEffect(() => { + if (!awareness || (awareness as any)?._destroyed) return + + const generateUserColor = (id?: string, light = false): string => { + let hash = 0 + const str = id || Math.random().toString() + for (let i = 0; i < str.length; i++) { + const char = str.charCodeAt(i) + hash = ((hash << 5) - hash) + char + hash = hash & hash + } + const hue = Math.abs(hash) % 360 + const saturation = light ? 30 : 70 + const lightness = light ? 80 : 50 + return `hsl(${hue}, ${saturation}%, ${lightness}%)` } - }, []) - const enableVimMode = useCallback(async (targetEditor?: monacoNs.editor.IStandaloneCodeEditor) => { - const editorInstance = targetEditor ?? 
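
The awareness user setup above derives both cursor colors from a stable hash of the user id, so a given collaborator keeps the same hue across reconnects. The same hash, runnable standalone (`hueFor` is a hypothetical name):

```ts
// Same hash as generateUserColor above; hueFor is a hypothetical name.
function hueFor(id: string): number {
  let hash = 0
  for (let i = 0; i < id.length; i++) {
    hash = (hash << 5) - hash + id.charCodeAt(i)
    hash = hash & hash // clamp to 32 bits
  }
  return Math.abs(hash) % 360
}

const hue = hueFor('user-42')
console.log(`hsl(${hue}, 70%, 50%)`)    // cursor color
console.log(`hsl(${hue}, 30%, 80%)`)    // light variant used for selections
console.log(hueFor('user-42') === hue)  // true: stable across reconnects
```
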
(editorRef.current as monacoNs.editor.IStandaloneCodeEditor | null) - const statusBar = vimStatusBarRef.current - if (!editorInstance || !statusBar) return - disableVimMode() - try { - const { initVimMode } = await loadMonacoVim() - statusBar.textContent = '' - vimModeRef.current = initVimMode(editorInstance, statusBar) - editorInstance.focus() - } catch (error) { - logEditorError('enable vim mode', error) + + const info = { + name: userName || `User-${awareness.clientID}`, + color: generateUserColor(userId), + colorLight: generateUserColor(userId, true), + id: userId || String(awareness.clientID), } - }, [disableVimMode, editorRef]) - const { previewScrollPct, previewAnchorLine, handleEditorScroll, handlePreviewScroll, onEditorContentChange, onCaretAtEndChange, lockActive } = useScrollSync(editorRef) + awareness.setLocalStateField('user', info) + }, [awareness, userId, userName]) + + // Editor binding hook + const { text: boundText, editorRef, bindingExtensions, setEditorView, getInitialContent } = useEditorBinding({ + doc, + awareness, + onTextChange: () => {}, + }) + + // Create awareness extensions + const awarenessExts = useMemo(() => { + return [awarenessExtension(awareness), awarenessStyles()] + }, [awareness]) + + // Combined extensions + const editorExtensions = useMemo(() => { + return [...bindingExtensions, ...awarenessExts] + }, [bindingExtensions, awarenessExts]) + + const { previewScrollPct, previewAnchorLine, handleEditorScroll, handlePreviewScroll, lockActive } = useScrollSync(editorRef) const { runCommand } = useMarkdownCommands(editorRef) + const handleToolbarCommand = useCallback( (cmd: string, value?: number) => { runCommand(cmd as MarkdownCommand, value) }, [runCommand], ) - // Wire the actual callback now that hook is ready - ;(onMonacoMount as any)._onTextChange = onEditorContentChange - ;(onMonacoMount as any)._onCaretAtEnd = onCaretAtEndChange + useEffect(() => { if (!viewModeHydrated) return if (forcedView) return @@ -232,45 +236,12 @@ export function MarkdownEditor(props: MarkdownEditorProps) { safeExecute('set initial view mode', () => setViewMode(initialViewProp)) }, [forcedView, hasPersistentViewMode, initialViewProp, setViewMode, viewMode, viewModeHydrated]) - useAwarenessStyles(awareness, { userId, userName }) - - const { uploadFiles, uploadStatus } = useEditorUploads(documentId, readOnly, emitReadOnlyWarning) + const { uploadFiles, uploadStatus } = useEditorUploads(documentId, workspaceId, readOnly, emitReadOnlyWarning) const uploadFilesRef = useRef(uploadFiles) useEffect(() => { uploadFilesRef.current = uploadFiles }, [uploadFiles]) - const setReadOnlyOverlay = useCallback( - ( - editor: (monacoNs.editor.IStandaloneCodeEditor & { __readOnlyOverlay?: { widget: monacoNs.editor.IOverlayWidget; domNode: HTMLElement }; __monaco?: typeof monacoNs }) | undefined, - monacoInstance: typeof monacoNs | undefined, - enabled: boolean, - ) => { - if (!editor || !monacoInstance) return - const existing = editor.__readOnlyOverlay - if (enabled) { - if (existing) return - const domNode = document.createElement('div') - domNode.className = 'pointer-events-none select-none text-[11px] font-semibold uppercase tracking-[0.18em] text-muted-foreground bg-background/85 border border-border/60 rounded-full px-3 py-1 shadow-sm' - domNode.textContent = 'Read-only' - const widget: monacoNs.editor.IOverlayWidget = { - getId: () => 'read-only-overlay', - getDomNode: () => domNode, - getPosition: () => ({ - preference: 
monacoInstance.editor.OverlayWidgetPositionPreference.TOP_RIGHT_CORNER, - }), - } - editor.addOverlayWidget(widget) - editor.__readOnlyOverlay = { widget, domNode } - } else if (existing) { - try { editor.removeOverlayWidget(existing.widget) } catch {} - try { existing.domNode.remove() } catch {} - delete editor.__readOnlyOverlay - } - }, - [], - ) - const handleTaskToggle = useCallback((lineNumber: number, checked: boolean) => { if (readOnly) { emitReadOnlyWarning() @@ -283,16 +254,13 @@ export function MarkdownEditor(props: MarkdownEditorProps) { let currentLine = 1 while (currentLine < lineNumber) { const nextNewline = text.indexOf('\n', offset) - if (nextNewline === -1) { - return - } + if (nextNewline === -1) return offset = nextNewline + 1 currentLine += 1 } const nextNewline = text.indexOf('\n', offset) const lineEnd = nextNewline === -1 ? text.length : nextNewline const lineText = text.slice(offset, lineEnd) - // Allow optional blockquote and ordered list prefixes before the task checkbox const taskMatch = lineText.match(/^(\s*(?:>\s*)*(?:[-*+]|\d+[.)])\s*\[)([ xX])(\]\s*)(.*)$/) if (!taskMatch) return const [, prefix, currentChar, closing, rest] = taskMatch @@ -306,127 +274,58 @@ export function MarkdownEditor(props: MarkdownEditorProps) { }) }, [doc, readOnly, emitReadOnlyWarning]) - const handleBeforeMount = useCallback((monaco: Parameters[1]) => { - ensureRefmdThemes(monaco as any) - monaco.editor.setTheme(brandedMonacoTheme) - }, [brandedMonacoTheme]) - - const handleMount: OnMount = useCallback((editor, monaco) => { - // First, bind Monaco to Yjs via hook - onMonacoMount(editor, monaco) - ;(editor as any).__monaco = monaco - setReadOnlyOverlay(editor as any, monaco as any, readOnly) - // Register wiki-link completion provider - try { - const disp = registerWikiLinkCompletion(monaco as any) - ;(editor as any).__disposeWiki = () => safeExecute('dispose wiki completion', () => disp?.dispose?.()) - } catch (error) { - logEditorError('register wiki completion', error) - } + const isThisEditorActive = useCallback(() => { + const ed = editorRef.current + if (!ed) return false + return activeEditor === ed + }, [activeEditor, editorRef]) - // Activate monaco-markdown extension for lists/enter/tab/completions (lazy load) - ;(async () => { - try { - const mod = await import('monaco-markdown') - const ext = new mod.MonacoMarkdownExtension() - ext.activate(editor as any) - ;(editor as any).__disposeMonacoMd = () => {} - } catch (error) { - logEditorError('load monaco-markdown', error) - } - })() - - const cursorDispose = editor.onDidChangeCursorSelection((_e) => {}) - ;(editor as any).__disposeCursor = () => safeExecute('dispose cursor listener', () => cursorDispose.dispose()) - - const shouldWarnForKey = (ev: any) => { - if (!readOnly) return false - const native = ev?.browserEvent ?? ev - if (!native) return false - const { ctrlKey, metaKey, altKey } = native - if (ctrlKey || metaKey || altKey) return false - const key = native.key ?? native.code ?? 
'' - if (key === ' ' || key === 'Spacebar') return true - const editingKeys = ['Backspace', 'Delete', 'Enter', 'Tab'] - if (editingKeys.includes(key)) return true - if (typeof key === 'string' && key.length === 1) return true - return false + const ensureThisEditorActive = useCallback(() => { + const ed = editorRef.current + if (!ed) return + if (activeEditor !== ed) setEditor(ed) + }, [activeEditor, editorRef, setEditor]) + + // Handle editor view creation + const handleEditorViewCreated = useCallback((view: EditorView) => { + setEditorView(view) + + // Register editor + unregisterEditorRef.current?.() + unregisterEditorRef.current = registerEditor(view) + + // Set up scroll listener for split view sync + const scrollHandler = () => { + if (!syncScrollRef.current || viewRef.current !== 'split') return + handleEditorScroll() } + view.scrollDOM.addEventListener('scroll', scrollHandler) + + // Set up mosaic scroll sync + const mosaicScrollHandler = () => { + const groupId = mosaicGroupIdRef.current + if (!groupId) return + if (!syncScrollRef.current) return + if (suppressMosaicEmitRef.current) return + if (mosaicScrollRafRef.current != null) return - // Pre-lock preview to bottom when user hits Enter at file end - try { - const keydownDispose = editor.onKeyDown((e: any) => { + mosaicScrollRafRef.current = window.requestAnimationFrame(() => { + mosaicScrollRafRef.current = null try { - if (shouldWarnForKey(e)) { - emitReadOnlyWarning() - return - } - const KeyCode = (monaco as any)?.KeyCode - const isEnter = KeyCode ? e.keyCode === KeyCode.Enter : e.code === 'Enter' || e.keyCode === 13 - if (!isEnter) return - const model = editor.getModel() - const pos = editor.getPosition() - if (!model || !pos) return - const lastLine = model.getLineCount() - const atLastLine = pos.lineNumber >= lastLine - if (!atLastLine) return - const maxCol = model.getLineMaxColumn(lastLine) - const atEndOfDoc = pos.column >= maxCol - if (atEndOfDoc) { - safeExecute('handle enter at end of doc', () => onEditorContentChange()) - } + const block = view.lineBlockAtHeight(view.scrollDOM.scrollTop) + const line = view.state.doc.lineAt(block.from).number + if (!Number.isFinite(line) || line < 1) return + dispatchMosaicScrollSync({ groupId, source: 'editor', line }) } catch (error) { - logEditorError('keydown handler', error) + logEditorError('mosaic scroll sync emit', error) } }) - ;(editor as any).__disposeKeydown = () => safeExecute('dispose keydown listener', () => keydownDispose.dispose()) - } catch (error) { - logEditorError('register keydown handler', error) } + view.scrollDOM.addEventListener('scroll', mosaicScrollHandler) - // Hook editor scroll for sync - const scrollDispose = editor.onDidScrollChange?.((e) => { - if (!syncScrollRef.current || viewRef.current !== 'split') return - handleEditorScroll(e) - }) - ;(editor as any).__disposeScroll = () => safeExecute('dispose scroll listener', () => scrollDispose?.dispose?.()) - - // Mosaic scroll sync: emit current top line to paired preview tile (by group) - try { - const mosaicScrollDispose = editor.onDidScrollChange?.(() => { - const groupId = mosaicGroupIdRef.current - if (!groupId) return - if (!syncScrollRef.current) return - if (suppressMosaicEmitRef.current) return - if (mosaicScrollRafRef.current != null) return - mosaicScrollRafRef.current = window.requestAnimationFrame(() => { - mosaicScrollRafRef.current = null - try { - if ((editor as any)?._isDisposed === true) return - const domNode = editor.getDomNode?.() - if (!domNode) return - const range = 
editor.getVisibleRanges?.()?.[0] - const line = range?.startLineNumber ?? editor.getPosition?.()?.lineNumber ?? 1 - if (!Number.isFinite(line) || line < 1) return - dispatchMosaicScrollSync({ groupId, source: 'editor', line }) - } catch (error) { - logEditorError('mosaic scroll sync emit', error) - } - }) - }) - ;(editor as any).__disposeMosaicScroll = () => safeExecute('dispose mosaic scroll listener', () => mosaicScrollDispose?.dispose?.()) - } catch (error) { - logEditorError('register mosaic scroll listener', error) - } - - // Handle paste (Ctrl+V) with files from clipboard - const dom = editor.getDomNode() as HTMLElement | null + // Set up paste handler const pasteHandler = async (event: ClipboardEvent) => { try { - const editorDomNode = dom - const target = event.target as HTMLElement | null - if (!editorDomNode || !target || !editorDomNode.contains(target)) return - const clipboardData = event.clipboardData const fileList = clipboardData?.files if (!fileList || fileList.length === 0) return @@ -444,85 +343,51 @@ export function MarkdownEditor(props: MarkdownEditorProps) { logEditorError('paste handler', error) } } + view.contentDOM.addEventListener('paste', pasteHandler) - if (typeof document !== 'undefined') { - document.addEventListener('paste', pasteHandler as any, true) - } - - ;(editor as any).__disposePaste = () => { - safeExecute('remove document paste listener', () => { - if (typeof document !== 'undefined') { - document.removeEventListener('paste', pasteHandler as any, true) - } - }) - } - - // Apply vim if enabled + // Apply vim mode if enabled if (isVimMode) { - void enableVimMode(editor) + enableVimMode(view) } - }, [onMonacoMount, isVimMode, syncScroll, handleEditorScroll, emitReadOnlyWarning, readOnly, setReadOnlyOverlay, enableVimMode, brandedMonacoTheme]) - useEffect(() => { - const editorInstance = editorRef.current as (monacoNs.editor.IStandaloneCodeEditor & { __readOnlyOverlay?: { widget: monacoNs.editor.IOverlayWidget; domNode: HTMLElement }; __monaco?: typeof monacoNs }) | null - if (!editorInstance) return - const monacoInstance = editorInstance.__monaco - setReadOnlyOverlay(editorInstance, monacoInstance, readOnly) - }, [readOnly, setReadOnlyOverlay]) + // Store cleanup function on view + ;(view as any).__cleanup = () => { + view.scrollDOM.removeEventListener('scroll', scrollHandler) + view.scrollDOM.removeEventListener('scroll', mosaicScrollHandler) + view.contentDOM.removeEventListener('paste', pasteHandler) + } + }, [setEditorView, registerEditor, handleEditorScroll, isVimMode]) + // Cleanup on unmount useEffect(() => { - const editorInstance = editorRef.current as (monacoNs.editor.IStandaloneCodeEditor & { __monaco?: typeof monacoNs }) | null - const monacoInstance = editorInstance?.__monaco - if (!monacoInstance) return - ensureRefmdThemes(monacoInstance) - monacoInstance.editor.setTheme(brandedMonacoTheme) - }, [brandedMonacoTheme, editorRef]) - - useEffect(() => () => { - const anyEditor = editorRef.current as (monacoNs.editor.IStandaloneCodeEditor & { __readOnlyOverlay?: { widget: monacoNs.editor.IOverlayWidget; domNode: HTMLElement }; __monaco?: typeof monacoNs }) | undefined - safeExecute('dispose change listener', () => (anyEditor as any)?.__disposeChange?.()) - safeExecute('dispose scroll listener', () => (anyEditor as any)?.__disposeScroll?.()) - safeExecute('dispose mosaic scroll listener', () => (anyEditor as any)?.__disposeMosaicScroll?.()) - safeExecute('dispose paste handler', () => (anyEditor as any)?.__disposePaste?.()) - 
safeExecute('dispose wiki handler', () => (anyEditor as any)?.__disposeWiki?.()) - safeExecute('dispose cursor handler', () => (anyEditor as any)?.__disposeCursor?.()) - safeExecute('dispose monaco markdown handler', () => (anyEditor as any)?.__disposeMonacoMd?.()) - safeExecute('dispose keydown handler', () => (anyEditor as any)?.__disposeKeydown?.()) - safeExecute('dispose read-only overlay', () => { - if (anyEditor?.__readOnlyOverlay) { - try { anyEditor.removeOverlayWidget(anyEditor.__readOnlyOverlay.widget) } catch {} - try { anyEditor.__readOnlyOverlay.domNode.remove() } catch {} - delete anyEditor.__readOnlyOverlay - } - if (anyEditor && '__monaco' in anyEditor) { - delete (anyEditor as any).__monaco - } - }) - safeExecute('dispose editor focus listener', () => focusDisposableRef.current?.dispose()) - focusDisposableRef.current = null - safeExecute('dispose editor blur listener', () => blurDisposableRef.current?.dispose()) - blurDisposableRef.current = null - safeExecute('unregister editor instance', () => unregisterEditorRef.current?.()) - unregisterEditorRef.current = null - safeExecute('cancel mosaic scroll raf', () => { - if (mosaicScrollRafRef.current != null) { - window.cancelAnimationFrame(mosaicScrollRafRef.current) - mosaicScrollRafRef.current = null - } - }) - safeExecute('cancel mosaic suppress timeout', () => { - if (suppressMosaicTimeoutRef.current != null) { - window.clearTimeout(suppressMosaicTimeoutRef.current) - suppressMosaicTimeoutRef.current = null + return () => { + const view = editorRef.current + if (view) { + safeExecute('cleanup editor', () => (view as any).__cleanup?.()) } - suppressMosaicEmitRef.current = false - }) - disableVimMode() - }, [editorRef, setEditor, disableVimMode]) + safeExecute('unregister editor', () => unregisterEditorRef.current?.()) + unregisterEditorRef.current = null + safeExecute('cancel mosaic scroll raf', () => { + if (mosaicScrollRafRef.current != null) { + window.cancelAnimationFrame(mosaicScrollRafRef.current) + mosaicScrollRafRef.current = null + } + }) + safeExecute('cancel mosaic suppress timeout', () => { + if (suppressMosaicTimeoutRef.current != null) { + window.clearTimeout(suppressMosaicTimeoutRef.current) + suppressMosaicTimeoutRef.current = null + } + suppressMosaicEmitRef.current = false + }) + } + }, [editorRef]) + // Listen for mosaic scroll sync from preview useEffect(() => { if (typeof window === 'undefined') return if (!scrollSyncGroupId) return + const handler = (event: Event) => { try { if (!syncScrollRef.current) return @@ -532,30 +397,23 @@ export function MarkdownEditor(props: MarkdownEditorProps) { const line = detail.line if (!Number.isFinite(line) || (line as number) < 1) return - const editorInstance = editorRef.current as monacoNs.editor.IStandaloneCodeEditor | null - if (!editorInstance) return - if ((editorInstance as any)?._isDisposed === true) return - const domNode = editorInstance.getDomNode?.() - if (!domNode) return + const view = editorRef.current + if (!view) return - const model = editorInstance.getModel?.() - if (!model) return - const maxLine = model.getLineCount?.() ?? null - const clamped = maxLine - ? Math.min(maxLine, Math.max(1, Math.floor(line as number))) - : Math.max(1, Math.floor(line as number)) + const maxLine = view.state.doc.lines + const clamped = maxLine ? 
Math.min(maxLine, Math.max(1, Math.floor(line as number))) : Math.max(1, Math.floor(line as number)) if (suppressMosaicTimeoutRef.current != null) { window.clearTimeout(suppressMosaicTimeoutRef.current) suppressMosaicTimeoutRef.current = null } suppressMosaicEmitRef.current = true + try { - ;(editorInstance as any).revealLineNearTop?.(clamped) - } catch (error) { - // Avoid noisy errors when editor is being disposed during tile close/layout changes. - if (error instanceof Error && /InstantiationService has been disposed/i.test(error.message)) return - throw error + const lineInfo = view.state.doc.line(clamped) + view.dispatch({ + effects: EditorView.scrollIntoView(lineInfo.from, { y: 'start' }), + }) } finally { suppressMosaicTimeoutRef.current = window.setTimeout(() => { suppressMosaicTimeoutRef.current = null @@ -566,22 +424,27 @@ export function MarkdownEditor(props: MarkdownEditorProps) { logEditorError('mosaic scroll sync receive', error) } } + window.addEventListener(MOSAIC_SCROLL_SYNC_EVENT, handler as EventListener) return () => { window.removeEventListener(MOSAIC_SCROLL_SYNC_EVENT, handler as EventListener) } }, [editorRef, scrollSyncGroupId]) - const toggleVim = useCallback(async () => { + const toggleVim = useCallback(() => { const next = !isVimMode setIsVimMode(next) if (typeof window !== 'undefined') localStorage.setItem('editorVimMode', String(next)) + + const view = editorRef.current + if (!view) return + if (next) { - await enableVimMode() + enableVimMode(view) } else { - disableVimMode() + disableVimMode(view) } - }, [isVimMode, enableVimMode, disableVimMode]) + }, [isVimMode, editorRef]) const handleFileUpload = useCallback(() => { if (readOnly) { @@ -592,10 +455,6 @@ export function MarkdownEditor(props: MarkdownEditorProps) { if (fileInputRef.current) fileInputRef.current.click() }, [emitReadOnlyWarning, readOnly, ensureThisEditorActive]) - // uploadFiles provided by hook - - // View mode is now controlled via ViewContext - const Toolbar = useMemo(() => ( !value) }, [isThisEditorActive]) + const shortcutToggleVim = useCallback(() => { if (!isThisEditorActive()) return - void toggleVim() + toggleVim() }, [isThisEditorActive, toggleVim]) + const shortcutUpload = useCallback(() => { if (!isThisEditorActive()) return handleFileUpload() @@ -636,10 +497,12 @@ export function MarkdownEditor(props: MarkdownEditorProps) { let id = target if (!uuidRe.test(target) && !shareToken) { try { - const resp = await listDocuments({ query: target }) + const resp = await listDocuments({}) const items = (resp.items ?? 
[]) as unknown as Array<{ id: string; title: string }> - const exact = items.find((r) => (r.title || '').toLowerCase() === target.toLowerCase()) - const pick = exact || items[0] + const targetLower = target.toLowerCase() + const exact = items.find((r) => (r.title || '').toLowerCase() === targetLower) + const partial = items.find((r) => (r.title || '').toLowerCase().includes(targetLower)) + const pick = exact || partial if (pick?.id) id = pick.id } catch (error) { logEditorError('lookup wiki link target', error) @@ -666,73 +529,6 @@ export function MarkdownEditor(props: MarkdownEditorProps) { } }, [isShareLink, isShareMount, navigate, shareScope, shareToken]) - // Ensure Monaco relayouts when view/layout changes or container resizes - useEffect(() => { - const ed = editorRef.current as monacoNs.editor.IStandaloneCodeEditor | null - if (!ed) return - const relayoutToContainer = () => { - safeExecute('editor relayout', () => { - const container = (ed as any).getContainerDomNode?.() as HTMLElement | null - const node = ed.getDomNode?.() as HTMLElement | null - const target = container || node?.parentElement || node - if (!target) { - ed.layout() - return - } - const rect = target.getBoundingClientRect() - if (!rect.width || !rect.height) { - ed.layout() - return - } - ed.layout({ width: rect.width, height: rect.height }) - }) - } - // immediate relayout on view change - relayoutToContainer() - // also schedule once after transition - const t = setTimeout(relayoutToContainer, 120) - // observe parent size changes - let ro: ResizeObserver | null = null - try { - const container = (ed as any).getContainerDomNode?.() as HTMLElement | null - const node = ed.getDomNode() as HTMLElement | null - const target = container || node?.parentElement || node - if (target && 'ResizeObserver' in window) { - ro = new ResizeObserver(() => relayoutToContainer()) - ro.observe(target) - } - } catch (error) { - logEditorError('init resize observer', error) - } - // window resize - window.addEventListener('resize', relayoutToContainer) - return () => { - clearTimeout(t) - safeExecute('disconnect resize observer', () => { - if (ro) ro.disconnect() - }) - window.removeEventListener('resize', relayoutToContainer) - } - }, [editorMountNonce, view, editorRef]) - - const handleEditorMount = useCallback( - (editor: monacoNs.editor.IStandaloneCodeEditor, monaco: Parameters[1]) => { - unregisterEditorRef.current?.() - unregisterEditorRef.current = registerEditor(editor as any) - safeExecute('dispose editor focus listener', () => focusDisposableRef.current?.dispose()) - safeExecute('dispose editor blur listener', () => blurDisposableRef.current?.dispose()) - focusDisposableRef.current = editor.onDidFocusEditorWidget(() => { - try { setEditor(editor as any) } catch {} - }) - blurDisposableRef.current = editor.onDidBlurEditorWidget(() => { - // Keep last active editor; do not clear on blur to avoid losing target when clicking chrome. - }) - handleMount(editor, monaco) - setEditorMountNonce((n) => n + 1) - }, - [handleMount, registerEditor, setEditor], - ) - const handleEditorDropFiles = useCallback( async (files: File[]) => { ensureThisEditorActive() @@ -741,8 +537,6 @@ export function MarkdownEditor(props: MarkdownEditorProps) { [ensureThisEditorActive, uploadFiles], ) - - return (
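Note: the component above consumes useEditorBinding (bindingExtensions, setEditorView, getInitialContent) but the hook itself is outside this diff. A minimal sketch of the shape the call sites imply, assuming yjs with y-codemirror.next; the hook name, the 'content' text key (which matches the export hook later in this diff), and the returned field names mirror the usage above, while everything else is an assumption:

    import { useCallback, useMemo, useRef } from 'react'
    import type { EditorView } from '@codemirror/view'
    import { yCollab } from 'y-codemirror.next'
    import type { Awareness } from 'y-protocols/awareness'
    import * as Y from 'yjs'

    // Sketch: binds a Y.Text to CodeMirror and exposes the pieces the component expects.
    export function useEditorBinding({ doc, awareness }: { doc: Y.Doc; awareness: Awareness; onTextChange?: () => void }) {
      const editorRef = useRef<EditorView | null>(null)
      const text = doc.getText('content')
      // yCollab keeps the CodeMirror document and the Y.Text in sync and renders remote cursors.
      const bindingExtensions = useMemo(() => [yCollab(text, awareness)], [text, awareness])
      const setEditorView = useCallback((view: EditorView) => { editorRef.current = view }, [])
      // EditorState.create must be seeded with the current Y.Text, or yCollab will diverge.
      const getInitialContent = useCallback(() => text.toString(), [text])
      return { text, editorRef, bindingExtensions, setEditorView, getInitialContent }
    }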
diff --git a/app/src/features/edit-document/ui/EditorLayout.tsx b/app/src/features/edit-document/ui/EditorLayout.tsx index 59bf0f5b..c164aa2d 100644 --- a/app/src/features/edit-document/ui/EditorLayout.tsx +++ b/app/src/features/edit-document/ui/EditorLayout.tsx @@ -1,7 +1,8 @@ -import { DiffEditor } from '@monaco-editor/react' +import { MergeView } from '@codemirror/merge' +import { Extension } from '@codemirror/state' +import { EditorView } from '@codemirror/view' import { AlertTriangle, Check, Loader2, SlidersHorizontal, X } from 'lucide-react' -import * as monacoNs from 'monaco-editor' -import { useCallback, useMemo, useEffect, useRef, useState, type CSSProperties, type ReactNode, type MutableRefObject } from 'react' +import { useCallback, useMemo, useEffect, useRef, type CSSProperties, type ReactNode, type MutableRefObject } from 'react' import { overlayPanelClass } from '@/shared/lib/overlay-classes' import { cn } from '@/shared/lib/utils' @@ -9,7 +10,7 @@ import type { ViewMode } from '@/shared/types/view-mode' import { Button } from '@/shared/ui/button' import type { UploadStatus } from '@/features/edit-document/hooks/useEditorUploads' -import { ensureRefmdThemes } from '@/features/edit-document/lib/monaco/theme' +import { createBaseExtensions } from '@/features/edit-document/lib/editor' import EditorPane from './EditorPane' import PreviewPane, { type PreviewPaneProps } from './PreviewPane' @@ -22,12 +23,13 @@ export type EditorLayoutProps = { toolbar: ReactNode toolbarOpen: boolean onToolbarOpenChange: (open: boolean) => void - monacoTheme: string - onEditorBeforeMount?: (monaco: typeof import('monaco-editor')) => void + isDarkMode: boolean readOnly: boolean onEditorDropFiles: (files: File[]) => Promise<void> - onEditorMount: (editor: monacoNs.editor.IStandaloneCodeEditor, monaco: typeof import('monaco-editor')) => void - editorRef: MutableRefObject<monacoNs.editor.IStandaloneCodeEditor | null> + onEditorViewCreated: (view: EditorView) => void + editorExtensions?: Extension[] + getInitialContent?: () => string + editorRef: MutableRefObject<EditorView | null> syncScroll: boolean onPreviewScroll: (percentage: number) => void previewScrollPct?: number @@ -58,7 +60,6 @@ modified?: string onChange?: (val: string) => void readOnly?: boolean - theme?: string actions?: { onKeepMine?: () => void onTakeTheirs?: () => void @@ -75,11 +76,12 @@ toolbar, toolbarOpen, onToolbarOpenChange, - monacoTheme, - onEditorBeforeMount, + isDarkMode, readOnly, onEditorDropFiles, - onEditorMount, + onEditorViewCreated, + editorExtensions, + getInitialContent, editorRef, syncScroll, onPreviewScroll, @@ -102,164 +104,59 @@ conflictHunkWidgets, conflictView, }: EditorLayoutProps) { - const diffEditorRef = useRef(null) - const monacoRef = useRef(null) - const [diffReady, setDiffReady] = useState(false) - const overlayNodesRef = useRef<Record<string, HTMLElement>>({}) - const overlayWidgetsRef = useRef<Record<string, monacoNs.editor.IContentWidget>>({}) - const overlayDisposablesRef = useRef([]) + const mergeViewContainerRef = useRef<HTMLDivElement | null>(null) + const mergeViewRef = useRef<MergeView | null>(null) + // Create and manage MergeView for conflict resolution useEffect(() => { - // cleanup helper - const cleanup = () => { - if (diffEditorRef.current && monacoRef.current) { - const modified = diffEditorRef.current.getModifiedEditor() - Object.values(overlayWidgetsRef.current).forEach((widget) => { - try { - modified.removeContentWidget(widget) - } catch { - /* ignore */ - } - }) + if (!conflictView || conflictView.kind !== 'text' || !mergeViewContainerRef.current) { + if
(mergeViewRef.current) { + mergeViewRef.current.destroy() + mergeViewRef.current = null } - Object.values(overlayNodesRef.current).forEach((node) => node.remove()) - overlayNodesRef.current = {} - overlayWidgetsRef.current = {} - overlayDisposablesRef.current.forEach((d) => d.dispose()) - overlayDisposablesRef.current = [] - } - - const diff = diffEditorRef.current - const monacoInstance = monacoRef.current - if (!diff || !monacoInstance || !diffReady) { - cleanup() - return - } - const modified = diff.getModifiedEditor() - const model = modified?.getModel() - if (!modified || !model || !conflictHunkWidgets || conflictHunkWidgets.length === 0) { - cleanup() return } - const host = - modified.getDomNode()?.querySelector('.overflow-guard') ?? - modified.getDomNode() ?? - document.createElement('div') - if (!host) { - cleanup() - return - } - if (host instanceof HTMLElement) { - const style = host.style - if (!style.position || style.position === 'static') { - style.position = 'relative' - } - } + const container = mergeViewContainerRef.current + const baseExtensions = createBaseExtensions({ + isDarkMode, + readOnly: conflictView.readOnly ?? false, + vimMode: false, + isMobile, + lineWrapping: true, + }) - const isDark = typeof document !== 'undefined' && document.documentElement.classList.contains('dark') - const palette = { - ours: { - bg: isDark ? 'rgba(127,29,29,0.30)' : '#fef2f2', - bgActive: isDark ? 'rgba(185,28,28,0.55)' : '#fee2e2', - color: isDark ? '#fecdd3' : '#b91c1c', + const mergeView = new MergeView({ + a: { + doc: conflictView.original ?? '', + extensions: [ + ...baseExtensions, + EditorView.editable.of(false), + ], }, - theirs: { - bg: isDark ? 'rgba(5,46,22,0.30)' : '#f0fdf4', - bgActive: isDark ? 'rgba(34,197,94,0.50)' : '#dcfce7', - color: isDark ? '#bbf7d0' : '#166534', + b: { + doc: conflictView.modified ?? '', + extensions: [ + ...baseExtensions, + EditorView.editable.of(!conflictView.readOnly), + EditorView.updateListener.of((update) => { + if (update.docChanged && conflictView.onChange) { + conflictView.onChange(update.state.doc.toString()) + } + }), + ], }, - } - - const createNode = (hunk: typeof conflictHunkWidgets[number]) => { - const node = document.createElement('div') - node.style.position = 'absolute' - node.style.display = 'inline-flex' - node.style.flexDirection = 'row' - node.style.alignItems = 'center' - node.style.gap = '8px' - node.style.padding = '2px 6px' - node.style.borderRadius = '10px' - node.style.background = 'transparent' - node.style.pointerEvents = 'auto' - node.style.whiteSpace = 'nowrap' - node.style.zIndex = '50' - node.style.marginLeft = '8px' - - const makeBtn = (label: string, side: 'ours' | 'theirs') => { - const btn = document.createElement('button') - btn.textContent = label - btn.style.fontSize = '11px' - btn.style.padding = '4px 10px' - btn.style.borderRadius = '8px' - btn.style.border = 'none' - btn.style.cursor = 'pointer' - btn.style.lineHeight = '1' - btn.style.fontWeight = hunk.choice === side ? '700' : '500' - btn.style.display = 'inline-flex' - btn.style.alignItems = 'center' - btn.style.justifyContent = 'center' - const colors = side === 'ours' ? palette.ours : palette.theirs - btn.style.background = hunk.choice === side ? 
colors.bgActive : colors.bg - btn.style.color = colors.color - btn.onmousedown = (e) => { - e.preventDefault() - e.stopPropagation() - } - btn.onclick = (e) => { - e.preventDefault() - e.stopPropagation() - hunk.onChoose(side) - } - return btn - } - - node.appendChild(makeBtn('Keep Mine', 'ours')) - node.appendChild(makeBtn('Take Remote', 'theirs')) - return node - } - - conflictHunkWidgets.forEach((hunk) => { - const node = createNode(hunk) - overlayNodesRef.current[hunk.id] = node - const widget: monacoNs.editor.IContentWidget = { - getId: () => `conflict-hunk-${hunk.id}`, - getDomNode: () => node, - getPosition: () => ({ - position: { - lineNumber: Math.max(hunk.line, 1), - // Place at line end so it follows text instead of gutter. - column: - (modified.getModel()?.getLineMaxColumn(Math.max(hunk.line, 1)) ?? 1) + - 1, - }, - preference: [monacoNs.editor.ContentWidgetPositionPreference.EXACT], - }), - } - overlayWidgetsRef.current[hunk.id] = widget - modified.addContentWidget(widget) + parent: container, + collapseUnchanged: {}, }) - const relayout = () => { - Object.values(overlayWidgetsRef.current).forEach((widget) => { - try { - modified.layoutContentWidget(widget) - } catch { - /* ignore */ - } - }) - } - - relayout() - - overlayDisposablesRef.current.push( - modified.onDidScrollChange(() => relayout()), - modified.onDidLayoutChange(() => relayout()), - modified.onDidChangeConfiguration(() => relayout()), - ) + mergeViewRef.current = mergeView - return cleanup - }, [conflictHunkWidgets, diffReady]) + return () => { + mergeView.destroy() + mergeViewRef.current = null + } + }, [conflictView, isDarkMode, isMobile]) const uploadStatusNode = (() => { if (uploadStatus.state === 'idle') return null @@ -347,10 +244,13 @@ export function EditorLayout({ const revealEditorLine = useCallback( (line: number) => { - const editor = editorRef.current - if (!editor) return + const editorView = editorRef.current + if (!editorView) return try { - ;(editor as any).revealLineNearTop?.(line) + const lineInfo = editorView.state.doc.line(line) + editorView.dispatch({ + effects: EditorView.scrollIntoView(lineInfo.from, { y: 'start' }), + }) } catch {} }, [editorRef], @@ -368,35 +268,30 @@ export function EditorLayout({
{layoutState.wEditor !== '0%' && (
- {editorBanner ?
{editorBanner}
: null} + 'flex flex-1 min-h-0 min-w-0 flex-col', + !isMobile && 'px-4 pb-6 pt-6 sm:px-6 sm:pb-8 sm:pt-8', + )} + > + {editorBanner ?
{editorBanner}
: null}
{editorOverlay ? ( -
- {editorOverlay} -
+
{editorOverlay}
) : null}
{uploadStatusNode} @@ -431,57 +326,10 @@ export function EditorLayout({ {conflictView && conflictView.kind === 'text' ? (
{conflictControls ?
{conflictControls}
: null} -
- { - monacoRef.current = monacoInstance - ensureRefmdThemes(monacoInstance) - }} - onMount={(editor, monacoInstance) => { - diffEditorRef.current = editor - monacoRef.current = monacoInstance - setDiffReady(true) - monacoInstance.editor.setTheme(conflictView.theme ?? monacoTheme) - const modified = editor.getModifiedEditor() - const original = editor.getOriginalEditor() - // Align gutters; show line numbers only on original - original.updateOptions({ - glyphMargin: false, - lineDecorationsWidth: 24, - lineNumbersMinChars: 1, // Monaco enforces >=1 - lineNumbers: 'on' as const, - }) - modified.updateOptions({ - glyphMargin: false, - lineDecorationsWidth: 24, - lineNumbersMinChars: 1, // Monaco enforces >=1 - lineNumbers: 'off' as const, - }) - if (conflictView.onChange) { - modified.onDidChangeModelContent(() => { - conflictView.onChange?.(modified.getValue()) - }) - } - }} - language="markdown" - theme={conflictView.theme ?? monacoTheme} - options={{ - readOnly: conflictView.readOnly, - renderSideBySide: false, - renderMarginRevertIcon: false, - renderOverviewRuler: false, - renderIndicators: false, - minimap: { enabled: false }, - automaticLayout: true, - wordWrap: 'on', - scrollBeyondLastLine: true, - fontSize: isMobile ? 17 : 14, - lineHeight: isMobile ? 26 : 22, - }} - /> -
+
{conflictHunkWidgets && conflictHunkWidgets.length ? (
@@ -497,14 +345,15 @@ export function EditorLayout({
) : ( { if (!readOnly) await onEditorDropFiles(files) }} - isMobile={isMobile} - onMount={onEditorMount} vimStatusBarRef={vimStatusBarRef} showVimStatusBar={showVimStatusBar} /> @@ -519,15 +368,12 @@ export function EditorLayout({
void + isDarkMode: boolean readOnly?: boolean - onMount: OnMount - onDropFiles?: (files: File[]) => Promise<void> | void isMobile?: boolean + extensions?: Extension[] + getInitialContent?: () => string + onViewCreated?: (view: EditorView) => void + onDropFiles?: (files: File[]) => Promise<void> | void vimStatusBarRef: MutableRefObject<HTMLDivElement | null> showVimStatusBar?: boolean } -export default function EditorPane({ theme, onBeforeMount, readOnly, onMount, onDropFiles, isMobile = false, vimStatusBarRef, showVimStatusBar = false }: Props) { +export default function EditorPane({ + isDarkMode, + readOnly = false, + isMobile = false, + extensions = [], + getInitialContent, + onViewCreated, + onDropFiles, + vimStatusBarRef, + showVimStatusBar = false, +}: Props) { + const containerRef = useRef<HTMLDivElement | null>(null) + const viewRef = useRef<EditorView | null>(null) const [isDragging, setIsDragging] = useState(false) const dragCounterRef = useRef(0) + // Create and mount the editor - recreate when extensions change + useEffect(() => { + if (!containerRef.current) return + + // Get initial content from Y.Text - this is crucial for yCollab to work + const initialContent = getInitialContent?.() ?? '' + + const baseExtensions = createEditorExtensions({ + isDarkMode, + readOnly, + vimMode: false, + isMobile, + lineWrapping: true, + }) + + const state = EditorState.create({ + doc: initialContent, + extensions: [...baseExtensions, ...extensions], + }) + + const view = new EditorView({ + state, + parent: containerRef.current, + }) + + viewRef.current = view + onViewCreated?.(view) + + return () => { + view.destroy() + viewRef.current = null + } + }, [extensions]) + + // Update theme when isDarkMode changes + useEffect(() => { + const view = viewRef.current + if (!view) return + + view.dispatch({ + effects: themeCompartment.reconfigure(getThemeExtension(isDarkMode)), + }) + }, [isDarkMode]) + + // Update readOnly when it changes + useEffect(() => { + const view = viewRef.current + if (!view) return + + view.dispatch({ + effects: readOnlyCompartment.reconfigure(EditorState.readOnly.of(readOnly)), + }) + }, [readOnly]) + + const handleDragEnter = useCallback((e: React.DragEvent) => { + if (e.dataTransfer?.types?.includes('Files')) { + dragCounterRef.current++ + setIsDragging(true) + } + }, []) + + const handleDragLeave = useCallback(() => { + dragCounterRef.current = Math.max(0, dragCounterRef.current - 1) + if (dragCounterRef.current === 0) setIsDragging(false) + }, []) + + const handleDragOver = useCallback((e: React.DragEvent) => { + if (e.dataTransfer?.types?.includes('Files')) { + e.preventDefault() + setIsDragging(true) + } + }, []) + + const handleDrop = useCallback( + async (e: React.DragEvent) => { + e.preventDefault() + const files = Array.from(e.dataTransfer?.files || []) + setIsDragging(false) + dragCounterRef.current = 0 + if (files.length > 0) { + try { + await onDropFiles?.(files) + } catch {} + } + }, + [onDropFiles], + ) + return (
{ if (e.dataTransfer?.types?.includes('Files')) { dragCounterRef.current++; setIsDragging(true) } }} - onDragLeave={() => { dragCounterRef.current = Math.max(0, dragCounterRef.current - 1); if (dragCounterRef.current === 0) setIsDragging(false) }} - onDragOver={(e) => { if (e.dataTransfer?.types?.includes('Files')) { e.preventDefault(); setIsDragging(true) } }} - onDrop={async (e) => { - e.preventDefault() - const files = Array.from(e.dataTransfer?.files || []) - setIsDragging(false) - dragCounterRef.current = 0 - if (files.length > 0) { try { await onDropFiles?.(files as File[]) } catch {} } - }} + onDragEnter={handleDragEnter} + onDragLeave={handleDragLeave} + onDragOver={handleDragOver} + onDrop={handleDrop} > -
('') const { theme } = useTheme() const highlightTheme = useMemo( - () => (theme === 'dark' ? 'OneHalfDark' : 'OneHalfLight'), + () => (theme === 'dark' ? 'one-dark-pro' : 'github-light'), [theme], ) + // Get plugin manifests for placeholder hydration + const { plugins } = usePluginManifest() + const rendererSpecs = useMemo(() => collectRendererSpecs(plugins), [plugins]) + const placeholderKinds = useMemo( + () => [...new Set(rendererSpecs.map((s) => s.kind))], + [rendererSpecs] + ) + const requestRef = useRef(null) const queuedRef = useRef<{ text: string; override?: string } | null>(null) const latestKeyRef = useRef('') @@ -79,19 +89,18 @@ function ServerMarkdown({ content, className, documentIdOverride, onTagClick, on let token: string | undefined try { token = new URLSearchParams(window.location.search).get('token') || undefined } catch {} - const promise = renderMarkdown({ - text, - options: { - flavor: 'doc', - features: ['gfm', 'highlight'], - sanitize: true, - hardbreaks: true as any, - absolute_attachments: true as any, - base_origin: apiOrigin as any, - doc_id: override as any, - token: token as any, - theme: highlightTheme as any, - } as any, + const promise = renderMarkdown(text, { + flavor: 'doc', + features: ['gfm', 'highlight'], + sanitize: true, + hardbreaks: true, + // Keep attachment paths as-is (./attachments/xxx) for client-side file map resolution + absoluteAttachments: false, + baseOrigin: apiOrigin, + docId: override, + token: token, + theme: highlightTheme, + placeholderKinds: placeholderKinds.length > 0 ? placeholderKinds : undefined, }) requestRef.current = promise as any @@ -99,7 +108,18 @@ function ServerMarkdown({ content, className, documentIdOverride, onTagClick, on try { const out = await promise if (latestKeyRef.current === requestKey) { - const nextHtml = out?.html || '' + let nextHtml = out?.html || '' + + // Add hydration attributes to placeholders + const placeholders = out?.placeholders || [] + if (placeholders.length > 0 && rendererSpecs.length > 0) { + nextHtml = addPlaceholderHydration(nextHtml, placeholders, rendererSpecs, { + theme: highlightTheme, + docId: override, + token, + }) + } + lastSuccessfulHtmlRef.current = nextHtml setHtml(nextHtml) } @@ -119,7 +139,7 @@ function ServerMarkdown({ content, className, documentIdOverride, onTagClick, on } } }, - [highlightTheme], + [highlightTheme, rendererSpecs, placeholderKinds], ) useEffect(() => { @@ -146,10 +166,21 @@ function ServerMarkdown({ content, className, documentIdOverride, onTagClick, on wrapper.innerHTML = html morphdom(el, wrapper, { childrenOnly: true, - onBeforeElUpdated: (fromEl) => { + onBeforeElUpdated: (fromEl, toEl) => { if (fromEl.tagName === 'REFMD-WIKILINK' || fromEl.tagName === 'REFMD-ATTACHMENT') { return false } + // Preserve decrypted images - don't let morphdom reset their src + if (fromEl.tagName === 'IMG' && (fromEl as HTMLImageElement).dataset.decryptedSrc) { + const fromImg = fromEl as HTMLImageElement + const toImg = toEl as HTMLImageElement + const toSrc = toImg.getAttribute('src') || '' + // Only skip update if the original src (before decryption) matches + // This preserves the decrypted blob URL while allowing updates if the image actually changed + if (fromImg.dataset.e2eeProcessedSrc === toSrc || fromImg.dataset.e2eeProcessing === toSrc) { + return false + } + } return true }, }) @@ -159,7 +190,7 @@ function ServerMarkdown({ content, className, documentIdOverride, onTagClick, on const detachFns: Array<() => void> = [] try { - const maybeFns = upgradeAll(el) + 
const maybeFns = upgradeAll(el, documentIdOverride) if (Array.isArray(maybeFns)) detachFns.push(...maybeFns) } catch {} @@ -200,11 +231,77 @@ function ServerMarkdown({ content, className, documentIdOverride, onTagClick, on }) } const imgs = Array.from(el.querySelectorAll('img')) as HTMLImageElement[] + + // Process images - decrypt E2EE images and replace src with blob URL + for (const img of imgs) { + const src = img.getAttribute('src') || '' + + // Skip if already processing this exact src + if (img.dataset.e2eeProcessing === src) { + continue + } + + // Skip if already processed this exact src + if (img.dataset.e2eeProcessedSrc === src) { + continue + } + + const bridge = (window as any).__refmd_file_decryption__ + + // Check if this is a logical path (./attachments/xxx) that needs decryption + if ((src.startsWith('./attachments/') || src.startsWith('attachments/')) && documentIdOverride && bridge?.resolveAndDecrypt) { + // Mark as processing with the specific src + img.dataset.e2eeProcessing = src + img.style.opacity = '0.5' + img.alt = 'Loading encrypted image...' + + // Capture the original src to verify later + const originalSrc = src + + bridge.resolveAndDecrypt(src, documentIdOverride) + .then((result: { blobUrl: string; filename: string; mimeType: string } | null) => { + // Verify the image still has the same src we started with + // (morphdom might have reused this element for a different image) + const currentSrc = img.getAttribute('src') || '' + const currentProcessing = img.dataset.e2eeProcessing + + // Only apply result if this element still corresponds to the original src + // Either the src hasn't changed, or the processing marker matches + if (currentSrc !== originalSrc && currentProcessing !== originalSrc) { + // Element was reused for a different image, skip this result + return + } + + delete img.dataset.e2eeProcessing + img.dataset.e2eeProcessedSrc = originalSrc + if (result) { + img.src = result.blobUrl + img.alt = result.filename + img.dataset.decryptedSrc = result.blobUrl + } + img.style.opacity = '1' + }) + .catch(() => { + // Only clear processing state if it still matches + if (img.dataset.e2eeProcessing === originalSrc) { + delete img.dataset.e2eeProcessing + img.style.opacity = '1' + } + }) + } + } + + // Note: Blob URLs are NOT revoked here because they are cached in blobUrlCache + // and may be reused across renders. They are cleaned up when clearFileMap is called + // (e.g., when navigating away from the document). 
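For reference, the contract the hydration code above assumes of the window bridge, as a sketch; the global name and the resolveAndDecrypt return shape are taken from the call sites above, and nothing beyond that is implied:

    // Sketch of the decryption bridge surface used by the E2EE image code above.
    interface FileDecryptionBridge {
      // Resolves a logical "./attachments/..." path through the document file map,
      // decrypts the bytes with the document key, and returns a cached blob URL.
      resolveAndDecrypt(
        path: string,
        documentId: string,
      ): Promise<{ blobUrl: string; filename: string; mimeType: string } | null>
    }
    declare global {
      interface Window { __refmd_file_decryption__?: FileDecryptionBridge }
    }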
+ detachFns.push(...imgs.map((img) => { const handler = (e: Event) => { e.preventDefault() e.stopPropagation() - setModalImage({ src: img.getAttribute('src') || '', alt: img.getAttribute('alt') || undefined }) + // Use decrypted src if available + const src = img.dataset.decryptedSrc || img.getAttribute('src') || '' + setModalImage({ src, alt: img.getAttribute('alt') || undefined }) } img.addEventListener('click', handler) return () => img.removeEventListener('click', handler) @@ -231,7 +328,7 @@ detachFns.push(() => el.removeEventListener('click', onTagClickHandler)) return () => { detachFns.forEach((fn) => fn()) } - }, [html, onTagClick]) + }, [html, onTagClick, documentIdOverride]) return ( <> diff --git a/app/src/features/export/hooks/useExport.ts b/app/src/features/export/hooks/useExport.ts new file mode 100644 index 00000000..0223fd04 --- /dev/null +++ b/app/src/features/export/hooks/useExport.ts @@ -0,0 +1,316 @@ +/** + * useExport Hook + * + * Provides E2EE-compliant document export functionality. + * All conversion happens client-side after decryption. + */ + +import { useCallback, useState } from 'react' +import * as Y from 'yjs' + +import { + getDocumentContent, + type EncryptedUpdateEntry, +} from '@/shared/api/client' + + +import { + resolveAndDecrypt, + initFileMap, +} from '@/entities/file/decryption-bridge' + +import { + decrypt, + fetchDocumentKeys, + getSodium, +} from '@/features/security' + +import { createDocumentArchive } from '../lib/archive' +import { + type ExportFormat, + getExtension, + getPandocFormat, + getFormatMetadata, + sanitizeFilename, +} from '../lib/formats' +import { exportWithPandoc, preloadPandoc } from '../lib/pandoc' +import { exportToPdf, type PdfExportOptions } from '../lib/pdf' + +export interface UseExportOptions { + documentId: string + workspaceId: string + title: string +} + +export interface ExportState { + isExporting: boolean + progress: string | null + error: string | null +} + +export interface UseExportResult { + exportDocument: (format: ExportFormat) => Promise<void> + state: ExportState +} + +/** + * Fetch and decrypt document content + */ +async function fetchDecryptedMarkdown( + documentId: string, + workspaceId: string +): Promise<string> { + // Fetch content from API + const contentRes = await getDocumentContent({ id: documentId }) + + const hasSnapshot = contentRes.content && contentRes.content.length > 0 + const hasUpdates = contentRes.updates && contentRes.updates.length > 0 + + if (!hasSnapshot && !hasUpdates) { + return '' + } + + const sodium = await getSodium() + const doc = new Y.Doc() + + try { + // Get encryption keys + const { dek } = await fetchDocumentKeys(documentId, workspaceId) + + // Apply snapshot if present + if (hasSnapshot) { + const encryptedContent = sodium.from_base64(contentRes.content, sodium.base64_variants.ORIGINAL) + const nonce = sodium.from_base64(contentRes.nonce!, sodium.base64_variants.ORIGINAL) + const yjsState = await decrypt(dek, encryptedContent, nonce) + Y.applyUpdateV2(doc, yjsState) + } + + // Apply pending updates + if (hasUpdates) { + for (const update of contentRes.updates as EncryptedUpdateEntry[]) { + const encryptedData = sodium.from_base64(update.data, sodium.base64_variants.ORIGINAL) + const nonce = sodium.from_base64(update.nonce!, sodium.base64_variants.ORIGINAL) + const yjsUpdate = await decrypt(dek, encryptedData, nonce) + Y.applyUpdateV2(doc, yjsUpdate) + } + } + + return doc.getText('content').toString() + }
finally { + doc.destroy() + } +} + +/** + * Create attachment resolver that returns Blob + * Used for pandoc exports (DOCX, EPUB, etc.) + */ +function createBlobAttachmentResolver( + documentId: string +): (path: string) => Promise<Blob | null> { + return async (path: string): Promise<Blob | null> => { + try { + const result = await resolveAndDecrypt(path, documentId) + if (!result) { + return null + } + + // Fetch the blob from the blob URL + const response = await fetch(result.blobUrl) + return await response.blob() + } catch (error) { + console.warn('[Export] Failed to resolve attachment:', path, error) + return null + } + } +} + +/** + * Create attachment resolver for PDF export + * Converts blob URLs to base64 data URIs + */ +function createDataUriAttachmentResolver( + documentId: string +): (path: string) => Promise<string | null> { + return async (path: string): Promise<string | null> => { + try { + const result = await resolveAndDecrypt(path, documentId) + if (!result) { + return null + } + + // Fetch the blob and convert to data URI + const response = await fetch(result.blobUrl) + const blob = await response.blob() + + return new Promise<string>((resolve, reject) => { + const reader = new FileReader() + reader.onloadend = () => resolve(reader.result as string) + reader.onerror = reject + reader.readAsDataURL(blob) + }) + } catch (error) { + console.warn('[Export] Failed to resolve attachment:', path, error) + return null + } + } +} + +/** + * Convert markdown to the requested format + */ +async function convertToFormat( + markdown: string, + format: ExportFormat, + title: string, + documentId?: string, + workspaceId?: string +): Promise<Blob> { + const meta = getFormatMetadata(format) + + // Special handling: Archive (ZIP) + if (meta.isArchive) { + return createDocumentArchive(markdown, sanitizeFilename(title)) + } + + // Initialize file map if documentId is provided (needed for all formats with attachments) + if (documentId && workspaceId) { + const { dek } = await fetchDocumentKeys(documentId, workspaceId) + await initFileMap(documentId, dek) + } + + // Special handling: PDF (pandoc → HTML → browser print) + if (meta.useHtml2Pdf) { + const options: PdfExportOptions = {} + + if (documentId) { + options.documentId = documentId + options.resolveAttachment = createDataUriAttachmentResolver(documentId) + } + + return exportToPdf(markdown, title, options) + } + + // Plain markdown - no conversion needed + if (format === 'markdown') { + return new Blob([markdown], { type: meta.mimeType }) + } + + // All other formats: use pandoc-wasm + const pandocFormat = getPandocFormat(format) + if (!pandocFormat) { + throw new Error(`Unsupported format: ${format}`) + } + + return exportWithPandoc(markdown, pandocFormat, meta.mimeType, { + standalone: true, + title, + resolveAttachment: documentId ?
createBlobAttachmentResolver(documentId) : undefined, + }) +} + +/** + * Trigger file download + */ +function downloadBlob(blob: Blob, filename: string): void { + const url = URL.createObjectURL(blob) + try { + const link = document.createElement('a') + link.href = url + link.download = filename + link.style.display = 'none' + document.body.appendChild(link) + link.click() + document.body.removeChild(link) + } finally { + URL.revokeObjectURL(url) + } +} + +/** + * Hook for exporting documents with E2EE support + */ +export function useExport(options: UseExportOptions): UseExportResult { + const { documentId, workspaceId, title } = options + const [state, setState] = useState<ExportState>({ + isExporting: false, + progress: null, + error: null, + }) + + const exportDocument = useCallback( + async (format: ExportFormat) => { + setState({ isExporting: true, progress: 'Decrypting document...', error: null }) + + try { + // 1. Fetch and decrypt content + const markdown = await fetchDecryptedMarkdown(documentId, workspaceId) + const sanitizedTitle = sanitizeFilename(title) + + // 2. Convert to requested format + setState(prev => ({ ...prev, progress: `Converting to ${format}...` })) + + const blob = await convertToFormat(markdown, format, title, documentId, workspaceId) + const filename = `${sanitizedTitle}.${getExtension(format)}` + + // 3. Download + setState(prev => ({ ...prev, progress: 'Downloading...' })) + downloadBlob(blob, filename) + + setState({ isExporting: false, progress: null, error: null }) + } catch (error) { + const message = error instanceof Error ? error.message : 'Export failed' + setState({ isExporting: false, progress: null, error: message }) + throw error + } + }, + [documentId, workspaceId, title] + ) + + return { exportDocument, state } +} + +/** + * Standalone export function for use outside of React components + */ +export async function exportDocumentFile( + documentId: string, + workspaceId: string, + title: string, + format: ExportFormat +): Promise<string> { + // Fetch and decrypt content + const markdown = await fetchDecryptedMarkdown(documentId, workspaceId) + const sanitizedTitle = sanitizeFilename(title) + + // Convert to requested format + const blob = await convertToFormat(markdown, format, title, documentId, workspaceId) + const filename = `${sanitizedTitle}.${getExtension(format)}` + + // Download + downloadBlob(blob, filename) + + return filename +} + +/** + * Download workspace archive (not yet implemented) + * + * @deprecated Workspace archive download is not yet available. + * This feature is being migrated to client-side export for E2EE compliance. + */ +export async function downloadWorkspaceArchive(_params: { + workspaceId: string + workspaceName: string + format?: ExportFormat +}): Promise<void> { + throw new Error( + 'Workspace archive download is not yet available. ' + + 'This feature is being migrated to client-side export for E2EE compliance.' + ) +} + +/** + * Preload pandoc-wasm in background (call when export dialog opens) + */ +export { preloadPandoc } diff --git a/app/src/features/export/index.ts b/app/src/features/export/index.ts new file mode 100644 index 00000000..e4091293 --- /dev/null +++ b/app/src/features/export/index.ts @@ -0,0 +1,52 @@ +/** + * Export Feature Module + * + * E2EE-compliant client-side document export. + * All conversion happens after decryption, ensuring server never sees plaintext.
+ * + * Architecture: + * - Most formats: pandoc-wasm (dynamically imported on first use) + * - PDF: pdfmake (pandoc cannot generate PDF without LaTeX) + * - Archive: jszip + */ + +// Formats +export { + type ExportFormat, + type PandocOutputFormat, + type ExportFormatCategory, + type ExportFormatMetadata, + EXPORT_FORMATS, + getFormatMetadata, + getExtension, + getMimeType, + getPandocFormat, + sanitizeFilename, +} from './lib/formats' + +// Pandoc conversion (primary converter for most formats) +export { + convertWithPandoc, + exportWithPandoc, + isPandocLoaded, + preloadPandoc, +} from './lib/pandoc' + +// Special converters +export { exportToPdf, type PdfExportOptions } from './lib/pdf' +export { + createArchive, + createDocumentArchive, + createWorkspaceArchive, + type ArchiveFile, +} from './lib/archive' + +// Hooks +export { + useExport, + exportDocumentFile, + downloadWorkspaceArchive, + type UseExportOptions, + type UseExportResult, + type ExportState, +} from './hooks/useExport' diff --git a/app/src/features/export/lib/archive.ts b/app/src/features/export/lib/archive.ts new file mode 100644 index 00000000..ae44e1d6 --- /dev/null +++ b/app/src/features/export/lib/archive.ts @@ -0,0 +1,95 @@ +/** + * Archive Export + * + * Creates ZIP archives containing markdown and attachments. + * All processing happens client-side for E2EE compliance. + */ + +import JSZip from 'jszip' + +export interface ArchiveFile { + /** Relative path within the archive (e.g., "document.md" or "attachments/image.png") */ + path: string + /** File content as Blob, string, or Uint8Array */ + content: Blob | string | Uint8Array +} + +/** + * Create a ZIP archive from files + */ +export async function createArchive(files: ArchiveFile[]): Promise<Blob> { + const zip = new JSZip() + + for (const file of files) { + if (file.content instanceof Blob) { + zip.file(file.path, file.content) + } else if (typeof file.content === 'string') { + zip.file(file.path, file.content) + } else { + zip.file(file.path, file.content) + } + } + + return await zip.generateAsync({ + type: 'blob', + compression: 'DEFLATE', + compressionOptions: { level: 6 }, + }) +} + +/** + * Create a document archive with markdown and optional attachments + */ +export async function createDocumentArchive( + markdown: string, + filename: string, + attachments?: { name: string; data: Blob | Uint8Array }[] +): Promise<Blob> { + const files: ArchiveFile[] = [ + { + path: `${filename}.md`, + content: markdown, + }, + ] + + if (attachments && attachments.length > 0) { + for (const attachment of attachments) { + files.push({ + path: `attachments/${attachment.name}`, + content: attachment.data, + }) + } + } + + return await createArchive(files) +} + +/** + * Create a workspace archive with multiple documents + */ +export async function createWorkspaceArchive( + documents: { path: string; content: string; attachments?: { name: string; data: Blob | Uint8Array }[] }[] +): Promise<Blob> { + const files: ArchiveFile[] = [] + + for (const doc of documents) { + // Add the markdown file + files.push({ + path: doc.path.endsWith('.md') ?
doc.path : `${doc.path}.md`, + content: doc.content, + }) + + // Add attachments if any + if (doc.attachments && doc.attachments.length > 0) { + const docDir = doc.path.replace(/\.md$/, '').replace(/[^/]+$/, '') + for (const attachment of doc.attachments) { + files.push({ + path: `${docDir}attachments/${attachment.name}`, + content: attachment.data, + }) + } + } + } + + return await createArchive(files) +} diff --git a/app/src/features/export/lib/formats.ts b/app/src/features/export/lib/formats.ts new file mode 100644 index 00000000..d5c7b3f1 --- /dev/null +++ b/app/src/features/export/lib/formats.ts @@ -0,0 +1,480 @@ +/** + * Export Format Definitions + * + * Defines supported export formats for E2EE-compliant client-side export. + * - PDF: pdfmake (LaTeX not available in browser) + * - Other formats: pandoc-wasm + */ + +// Pandoc output format names +export type PandocOutputFormat = + | 'markdown' + | 'html' + | 'html5' + | 'latex' + | 'beamer' + | 'context' + | 'man' + | 'mediawiki' + | 'dokuwiki' + | 'textile' + | 'org' + | 'texinfo' + | 'opml' + | 'docbook' + | 'opendocument' + | 'odt' + | 'docx' + | 'rtf' + | 'epub' + | 'epub3' + | 'fb2' + | 'asciidoc' + | 'icml' + | 'slidy' + | 'slideous' + | 'dzslides' + | 'revealjs' + | 's5' + | 'json' + | 'plain' + | 'commonmark' + | 'commonmark_x' + | 'markdown_strict' + | 'markdown_phpextra' + | 'gfm' + | 'rst' + | 'native' + | 'haddock' + +// All export formats (includes archive and pdf which are handled specially) +export type ExportFormat = + | 'archive' + | 'pdf' + | PandocOutputFormat + +export type ExportFormatCategory = 'primary' | 'other' + +export interface ExportFormatMetadata { + label: string + description: string + extension: string + mimeType: string + category: ExportFormatCategory + group?: string + /** If true, use html2pdf.js (pandoc → HTML → PDF) */ + useHtml2Pdf?: boolean + /** If true, this is a ZIP archive (special handling) */ + isArchive?: boolean + /** Pandoc format name (if different from key) */ + pandocFormat?: PandocOutputFormat +} + +export const EXPORT_FORMATS: Record<ExportFormat, ExportFormatMetadata> = { + // Primary formats + archive: { + label: 'ZIP Archive (.zip)', + description: 'Markdown with all attachments bundled', + extension: 'zip', + mimeType: 'application/zip', + category: 'primary', + isArchive: true, + }, + markdown: { + label: 'Markdown (.md)', + description: 'Plain markdown document', + extension: 'md', + mimeType: 'text/markdown; charset=utf-8', + category: 'primary', + pandocFormat: 'markdown', + }, + html: { + label: 'HTML (.html)', + description: 'Self-contained HTML page', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'primary', + pandocFormat: 'html5', + }, + pdf: { + label: 'PDF (.pdf)', + description: 'Portable Document Format', + extension: 'pdf', + mimeType: 'application/pdf', + category: 'primary', + useHtml2Pdf: true, + }, + docx: { + label: 'Word (.docx)', + description: 'Microsoft Word document', + extension: 'docx', + mimeType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + category: 'primary', + pandocFormat: 'docx', + }, + + // Web & Slides + html5: { + label: 'HTML5 (.html)', + description: 'HTML5 output; self-contained page', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'other', + group: 'Web & Slides', + pandocFormat: 'html5', + }, + slidy: { + label: 'Slidy (.html)', + description: 'Slidy HTML presentation', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'other', + group: 'Web & Slides', +
pandocFormat: 'slidy', + }, + slideous: { + label: 'Slideous (.html)', + description: 'Slideous HTML presentation', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'other', + group: 'Web & Slides', + pandocFormat: 'slideous', + }, + dzslides: { + label: 'DZSlides (.html)', + description: 'DZSlides HTML presentation', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'other', + group: 'Web & Slides', + pandocFormat: 'dzslides', + }, + revealjs: { + label: 'reveal.js (.html)', + description: 'reveal.js HTML presentation', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'other', + group: 'Web & Slides', + pandocFormat: 'revealjs', + }, + s5: { + label: 'S5 (.html)', + description: 'S5 HTML presentation', + extension: 'html', + mimeType: 'text/html; charset=utf-8', + category: 'other', + group: 'Web & Slides', + pandocFormat: 's5', + }, + + // TeX & Academic + latex: { + label: 'LaTeX (.tex)', + description: 'LaTeX document source', + extension: 'tex', + mimeType: 'application/x-tex; charset=utf-8', + category: 'other', + group: 'TeX & Academic', + pandocFormat: 'latex', + }, + beamer: { + label: 'Beamer slides (.tex)', + description: 'LaTeX Beamer slide deck', + extension: 'tex', + mimeType: 'application/x-tex; charset=utf-8', + category: 'other', + group: 'TeX & Academic', + pandocFormat: 'beamer', + }, + context: { + label: 'ConTeXt (.tex)', + description: 'ConTeXt document source', + extension: 'tex', + mimeType: 'application/x-tex; charset=utf-8', + category: 'other', + group: 'TeX & Academic', + pandocFormat: 'context', + }, + + // Office & Rich Text + odt: { + label: 'ODT (.odt)', + description: 'OpenDocument Text document', + extension: 'odt', + mimeType: 'application/vnd.oasis.opendocument.text', + category: 'other', + group: 'Office & Rich Text', + pandocFormat: 'odt', + }, + opendocument: { + label: 'OpenDocument Flat XML (.fodt)', + description: 'Flat OpenDocument Text document', + extension: 'fodt', + mimeType: 'application/vnd.oasis.opendocument.text', + category: 'other', + group: 'Office & Rich Text', + pandocFormat: 'opendocument', + }, + rtf: { + label: 'RTF (.rtf)', + description: 'Rich Text Format document', + extension: 'rtf', + mimeType: 'application/rtf', + category: 'other', + group: 'Office & Rich Text', + pandocFormat: 'rtf', + }, + icml: { + label: 'ICML (.icml)', + description: 'Adobe InCopy ICML document', + extension: 'icml', + mimeType: 'application/xml', + category: 'other', + group: 'Office & Rich Text', + pandocFormat: 'icml', + }, + + // E-books + epub: { + label: 'EPUB 2 (.epub)', + description: 'EPUB eBook (v2)', + extension: 'epub', + mimeType: 'application/epub+zip', + category: 'other', + group: 'E-books', + pandocFormat: 'epub', + }, + epub3: { + label: 'EPUB 3 (.epub)', + description: 'EPUB eBook (v3)', + extension: 'epub', + mimeType: 'application/epub+zip', + category: 'other', + group: 'E-books', + pandocFormat: 'epub3', + }, + fb2: { + label: 'FictionBook (.fb2)', + description: 'FictionBook eBook', + extension: 'fb2', + mimeType: 'application/xml', + category: 'other', + group: 'E-books', + pandocFormat: 'fb2', + }, + + // Wiki & Markup + mediawiki: { + label: 'MediaWiki (.mediawiki)', + description: 'MediaWiki markup', + extension: 'mediawiki', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'mediawiki', + }, + dokuwiki: { + label: 'DokuWiki (.txt)', + description: 'DokuWiki markup', + extension: 'txt', + mimeType: 
'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'dokuwiki', + }, + textile: { + label: 'Textile (.textile)', + description: 'Textile markup', + extension: 'textile', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'textile', + }, + org: { + label: 'Org-mode (.org)', + description: 'Emacs Org-mode document', + extension: 'org', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'org', + }, + texinfo: { + label: 'Texinfo (.texi)', + description: 'GNU Texinfo document', + extension: 'texi', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'texinfo', + }, + asciidoc: { + label: 'AsciiDoc (.adoc)', + description: 'AsciiDoc markup', + extension: 'adoc', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'asciidoc', + }, + rst: { + label: 'reStructuredText (.rst)', + description: 'reStructuredText document', + extension: 'rst', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'rst', + }, + plain: { + label: 'Plain text (.txt)', + description: 'Plain UTF-8 text output', + extension: 'txt', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'plain', + }, + commonmark: { + label: 'CommonMark (.md)', + description: 'CommonMark markdown', + extension: 'md', + mimeType: 'text/markdown; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'commonmark', + }, + commonmark_x: { + label: 'CommonMark+Extensions (.md)', + description: 'CommonMark with extensions', + extension: 'md', + mimeType: 'text/markdown; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'commonmark_x', + }, + markdown_strict: { + label: 'Markdown (strict) (.md)', + description: 'Original markdown syntax', + extension: 'md', + mimeType: 'text/markdown; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'markdown_strict', + }, + markdown_phpextra: { + label: 'Markdown (PHP Extra) (.md)', + description: 'Markdown PHP Extra dialect', + extension: 'md', + mimeType: 'text/markdown; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'markdown_phpextra', + }, + gfm: { + label: 'GitHub Markdown (.md)', + description: 'GitHub-flavoured markdown', + extension: 'md', + mimeType: 'text/markdown; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'gfm', + }, + haddock: { + label: 'Haddock (.txt)', + description: 'Haddock markup (Haskell docs)', + extension: 'txt', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Wiki & Markup', + pandocFormat: 'haddock', + }, + + // Data & Interchange + opml: { + label: 'OPML (.opml)', + description: 'Outline Processor Markup Language document', + extension: 'opml', + mimeType: 'application/xml', + category: 'other', + group: 'Data & Interchange', + pandocFormat: 'opml', + }, + docbook: { + label: 'DocBook XML (.xml)', + description: 'DocBook XML document', + extension: 'xml', + mimeType: 'application/xml', + category: 'other', + group: 'Data & Interchange', + pandocFormat: 'docbook', + }, + json: { + label: 'Pandoc JSON (.json)', + description: 'Pandoc JSON abstract syntax tree', + extension: 'json', + mimeType: 'application/json; charset=utf-8', + category: 'other', + group: 'Data & 
Interchange', + pandocFormat: 'json', + }, + native: { + label: 'Pandoc native (.hs)', + description: 'Pandoc native Haskell AST', + extension: 'hs', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Data & Interchange', + pandocFormat: 'native', + }, + + // Manuals + man: { + label: 'Man page (.man)', + description: 'Groff man page source', + extension: 'man', + mimeType: 'text/plain; charset=utf-8', + category: 'other', + group: 'Manuals', + pandocFormat: 'man', + }, +} + +export function getFormatMetadata(format: ExportFormat): ExportFormatMetadata { + return EXPORT_FORMATS[format] +} + +export function getExtension(format: ExportFormat): string { + return EXPORT_FORMATS[format].extension +} + +export function getMimeType(format: ExportFormat): string { + return EXPORT_FORMATS[format].mimeType +} + +export function getPandocFormat(format: ExportFormat): PandocOutputFormat | null { + const meta = EXPORT_FORMATS[format] + if (meta.useHtml2Pdf || meta.isArchive) { + return null + } + return meta.pandocFormat ?? (format as PandocOutputFormat) +} + +export function sanitizeFilename(input: string): string { + const invalid = new Set(['/', '\\', ':', '*', '?', '"', '<', '>', '|', '\0']) + let base = (input ?? '').trim() + if (!base) base = 'document' + + let sanitized = '' + for (const ch of base) { + sanitized += invalid.has(ch) ? '-' : ch + } + sanitized = sanitized.replace(/ /g, '_') + if (sanitized.length > 100) sanitized = sanitized.slice(0, 100) + if (!sanitized) sanitized = 'document' + + return sanitized +} diff --git a/app/src/features/export/lib/pandoc.ts b/app/src/features/export/lib/pandoc.ts new file mode 100644 index 00000000..9a52bee8 --- /dev/null +++ b/app/src/features/export/lib/pandoc.ts @@ -0,0 +1,197 @@ +/** + * Pandoc WASM Wrapper + * + * Provides client-side document conversion using wasm-pandoc. + * Uses dynamic import for lazy loading (WASM is ~56MB). + * All processing happens client-side for E2EE compliance. 
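+ *
+ * Illustrative usage (a sketch based on this module's exports below; the
+ * input, format, and title are arbitrary examples):
+ *
+ *   // Returns a string for text formats; binary formats come back as a Blob.
+ *   const html = await convertWithPandoc('# Hello', 'html5', { title: 'Hello' })
+ *   // Or wrap the result in a Blob with the right MIME type for download:
+ *   const blob = await exportWithPandoc('# Hello', 'gfm', 'text/markdown; charset=utf-8')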
+ */
+
+import type { pandoc as PandocFn } from 'wasm-pandoc'
+
+import type { PandocOutputFormat } from './formats'
+
+// Pandoc function (lazy loaded)
+let pandocFn: typeof PandocFn | null = null
+let initPromise: Promise<typeof PandocFn> | null = null
+
+export interface PandocOptions {
+  standalone?: boolean
+  title?: string
+  /** Function to resolve attachment paths to their contents */
+  resolveAttachment?: (path: string) => Promise<Blob | null>
+}
+
+/**
+ * Initialize wasm-pandoc (lazy loaded on first use)
+ */
+async function getPandoc(): Promise<typeof PandocFn> {
+  if (pandocFn) {
+    return pandocFn
+  }
+
+  if (initPromise) {
+    return initPromise
+  }
+
+  initPromise = (async () => {
+    // Dynamic import - only loads when actually needed
+    const { pandoc } = await import('wasm-pandoc')
+    pandocFn = pandoc
+    return pandoc
+  })()
+
+  return initPromise
+}
+
+/**
+ * Extract attachment paths from markdown
+ */
+function extractAttachmentPaths(markdown: string): { fullMatch: string; path: string }[] {
+  const results: { fullMatch: string; path: string }[] = []
+  // Match markdown images: ![alt](path) or ![alt](path "title")
+  const mdImageRegex = /!\[([^\]]*)\]\(([^)\s]+)(?:\s+"[^"]*")?\)/g
+
+  let match
+  while ((match = mdImageRegex.exec(markdown)) !== null) {
+    const path = match[2]
+    if (!path.startsWith('http://') && !path.startsWith('https://') && !path.startsWith('data:')) {
+      results.push({ fullMatch: match[0], path })
+    }
+  }
+
+  return results
+}
+
+/**
+ * Replace attachment paths in markdown with data URIs
+ */
+async function embedAttachmentsInMarkdown(
+  markdown: string,
+  resolveAttachment: (path: string) => Promise<Blob | null>
+): Promise<string> {
+  const attachments = extractAttachmentPaths(markdown)
+  if (attachments.length === 0) {
+    return markdown
+  }
+
+  // Resolve all attachments in parallel
+  const resolved = await Promise.all(
+    attachments.map(async ({ fullMatch, path }) => {
+      try {
+        const blob = await resolveAttachment(path)
+        if (!blob) {
+          return { fullMatch, path, dataUri: null }
+        }
+
+        // Convert blob to data URI
+        const dataUri = await new Promise<string>((resolve, reject) => {
+          const reader = new FileReader()
+          reader.onloadend = () => resolve(reader.result as string)
+          reader.onerror = reject
+          reader.readAsDataURL(blob)
+        })
+
+        return { fullMatch, path, dataUri }
+      } catch (error) {
+        console.warn('[Pandoc] Failed to resolve attachment:', path, error)
+        return { fullMatch, path, dataUri: null }
+      }
+    })
+  )
+
+  // Replace paths with data URIs in markdown
+  let result = markdown
+  for (const { fullMatch, dataUri } of resolved) {
+    if (dataUri) {
+      // Extract alt text from the full match
+      const altMatch = fullMatch.match(/!\[([^\]]*)\]/)
+      const altText = altMatch ? altMatch[1] : ''
+      const replacement = `![${altText}](${dataUri})`
+      result = result.replace(fullMatch, replacement)
+    }
+  }
+
+  return result
+}
+
+/**
+ * Convert markdown to the specified format using wasm-pandoc
+ *
+ * @param markdown - Input markdown text
+ * @param format - Target output format (pandoc format name)
+ * @param options - Additional pandoc options
+ * @returns Converted content as string or Blob
+ */
+export async function convertWithPandoc(
+  markdown: string,
+  format: PandocOutputFormat,
+  options?: PandocOptions
+): Promise<string | Blob> {
+  const pandoc = await getPandoc()
+
+  // Build command line arguments
+  const args: string[] = ['-f', 'markdown', '-t', format]
+
+  // Add standalone flag for formats that support it
+  if (options?.standalone !== false) {
+    const standaloneFormats = [
+      'html', 'html5', 'latex', 'beamer', 'context',
+      'docx', 'odt', 'rtf', 'epub', 'epub3',
+    ]
+    if (standaloneFormats.includes(format)) {
+      args.push('-s')
+    }
+  }
+
+  // Add metadata for title if provided
+  if (options?.title) {
+    args.push('-M', `title=${options.title}`)
+  }
+
+  // Embed attachments as data URIs if resolver is provided
+  let processedMarkdown = markdown
+  if (options?.resolveAttachment) {
+    processedMarkdown = await embedAttachmentsInMarkdown(markdown, options.resolveAttachment)
+  }
+
+  const result = await pandoc(args.join(' '), processedMarkdown)
+
+  return result.out
+}
+
+/**
+ * Convert markdown and return as Blob
+ */
+export async function exportWithPandoc(
+  markdown: string,
+  format: PandocOutputFormat,
+  mimeType: string,
+  options?: PandocOptions
+): Promise<Blob> {
+  const result = await convertWithPandoc(markdown, format, options)
+
+  if (result instanceof Blob) {
+    return result
+  }
+
+  return new Blob([result], { type: mimeType })
+}
+
+/**
+ * Check if wasm-pandoc is loaded
+ */
+export function isPandocLoaded(): boolean {
+  return pandocFn !== null
+}
+
+/**
+ * Preload wasm-pandoc (optional, for better UX)
+ * Call this when the user opens the export dialog to start loading in the background
+ */
+export async function preloadPandoc(): Promise<void> {
+  try {
+    await getPandoc()
+  } catch {
+    // Silently fail - will retry on actual export
+  }
+}
diff --git a/app/src/features/export/lib/pdf.ts b/app/src/features/export/lib/pdf.ts
new file mode 100644
index 00000000..453a22ca
--- /dev/null
+++ b/app/src/features/export/lib/pdf.ts
@@ -0,0 +1,253 @@
+/**
+ * PDF Export
+ *
+ * Converts Markdown to PDF using pandoc-wasm + browser print.
+ * This mirrors the backend implementation, which used:
+ * Pandoc (markdown → HTML) + wkhtmltopdf (HTML → PDF)
+ *
+ * All processing happens client-side for E2EE compliance.
+ * Uses the browser's native print functionality for accurate rendering.
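+ *
+ * Illustrative usage (a sketch, not an additional API; `resolveAttachment` is the
+ * caller-supplied decryption hook and `decryptAttachmentToDataUri` is hypothetical):
+ *
+ *   await exportToPdf(markdown, 'My Document', {
+ *     resolveAttachment: (path) => decryptAttachmentToDataUri(path), // hypothetical resolver
+ *   })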
+ */
+
+import { exportWithPandoc } from './pandoc'
+
+export interface PdfExportOptions {
+  /** Document ID for resolving attachments */
+  documentId?: string
+  /** Function to resolve and decrypt attachment paths to data URIs */
+  resolveAttachment?: (path: string) => Promise<string | null>
+}
+
+/**
+ * CSS matching backend wkhtmltopdf output
+ */
+const PDF_STYLES = `
+  @page {
+    size: A4;
+    margin: 20mm;
+  }
+
+  @media print {
+    body {
+      -webkit-print-color-adjust: exact;
+      print-color-adjust: exact;
+    }
+  }
+
+  body {
+    font-family: 'Noto Sans CJK JP', 'Noto Sans CJK SC', 'Noto Sans CJK TC', 'Noto Sans CJK KR',
+      'Noto Sans JP', 'Noto Sans', 'Noto Serif CJK JP', 'Noto Serif CJK SC',
+      'Noto Serif CJK TC', 'Noto Serif CJK KR', 'Source Han Sans JP', 'Source Han Sans SC',
+      'Source Han Sans TC', 'Source Han Sans KR', 'Hiragino Kaku Gothic ProN', 'Yu Gothic',
+      'PingFang SC', 'Microsoft YaHei', 'Microsoft JhengHei', 'Malgun Gothic',
+      -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
+    font-size: 12pt;
+    line-height: 1.5;
+    color: #000;
+    margin: 0;
+    padding: 0;
+  }
+
+  code, pre {
+    font-family: 'Noto Sans Mono CJK JP', 'Noto Sans Mono', 'Source Code Pro', 'Roboto Mono',
+      'Menlo', 'Consolas', monospace;
+  }
+
+  h1 { font-size: 2em; margin: 0.67em 0; }
+  h2 { font-size: 1.5em; margin: 0.83em 0; }
+  h3 { font-size: 1.17em; margin: 1em 0; }
+  h4 { font-size: 1em; margin: 1.33em 0; }
+  h5 { font-size: 0.83em; margin: 1.67em 0; }
+  h6 { font-size: 0.67em; margin: 2.33em 0; }
+
+  p { margin: 1em 0; }
+  ul, ol { margin: 1em 0; padding-left: 2em; }
+  li { margin: 0.25em 0; }
+
+  pre {
+    background: #f8f8f8;
+    padding: 0.5em;
+    overflow-x: auto;
+    border: 1px solid #ccc;
+    white-space: pre-wrap;
+    word-wrap: break-word;
+  }
+
+  code { font-size: 0.9em; }
+  pre code { background: none; border: none; padding: 0; }
+
+  blockquote {
+    margin: 1em 2em;
+    padding-left: 1em;
+    border-left: 2px solid #ccc;
+    color: #555;
+  }
+
+  table { border-collapse: collapse; margin: 1em 0; width: 100%; }
+  th, td { border: 1px solid #ccc; padding: 0.5em; text-align: left; }
+  th { background: #f0f0f0; font-weight: bold; }
+
+  img { max-width: 100%; height: auto; }
+  figure { margin: 1em 0; }
+  figcaption { font-size: 0.9em; color: #666; text-align: center; margin-top: 0.5em; }
+
+  a { color: #0066cc; text-decoration: underline; }
+  hr { border: none; border-top: 1px solid #ccc; margin: 1em 0; }
+
+  input[type="checkbox"] { margin-right: 0.5em; }
+`
+
+/**
+ * Export to PDF using browser print dialog
+ *
+ * Flow: Markdown → HTML (pandoc) → Print dialog (browser)
+ * Returns a dummy blob since the actual PDF is created via the print dialog
+ */
+export async function exportToPdf(
+  markdown: string,
+  title: string,
+  options?: PdfExportOptions
+): Promise<Blob> {
+  // 1. Convert markdown to HTML using pandoc-wasm (non-standalone for body content only)
+  const htmlBlob = await exportWithPandoc(markdown, 'html5', 'text/html', {
+    standalone: false,
+  })
+
+  // Get HTML string (body content only)
+  let bodyContent = await htmlBlob.text()
+
+  // 2. Resolve and embed images as data URIs if resolver is provided
+  if (options?.resolveAttachment) {
+    bodyContent = await embedImages(bodyContent, options.resolveAttachment)
+  }
+
+  // 3. Open print window with styled content
+  await openPrintWindow(bodyContent, title)
+
+  // Return empty blob (actual PDF is created via print dialog)
+  return new Blob([], { type: 'application/pdf' })
+}
+
+/**
+ * Open a new window with print-ready content
+ */
+async function openPrintWindow(bodyContent: string, title: string): Promise<void> {
+  const printWindow = window.open('', '_blank')
+  if (!printWindow) {
+    throw new Error('Failed to open print window. Please allow popups for this site.')
+  }
+
+  const htmlDocument = `<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<title>${escapeHtml(title)}</title>
+<style>${PDF_STYLES}</style>
+</head>
+<body>
+${bodyContent}
+</body>
+</html>`
+
+  printWindow.document.write(htmlDocument)
+  printWindow.document.close()
+
+  // Wait for images to load
+  await new Promise<void>((resolve) => {
+    const images = printWindow.document.querySelectorAll('img')
+    if (images.length === 0) {
+      resolve()
+      return
+    }
+
+    let loaded = 0
+    const checkDone = () => {
+      loaded++
+      if (loaded >= images.length) {
+        resolve()
+      }
+    }
+
+    images.forEach((img) => {
+      if (img.complete) {
+        checkDone()
+      } else {
+        img.onload = checkDone
+        img.onerror = checkDone
+      }
+    })
+
+    // Timeout fallback
+    setTimeout(resolve, 3000)
+  })
+
+  // Small delay for rendering
+  await new Promise((resolve) => setTimeout(resolve, 100))
+
+  // Trigger print dialog
+  printWindow.print()
+}
+
+/**
+ * Escape HTML special characters
+ */
+function escapeHtml(str: string): string {
+  return str
+    .replace(/&/g, '&amp;')
+    .replace(/</g, '&lt;')
+    .replace(/>/g, '&gt;')
+    .replace(/"/g, '&quot;')
+}
+
+/**
+ * Embed images as base64 data URIs
+ */
+async function embedImages(
+  html: string,
+  resolveAttachment: (path: string) => Promise<string | null>
+): Promise<string> {
+  // Find all img tags with src attributes
+  const imgRegex = /<img[^>]*src="([^"]+)"[^>]*>/gi
+  const matches: { fullMatch: string; src: string }[] = []
+
+  let match
+  while ((match = imgRegex.exec(html)) !== null) {
+    matches.push({ fullMatch: match[0], src: match[1] })
+  }
+
+  if (matches.length === 0) {
+    return html
+  }
+
+  // Resolve all images in parallel
+  const resolved = await Promise.all(
+    matches.map(async ({ fullMatch, src }) => {
+      // Skip external URLs and data URIs
+      if (src.startsWith('http://') || src.startsWith('https://') || src.startsWith('data:')) {
+        return { fullMatch, src, dataUri: null }
+      }
+
+      try {
+        const dataUri = await resolveAttachment(src)
+        return { fullMatch, src, dataUri }
+      } catch (error) {
+        console.warn('[PDF Export] Failed to resolve attachment:', src, error)
+        return { fullMatch, src, dataUri: null }
+      }
+    })
+  )
+
+  // Replace src attributes with data URIs
+  let result = html
+  for (const { fullMatch, src, dataUri } of resolved) {
+    if (dataUri) {
+      const newTag = fullMatch.replace(`src="${src}"`, `src="${dataUri}"`)
+      result = result.replace(fullMatch, newTag)
+    }
+  }
+
+  return result
+}
+
diff --git a/app/src/features/file-tree/model/file-tree-context.tsx b/app/src/features/file-tree/model/file-tree-context.tsx
index 0ed83789..3d68f8c8 100644
--- a/app/src/features/file-tree/model/file-tree-context.tsx
+++ b/app/src/features/file-tree/model/file-tree-context.tsx
@@ -256,7 +256,7 @@ export function FileTreeProvider({ children }: { children: React.ReactNode }) {
     gcTime: 10 * 60 * 1000,
   })
 
-  const { data: shareMounts = [] } = useShareMounts()
+  const { data: shareMounts = [] } = useShareMounts({ enabled: !!userId && !!activeWorkspaceId && !isShare })
   const shareMountTrees = useQueries({
     queries: (shareMounts as any[]).map((mount) => ({
       queryKey: ['share-mount-tree', mount.id, mount.token],
@@ -343,9 +343,9 @@ export function FileTreeProvider({ children }: {
children: React.ReactNode }) { const items: DbDoc[] = resp.tree.map((n) => ({ id: n.id, title: n.title, - parent_id: n.parent_id ?? null, - created_at: n.created_at, - updated_at: n.updated_at, + parent_id: n.parentId ?? null, + created_at: n.createdAt, + updated_at: n.updatedAt, type: n.type === 'folder' ? ('folder' as const) : ('document' as const), })) return buildTree(items) diff --git a/app/src/features/file-tree/model/useFileTreeInteractions.ts b/app/src/features/file-tree/model/useFileTreeInteractions.ts index 39c1c0c8..7aeb736f 100644 --- a/app/src/features/file-tree/model/useFileTreeInteractions.ts +++ b/app/src/features/file-tree/model/useFileTreeInteractions.ts @@ -12,6 +12,7 @@ import { usePluginExecutor, usePluginManifest, type PluginCommand } from '@/enti import { deleteShareMount } from '@/entities/share' import type { DocumentNode } from '@/features/file-tree/model/types' +import { createDocumentDekIfNeeded } from '@/features/security' export type PluginMenuItem = { title: string @@ -26,6 +27,7 @@ type UseFileTreeInteractionsOptions = { shareToken: string isShare: boolean documents: DocumentNode[] + workspaceId: string | null getSelectedDocumentId: () => string | null setSelectedDocumentId: (id: string | null) => void refreshDocuments: () => void @@ -119,6 +121,7 @@ export function useFileTreeInteractions({ shareToken, isShare, documents, + workspaceId, getSelectedDocumentId, setSelectedDocumentId, refreshDocuments, @@ -134,6 +137,7 @@ export function useFileTreeInteractions({ const { runPluginCommand, resolveDocRoute } = usePluginExecutor({ plugins, shareToken, + workspaceId, refreshDocuments, navigate: (to) => navigate({ to }), getCurrentDocumentId: getSelectedDocumentId, @@ -178,6 +182,8 @@ export function useFileTreeInteractions({ const parent = parentId ?? 
null try { const doc = await createDocumentApi({ title: 'Untitled', parent_id: parent }) + // Generate DEK for E2EE if enabled + await createDocumentDekIfNeeded(doc.id, workspaceId) requestRename(doc.id) refreshDocuments() if (parent) expandFolder(parent) @@ -190,7 +196,7 @@ export function useFileTreeInteractions({ return null } }, - [expandFolder, refreshDocuments, requestRename], + [expandFolder, refreshDocuments, requestRename, workspaceId], ) const createFolder = useCallback( @@ -243,6 +249,8 @@ export function useFileTreeInteractions({ if (node.type === 'folder') return try { const duplicated = await duplicateDocumentApi({ id: node.id }) + // Generate DEK for E2EE if enabled + await createDocumentDekIfNeeded(duplicated.id, workspaceId) refreshDocuments() setSelectedDocumentId(duplicated.id) navigate({ to: '/document/$id', params: { id: duplicated.id } }) @@ -252,7 +260,7 @@ export function useFileTreeInteractions({ toast.error('Failed to duplicate document') } }, - [isShareView, navigate, refreshDocuments, setSelectedDocumentId], + [isShareView, navigate, refreshDocuments, setSelectedDocumentId, workspaceId], ) const deleteNode = useCallback( diff --git a/app/src/features/file-tree/ui/FileNode.tsx b/app/src/features/file-tree/ui/FileNode.tsx index e5001752..7ff017f6 100644 --- a/app/src/features/file-tree/ui/FileNode.tsx +++ b/app/src/features/file-tree/ui/FileNode.tsx @@ -11,7 +11,6 @@ import { Copy, Globe, Link as LinkIcon, - Ban, AlertTriangle, MessageSquare, Blocks, @@ -29,7 +28,6 @@ import type { LucideIcon } from 'lucide-react' import React, { useState, useCallback, memo, useEffect, useRef } from 'react' import { toast } from 'sonner' -import type { GitPullConflictItem } from '@/shared/api' import useInView from '@/shared/hooks/use-in-view' import { dispatchOpenPreviewTile } from '@/shared/lib/mosaic-events' import { overlayMenuClass } from '@/shared/lib/overlay-classes' @@ -41,10 +39,10 @@ import { Input } from '@/shared/ui/input' import { SidebarMenuItem, SidebarMenuButton } from '@/shared/ui/sidebar' import { useArchiveDocument, useUnarchiveDocument } from '@/entities/document' -import { ignoreDocument } from '@/entities/git' import { getPluginKv } from '@/entities/plugin' import { useFileTree, type DocumentNode } from '@/features/file-tree' +import type { ConflictItem } from '@/features/git-sync' @@ -69,8 +67,7 @@ type FileNodeProps = { onDrop: (e: React.DragEvent, id: string, type: 'file' | 'folder', parentId?: string) => void onDragOver: (e: React.DragEvent, nodeId?: string, nodeType?: 'file' | 'folder') => void pluginRules?: FileTreeRule[] - gitEnabled?: boolean - conflict?: GitPullConflictItem | null + conflict?: ConflictItem | null } export const FileNode = memo(function FileNode({ @@ -92,7 +89,6 @@ export const FileNode = memo(function FileNode({ onDrop, onDragOver, pluginRules, - gitEnabled = false, conflict = null, }: FileNodeProps) { const { @@ -614,21 +610,6 @@ export const FileNode = memo(function FileNode({ )} {!isShareMount && ( <> - {gitEnabled && ( - guardMenuAction(event, async () => { - try { - const r = await ignoreDocument({ id: node.id }) - const added = (r as any).added ?? 0 - toast.success(`Ignored in Git (${added} pattern${added === 1 ? 
'' : 's'})`) - } catch (e: any) { - toast.error(`Failed to ignore: ${e?.message || e}`) - } - })} - > - Ignore in Git - - )} {!isArchived && ( guardMenuAction(event, handleArchive)} @@ -679,7 +660,6 @@ export const FileNode = memo(function FileNode({ prev.isSelected === next.isSelected && prev.isDragging === next.isDragging && prev.isDropTarget === next.isDropTarget && - prev.gitEnabled === next.gitEnabled && (prev.conflict?.path || null) === (next.conflict?.path || null) )) diff --git a/app/src/features/file-tree/ui/FolderNode.tsx b/app/src/features/file-tree/ui/FolderNode.tsx index 776a391e..460e4487 100644 --- a/app/src/features/file-tree/ui/FolderNode.tsx +++ b/app/src/features/file-tree/ui/FolderNode.tsx @@ -1,6 +1,6 @@ "use client" -import { ChevronRight, ChevronDown, Folder, FolderOpen, Plus, Edit, Trash2, MoreHorizontal, Users, Share2, Link as LinkIcon, Ban, Archive, ArchiveRestore, Download } from 'lucide-react' +import { ChevronRight, ChevronDown, Folder, FolderOpen, Plus, Edit, Trash2, MoreHorizontal, Users, Share2, Link as LinkIcon, Archive, ArchiveRestore, Download } from 'lucide-react' import React, { useState, useCallback, memo, useEffect, useRef } from 'react' import { toast } from 'sonner' @@ -14,9 +14,10 @@ import { Input } from '@/shared/ui/input' import { SidebarMenuItem, SidebarMenuButton, SidebarMenuSub } from '@/shared/ui/sidebar' import { Tooltip, TooltipContent, TooltipTrigger } from '@/shared/ui/tooltip' -import { downloadDocumentFile, useArchiveDocument, useUnarchiveDocument } from '@/entities/document' -import { ignoreFolder } from '@/entities/git' +import { useArchiveDocument, useUnarchiveDocument } from '@/entities/document' +import { useAuthContext } from '@/features/auth' +import { exportDocumentFile } from '@/features/export' import { useFileTree, type DocumentNode } from '@/features/file-tree' @@ -43,7 +44,6 @@ type FolderNodeProps = { onDragOver: (e: React.DragEvent, nodeId?: string, nodeType?: 'file' | 'folder') => void renderChildren?: () => React.ReactNode onShareFolder?: (node: DocumentNode) => void - gitEnabled?: boolean } export const FolderNode = memo(function FolderNode({ @@ -68,8 +68,8 @@ export const FolderNode = memo(function FolderNode({ onDragOver, renderChildren, onShareFolder, - gitEnabled = false, }: FolderNodeProps) { + const { activeWorkspaceId } = useAuthContext() const { sharedFolderIds, underSharedFolderFolderIds, @@ -177,9 +177,13 @@ export const FolderNode = memo(function FolderNode({ const handleDownloadFolder = useCallback(async () => { if (downloadPending) return if (isShareMount) return + if (!activeWorkspaceId) { + toast.error('Workspace not available for export') + return + } setDownloadPending(true) try { - const filename = await downloadDocumentFile(node.id, { title: node.title, format: 'archive' }) + const filename = await exportDocumentFile(node.id, activeWorkspaceId, node.title, 'archive') toast.success(`Download ready: ${filename}`) } catch (error) { const message = error instanceof Error ? 
error.message : 'Failed to download folder' @@ -187,7 +191,7 @@ export const FolderNode = memo(function FolderNode({ } finally { setDownloadPending(false) } - }, [downloadPending, node.id, node.title]) + }, [downloadPending, node.id, node.title, activeWorkspaceId, isShareMount]) const handleArchive = useCallback(async () => { if (isShareMount) return try { @@ -434,19 +438,6 @@ export const FolderNode = memo(function FolderNode({ > Download Folder - {!isShareMount && gitEnabled && ( - guardMenuAction(event, async () => { - try { - const r = await ignoreFolder({ id: node.id }) - const added = (r as any).added ?? 0 - toast.success(`Folder ignored in Git (${added} pattern${added === 1 ? '' : 's'})`) - } catch (e: any) { - toast.error(`Failed to ignore: ${e?.message || e}`) - } - })}> - Ignore Folder in Git - - )} {!isArchived && !isShareMount && ( guardMenuAction(event, handleArchive)} diff --git a/app/src/features/git-sync/index.ts b/app/src/features/git-sync/index.ts index f57dac7b..7582f428 100644 --- a/app/src/features/git-sync/index.ts +++ b/app/src/features/git-sync/index.ts @@ -4,3 +4,66 @@ export { default as GitChangesDialog } from './ui/git-changes-dialog' export { default as GitPullDialog } from './ui/git-pull-dialog' export * from './ui/commit-diff-panel' export * from './ui/working-diff-panel' + +// E2EE Git sync - Core +export { GitClient } from './lib/git-client' +export { + saveGitCredentials, + loadGitCredentials, + deleteGitCredentials, + hasGitCredentials, + type GitCredentials, +} from './lib/git-credentials' + +// E2EE Git sync - Sync +export { + syncWorkspaceToGit, + initGitRepository, + getGitStatus, + getGitHistory, + type SyncOptions, + type SyncResult, +} from './lib/sync' + +// E2EE Git sync - Pull +export { + pullFromGit, + type PullResult, + type ConflictItem, +} from './lib/pull' + +// E2EE Git sync - History & Diff +export { + getHistory, + getWorkingDiff, + getCommitDiff, + type GitCommitItem, + type TextDiffResult, + type DiffLine, + type TextDiffLineType, +} from './lib/history' + +// E2EE Git sync - Conflict Resolution +export { + resolveConflict, + finalizeConflictResolution, + abortConflictResolution, + generateMergePreview, + type ConflictResolution, +} from './lib/conflict-resolver' + +// E2EE Git sync - Import +export { + importFromGit, + getImportableFiles, + readImportFile, + clearImportedRepository, + type ImportResult, + type ImportProgress, +} from './lib/import' + +// E2EE Git sync - Dirty calculation +export { + calculateDirtyFiles, + type DirtyFile, +} from './lib/dirty-calculator' diff --git a/app/src/features/git-sync/lib/conflict-resolver.ts b/app/src/features/git-sync/lib/conflict-resolver.ts new file mode 100644 index 00000000..18d35385 --- /dev/null +++ b/app/src/features/git-sync/lib/conflict-resolver.ts @@ -0,0 +1,189 @@ +/** + * Conflict Resolver for KeyVault Git Sync + * + * Handles resolving merge conflicts client-side. 
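+ *
+ * Illustrative resolution flow (a sketch using this module's exports below;
+ * it assumes each conflict item carries its decrypted `ours`/`theirs` text,
+ * which is an assumption about the ConflictItem shape from pull.ts):
+ *
+ *   for (const c of conflicts) {
+ *     await resolveConflict(workspaceId, { path: c.path, choice: 'theirs' }, c.ours, c.theirs)
+ *   }
+ *   const res = await finalizeConflictResolution(workspaceId, 'Merge remote changes')
+ *   if (!res.success) await abortConflictResolution(workspaceId)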
+ */ + +import * as git from 'isomorphic-git' + +import { getKeyVaultService } from '@/features/security' + +import { GitClient } from './git-client' +import { loadGitCredentials } from './git-credentials' + +export interface ConflictResolution { + path: string + choice: 'ours' | 'theirs' | 'custom' + customContent?: string +} + +/** + * Resolve a single conflict + */ +export async function resolveConflict( + workspaceId: string, + resolution: ConflictResolution, + oursContent: string, + theirsContent: string +): Promise { + const gitClient = new GitClient(workspaceId) + + let content: string + switch (resolution.choice) { + case 'ours': + content = oursContent + break + case 'theirs': + content = theirsContent + break + case 'custom': + if (!resolution.customContent) { + throw new Error('Custom content required for custom resolution') + } + content = resolution.customContent + break + default: + throw new Error(`Unknown resolution choice: ${resolution.choice}`) + } + + // Write resolved content to file + await gitClient.writeFile(resolution.path, content) + + // Stage the resolved file + await gitClient.add(resolution.path) +} + +/** + * Finalize conflict resolution by creating a merge commit + */ +export async function finalizeConflictResolution( + workspaceId: string, + commitMessage?: string +): Promise<{ success: boolean; message: string; commitSha?: string }> { + const service = getKeyVaultService() + if (!service.isUnlocked) { + return { + success: false, + message: 'KeyVault is locked. Please unlock first.', + } + } + + const credentials = await loadGitCredentials(workspaceId) + if (!credentials) { + return { + success: false, + message: 'Git credentials not configured.', + } + } + + const gitClient = new GitClient(workspaceId) + + try { + // Create merge commit + const message = commitMessage || 'Merge remote changes (resolved conflicts)' + const commitSha = await gitClient.commit(message) + + // Push to remote + await gitClient.push(credentials) + + return { + success: true, + message: 'Conflicts resolved and pushed successfully.', + commitSha, + } + } catch (error) { + return { + success: false, + message: `Failed to finalize: ${error instanceof Error ? error.message : 'Unknown error'}`, + } + } +} + +/** + * Abort conflict resolution (reset to HEAD) + */ +export async function abortConflictResolution( + workspaceId: string +): Promise<{ success: boolean; message: string }> { + const gitClient = new GitClient(workspaceId) + + try { + // Reset to HEAD + const currentHead = await git.resolveRef({ + fs: gitClient.fs, + dir: gitClient.dir, + ref: 'HEAD', + }) + + await git.checkout({ + fs: gitClient.fs, + dir: gitClient.dir, + ref: currentHead, + force: true, + }) + + return { + success: true, + message: 'Conflict resolution aborted.', + } + } catch (error) { + return { + success: false, + message: `Failed to abort: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + } + } +} + +/** + * Generate a three-way merge preview + */ +export function generateMergePreview( + base: string, + ours: string, + theirs: string +): string { + const baseLines = base.split('\n') + const oursLines = ours.split('\n') + const theirsLines = theirs.split('\n') + + // Simple three-way merge visualization + const result: string[] = [] + + const maxLen = Math.max(baseLines.length, oursLines.length, theirsLines.length) + + for (let i = 0; i < maxLen; i++) { + const baseLine = baseLines[i] + const ourLine = oursLines[i] + const theirLine = theirsLines[i] + + if (ourLine === theirLine) { + // Same in both - use it + if (ourLine !== undefined) { + result.push(ourLine) + } + } else if (ourLine === baseLine) { + // Changed only in theirs - use theirs + if (theirLine !== undefined) { + result.push(theirLine) + } + } else if (theirLine === baseLine) { + // Changed only in ours - use ours + if (ourLine !== undefined) { + result.push(ourLine) + } + } else { + // Changed in both - conflict + result.push('<<<<<<< ours') + if (ourLine !== undefined) { + result.push(ourLine) + } + result.push('=======') + if (theirLine !== undefined) { + result.push(theirLine) + } + result.push('>>>>>>> theirs') + } + } + + return result.join('\n') +} diff --git a/app/src/features/git-sync/lib/dirty-calculator.ts b/app/src/features/git-sync/lib/dirty-calculator.ts new file mode 100644 index 00000000..437f4637 --- /dev/null +++ b/app/src/features/git-sync/lib/dirty-calculator.ts @@ -0,0 +1,156 @@ +/** + * Dirty File Calculator for KeyVault Git Sync + * + * Calculates dirty files on-demand by comparing document content with Git HEAD. + * No persistence - ensures cross-device consistency. + */ + +import * as Y from 'yjs' + +import { + listDocuments, + getDocumentContent, + type Document, + type EncryptedUpdateEntry, +} from '@/shared/api/client' + +import { + fetchDocumentKeys, + SessionLockedError, + decrypt, + getSodium, +} from '@/features/security' + + +import { GitClient } from './git-client' + +export interface DirtyFile { + path: string + documentId: string + status: 'modified' | 'added' | 'deleted' +} + +/** + * Build file path from document + */ +function buildFilePath(doc: Document): string { + const basePath = doc.desired_path || doc.slug || doc.id + + if (doc.type === 'document' && !basePath.endsWith('.md')) { + return `${basePath}.md` + } + + return basePath +} + +/** + * Fetch and decrypt document content + */ +async function fetchDecryptedDocumentContent( + documentId: string, + workspaceId: string +): Promise { + try { + const contentRes = await getDocumentContent({ id: documentId }) + + const hasSnapshot = contentRes.content && contentRes.content.length > 0 + const hasUpdates = contentRes.updates && contentRes.updates.length > 0 + + if (!hasSnapshot && !hasUpdates) { + return '' + } + + const sodium = await getSodium() + const doc = new Y.Doc() + + // Get encryption keys + let dek: Uint8Array + try { + const keys = await fetchDocumentKeys(documentId, workspaceId) + dek = keys.dek + } catch (err) { + if (err instanceof SessionLockedError) { + doc.destroy() + return '' + } + throw err + } + + // Apply snapshot if present + if (hasSnapshot) { + const encryptedContent = sodium.from_base64(contentRes.content, sodium.base64_variants.ORIGINAL) + const nonce = sodium.from_base64(contentRes.nonce!, sodium.base64_variants.ORIGINAL) + const yjsState = await decrypt(dek, encryptedContent, nonce) + Y.applyUpdateV2(doc, yjsState) + } + + // Apply pending updates + if 
(hasUpdates) { + for (const update of contentRes.updates as EncryptedUpdateEntry[]) { + const encryptedData = sodium.from_base64(update.data, sodium.base64_variants.ORIGINAL) + const nonce = sodium.from_base64(update.nonce!, sodium.base64_variants.ORIGINAL) + const yjsUpdate = await decrypt(dek, encryptedData, nonce) + Y.applyUpdateV2(doc, yjsUpdate) + } + } + + const text = doc.getText('content').toString() + doc.destroy() + + return text + } catch { + return '' + } +} + +/** + * Calculate dirty files by comparing document content with Git HEAD. + * This is an on-demand calculation that ensures cross-device consistency. + */ +export async function calculateDirtyFiles( + workspaceId: string, + git: GitClient +): Promise { + const dirty: DirtyFile[] = [] + + // 1. Get all documents in workspace + const documentsResponse = await listDocuments({ state: 'active' }) + const workspaceDocs = documentsResponse.items.filter( + (doc) => doc.workspace_id === workspaceId && doc.type === 'document' + ) + + // 2. Get Git status matrix to find all tracked files + const statusMatrix = await git.status() + const gitFiles = new Set(statusMatrix.map(([filepath]) => filepath)) + + // 3. Check each document against Git + for (const doc of workspaceDocs) { + const path = buildFilePath(doc) + + try { + const gitContent = await git.readFile(path) + const docContent = await fetchDecryptedDocumentContent(doc.id, workspaceId) + + if (gitContent !== docContent) { + dirty.push({ path, documentId: doc.id, status: 'modified' }) + } + + // Remove from git files set (we've processed this one) + gitFiles.delete(path) + } catch { + // File doesn't exist in Git = new file + dirty.push({ path, documentId: doc.id, status: 'added' }) + } + } + + // 4. Files in Git but not in documents = deleted + for (const path of gitFiles) { + if (!path.endsWith('.md')) continue + // Skip .git directory files + if (path.startsWith('.git/')) continue + + dirty.push({ path, documentId: '', status: 'deleted' }) + } + + return dirty +} diff --git a/app/src/features/git-sync/lib/git-client.ts b/app/src/features/git-sync/lib/git-client.ts new file mode 100644 index 00000000..ada424d2 --- /dev/null +++ b/app/src/features/git-sync/lib/git-client.ts @@ -0,0 +1,454 @@ +/** + * Git Client for E2EE + * + * Uses isomorphic-git for client-side Git operations. + * Network operations go through backend proxy (HTTPS) or tunnel (SSH). 
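+ *
+ * Illustrative usage (a sketch using this class's methods below; `creds` is a
+ * GitCredentials value, e.g. loaded via loadGitCredentials in git-credentials.ts):
+ *
+ *   const client = new GitClient(workspaceId)
+ *   if (!(await client.isInitialized())) {
+ *     await client.clone(creds.repositoryUrl, creds)
+ *   }
+ *   await client.writeFile('notes/hello.md', '# Hello')
+ *   await client.add('notes/hello.md')
+ *   await client.commit('Add notes/hello.md')
+ *   await client.push(creds)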
+ */ + +import LightningFS from '@isomorphic-git/lightning-fs' +import * as git from 'isomorphic-git' +import type { HttpClient, GitHttpRequest, GitHttpResponse } from 'isomorphic-git' + +import { API_BASE_URL } from '@/shared/lib/config' + +import type { GitCredentials } from './git-credentials' + + +/** + * Parse Git SSH URL to extract host and repo + */ +export function parseGitSshUrl(url: string): { host: string; repo: string } { + // Format: git@github.com:user/repo.git + const match = url.match(/^git@([^:]+):(.+)$/) + if (!match) { + throw new Error(`Invalid SSH URL: ${url}`) + } + return { host: match[1], repo: match[2] } +} + +/** + * Git Client for E2EE environment + */ +export class GitClient { + private _fs: LightningFS + private pfs: LightningFS['promises'] + private _dir: string + private proxyBaseUrl: string + + constructor(workspaceId: string) { + this._fs = new LightningFS(`git-${workspaceId}`) + this.pfs = this._fs.promises + this._dir = '/repo' + this.proxyBaseUrl = `${API_BASE_URL}/api/git/proxy` + } + + /** Get the filesystem (for direct git operations) */ + get fs(): LightningFS { + return this._fs + } + + /** Get the repository directory path */ + get dir(): string { + return this._dir + } + + /** + * Collect body from async iterator + */ + private async collectBody(body: AsyncIterableIterator | undefined): Promise { + if (!body) return undefined + + const chunks: Uint8Array[] = [] + for await (const chunk of body) { + chunks.push(chunk) + } + + if (chunks.length === 0) return undefined + if (chunks.length === 1) return chunks[0] + + // Concatenate chunks + const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0) + const result = new Uint8Array(totalLength) + let offset = 0 + for (const chunk of chunks) { + result.set(chunk, offset) + offset += chunk.length + } + return result + } + + /** + * Create async iterator from Uint8Array + */ + private async *toAsyncIterator(data: Uint8Array): AsyncIterableIterator { + yield data + } + + /** + * Create HTTP client for HTTPS proxy + */ + private createHttpsProxy(token: string): HttpClient { + const proxyBaseUrl = this.proxyBaseUrl + const collectBody = this.collectBody.bind(this) + const toAsyncIterator = this.toAsyncIterator.bind(this) + + return { + async request(req: GitHttpRequest): Promise { + const proxyUrl = `${proxyBaseUrl}/https/${req.url.replace(/^https?:\/\//, '')}` + + const headers: Record = { + ...(req.headers || {}), + Authorization: `Basic ${btoa(unescape(encodeURIComponent(`x-access-token:${token}`)))}`, + } + + // Collect request body from async iterator + const bodyData = await collectBody(req.body) + + const response = await fetch(proxyUrl, { + method: req.method || 'GET', + headers, + body: bodyData ? new Blob([new Uint8Array(bodyData)]) : undefined, + credentials: 'omit', + }) + + const responseBody = new Uint8Array(await response.arrayBuffer()) + + return { + url: response.url, + method: req.method, + statusCode: response.status, + statusMessage: response.statusText, + headers: Object.fromEntries(response.headers.entries()), + body: responseBody.length > 0 ? 
toAsyncIterator(responseBody) : undefined, + } + }, + } + } + + /** + * Create HTTP client for SSH tunnel + */ + private createSshProxy(privateKey: string, passphrase?: string): HttpClient { + const proxyBaseUrl = this.proxyBaseUrl + const collectBody = this.collectBody.bind(this) + const toAsyncIterator = this.toAsyncIterator.bind(this) + + return { + async request(req: GitHttpRequest): Promise { + // Determine service from URL path + const service = req.url.includes('git-upload-pack') + ? 'git-upload-pack' + : 'git-receive-pack' + + // Extract host and repo from URL + // URL format: https://github.com/user/repo.git/info/refs?service=... + const urlMatch = req.url.match(/https?:\/\/([^/]+)\/(.+?)(?:\/info\/refs|\/git-|$)/) + if (!urlMatch) { + throw new Error(`Cannot parse URL for SSH: ${req.url}`) + } + + const host = urlMatch[1] + const repo = urlMatch[2] + + // Collect request body from async iterator + const bodyData = await collectBody(req.body) + + const response = await fetch(`${proxyBaseUrl}/ssh`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + host, + repo, + service, + private_key: privateKey, + passphrase: passphrase || null, + data: bodyData ? Array.from(bodyData) : [], + }), + }) + + if (!response.ok) { + const error = await response.text() + throw new Error(`SSH tunnel error: ${error}`) + } + + const result = await response.json() + + return { + url: req.url, + method: req.method, + statusCode: 200, + statusMessage: 'OK', + headers: {}, + body: result.data ? toAsyncIterator(new Uint8Array(result.data)) : undefined, + } + }, + } + } + + /** + * Get HTTP client based on auth type + */ + private getHttpClient(auth: GitCredentials): HttpClient { + if (auth.authType === 'ssh') { + if (!auth.privateKey) { + throw new Error('SSH private key required') + } + return this.createSshProxy(auth.privateKey, auth.passphrase) + } + + if (!auth.token) { + throw new Error('Access token required') + } + return this.createHttpsProxy(auth.token) + } + + /** + * Initialize the repository directory + */ + async ensureDir(): Promise { + try { + await this.pfs.mkdir(this.dir) + } catch (e) { + // Directory may already exist + if ((e as NodeJS.ErrnoException).code !== 'EEXIST') { + throw e + } + } + } + + /** + * Clone a repository + */ + async clone(url: string, auth: GitCredentials): Promise { + await this.ensureDir() + + await git.clone({ + fs: this.fs, + http: this.getHttpClient(auth), + dir: this.dir, + url, + ref: auth.branchName || 'main', + depth: 1, + singleBranch: true, + corsProxy: undefined, // Using our own proxy + }) + } + + /** + * Pull from remote + */ + async pull(auth: GitCredentials): Promise { + await git.pull({ + fs: this.fs, + http: this.getHttpClient(auth), + dir: this.dir, + ref: auth.branchName || 'main', + author: { name: 'RefMD', email: 'sync@refmd.app' }, + singleBranch: true, + }) + } + + /** + * Push to remote + */ + async push(auth: GitCredentials): Promise { + await git.push({ + fs: this.fs, + http: this.getHttpClient(auth), + dir: this.dir, + ref: auth.branchName || 'main', + }) + } + + /** + * Fetch from remote + */ + async fetch(auth: GitCredentials): Promise { + await git.fetch({ + fs: this.fs, + http: this.getHttpClient(auth), + dir: this.dir, + ref: auth.branchName || 'main', + singleBranch: true, + }) + } + + /** + * Add file to staging + */ + async add(filepath: string): Promise { + await git.add({ + fs: this.fs, + dir: this.dir, + filepath, + }) + } + + /** + * Remove file from staging and working tree + */ + 
async remove(filepath: string): Promise { + await git.remove({ + fs: this.fs, + dir: this.dir, + filepath, + }) + } + + /** + * Create a commit + */ + async commit(message: string): Promise { + return git.commit({ + fs: this.fs, + dir: this.dir, + message, + author: { name: 'RefMD', email: 'sync@refmd.app' }, + }) + } + + /** + * Get status of all files + */ + async status(): Promise<[string, number, number, number][]> { + return git.statusMatrix({ + fs: this.fs, + dir: this.dir, + }) + } + + /** + * Check if repository is initialized + */ + async isInitialized(): Promise { + try { + await git.findRoot({ fs: this.fs, filepath: this.dir }) + return true + } catch { + return false + } + } + + /** + * Get current branch name + */ + async currentBranch(): Promise { + return git.currentBranch({ + fs: this.fs, + dir: this.dir, + }) as Promise + } + + /** + * List branches + */ + async listBranches(): Promise { + return git.listBranches({ + fs: this.fs, + dir: this.dir, + }) + } + + /** + * Get commit log + */ + async log(depth: number = 10): Promise { + return git.log({ + fs: this.fs, + dir: this.dir, + depth, + }) + } + + /** + * Write file to repository + */ + async writeFile(filepath: string, content: string): Promise { + const fullPath = `${this.dir}/${filepath}` + + // Ensure parent directory exists + const parentDir = fullPath.substring(0, fullPath.lastIndexOf('/')) + if (parentDir && parentDir !== this.dir) { + await this.mkdirp(parentDir) + } + + await this.pfs.writeFile(fullPath, content, 'utf8') + } + + /** + * Read file from repository + */ + async readFile(filepath: string): Promise { + const fullPath = `${this.dir}/${filepath}` + const content = await this.pfs.readFile(fullPath, { encoding: 'utf8' }) + return content as string + } + + /** + * Delete file from repository + */ + async deleteFile(filepath: string): Promise { + const fullPath = `${this.dir}/${filepath}` + await this.pfs.unlink(fullPath) + } + + /** + * List files in directory + */ + async listFiles(dirPath: string = ''): Promise { + const fullPath = dirPath ? `${this.dir}/${dirPath}` : this.dir + return this.pfs.readdir(fullPath) as Promise + } + + /** + * Create directory recursively + */ + private async mkdirp(dirPath: string): Promise { + const parts = dirPath.split('/').filter(Boolean) + let current = '' + + for (const part of parts) { + current = current ? 
`${current}/${part}` : `/${part}` + try { + await this.pfs.mkdir(current) + } catch (e) { + if ((e as NodeJS.ErrnoException).code !== 'EEXIST') { + throw e + } + } + } + } + + /** + * Clear repository (delete all files) + */ + async clear(): Promise { + const files = await this.listFilesRecursive(this.dir) + for (const file of files) { + await this.pfs.unlink(file) + } + } + + /** + * List all files recursively + */ + private async listFilesRecursive(dirPath: string): Promise { + const result: string[] = [] + const entries = (await this.pfs.readdir(dirPath)) as string[] + + for (const entry of entries) { + const fullPath = `${dirPath}/${entry}` + const stat = await this.pfs.stat(fullPath) + + if (stat.isDirectory()) { + if (entry !== '.git') { + result.push(...(await this.listFilesRecursive(fullPath))) + } + } else { + result.push(fullPath) + } + } + + return result + } +} diff --git a/app/src/features/git-sync/lib/git-conflict-store.ts b/app/src/features/git-sync/lib/git-conflict-store.ts index 33a00765..258bacd3 100644 --- a/app/src/features/git-sync/lib/git-conflict-store.ts +++ b/app/src/features/git-sync/lib/git-conflict-store.ts @@ -1,17 +1,22 @@ -import type { GitPullConflictItem, GitPullResolution } from '@/shared/api' +/** + * Git Conflict Store for E2EE Git Sync + * + * Client-side store for tracking git merge conflicts. + * No server-side sessions - everything is client-side. + */ + import { getClientWorkspaceId } from '@/shared/api/client.config' +import type { ConflictItem } from './pull' + +export type { ConflictItem } + export const GIT_CONFLICT_EVENT = 'refmd:git-conflicts-updated' -export const GIT_SESSION_EVENT = 'refmd:git-session-updated' -let currentConflicts: GitPullConflictItem[] = [] -let currentResolutions: GitPullResolution[] = [] -let currentSessionId: string | null = null +let currentConflicts: ConflictItem[] = [] let currentWorkspaceId: string | null = null const STORAGE_CONFLICTS_KEY = 'refmd:git-conflicts' -const STORAGE_RESOLUTIONS_KEY = 'refmd:git-conflict-resolutions' -const STORAGE_SESSION_KEY = 'refmd:git-conflict-session' type StoredArray = { items: T[]; found: boolean } @@ -26,10 +31,7 @@ const scopedKey = (base: string, workspaceId: string | null) => (workspaceId ? ` const refreshWorkspaceState = () => { const workspaceId = normalizeWorkspaceId(getClientWorkspaceId()) currentWorkspaceId = workspaceId - currentConflicts = loadScopedArray(STORAGE_CONFLICTS_KEY, currentWorkspaceId).items - currentResolutions = loadScopedArray(STORAGE_RESOLUTIONS_KEY, currentWorkspaceId).items - const sid = loadScopedArray(STORAGE_SESSION_KEY, currentWorkspaceId) - currentSessionId = sid.items.length ? sid.items[0] : null + currentConflicts = loadScopedArray(STORAGE_CONFLICTS_KEY, currentWorkspaceId).items } const loadFromStorage = (key: string): StoredArray => { @@ -58,7 +60,7 @@ const loadScopedArray = (baseKey: string, workspaceId: string | null): Stored const scopedValue = loadFromStorage(scoped) if (scopedValue.found) return scopedValue - // Do not leak legacy (unscoped) values into another workspace; only use legacy when no workspace is selected. 
+ // Do not leak legacy (unscoped) values into another workspace if (workspaceId) return { items: [], found: false } return loadFromStorage(baseKey) @@ -69,63 +71,29 @@ if (typeof window !== 'undefined') { refreshWorkspaceState() } -export const readConflicts = (): GitPullConflictItem[] => { +export const readConflicts = (): ConflictItem[] => { refreshWorkspaceState() return currentConflicts.slice() } -export const readResolutions = (): GitPullResolution[] => { - refreshWorkspaceState() - return currentResolutions.slice() -} -export const readSessionId = (): string | null => { - refreshWorkspaceState() - return currentSessionId -} -export const setConflicts = (conflicts: GitPullConflictItem[] | null | undefined) => { +export const setConflicts = (conflicts: ConflictItem[] | null | undefined) => { refreshWorkspaceState() currentConflicts = Array.isArray(conflicts) ? conflicts.slice() : [] persistStorage(scopedKey(STORAGE_CONFLICTS_KEY, currentWorkspaceId), currentConflicts) - // Clear resolutions if conflicts are cleared - if (!currentConflicts.length) { - setResolutions([]) - } if (typeof window !== 'undefined') { window.dispatchEvent(new CustomEvent(GIT_CONFLICT_EVENT, { detail: currentConflicts })) } } -export const setResolutions = (resolutions: GitPullResolution[] | null | undefined) => { - refreshWorkspaceState() - currentResolutions = Array.isArray(resolutions) ? resolutions.slice() : [] - persistStorage(scopedKey(STORAGE_RESOLUTIONS_KEY, currentWorkspaceId), currentResolutions) -} - -export const clearResolutions = () => setResolutions([]) - -export const setSessionId = (sessionId: string | null) => { - refreshWorkspaceState() - currentSessionId = sessionId || null - persistStorage(scopedKey(STORAGE_SESSION_KEY, currentWorkspaceId), sessionId ? [sessionId] : []) - if (typeof window !== 'undefined') { - window.dispatchEvent(new CustomEvent(GIT_SESSION_EVENT, { detail: currentSessionId })) - } -} - -export const clearSession = () => { - clearAllConflicts() -} - export const clearAllConflicts = () => { setConflicts([]) - setSessionId(null) } -export const subscribeConflicts = (handler: (items: GitPullConflictItem[]) => void) => { +export const subscribeConflicts = (handler: (items: ConflictItem[]) => void) => { if (typeof window === 'undefined') return () => {} refreshWorkspaceState() const listener = (event: Event) => { - const detail = (event as CustomEvent).detail || currentConflicts + const detail = (event as CustomEvent).detail || currentConflicts handler(detail) } const storageListener = () => { @@ -139,23 +107,3 @@ export const subscribeConflicts = (handler: (items: GitPullConflictItem[]) => vo window.removeEventListener('storage', storageListener) } } - -export const subscribeSessionId = (handler: (sessionId: string | null) => void) => { - if (typeof window === 'undefined') return () => {} - refreshWorkspaceState() - const storageListener = (event: StorageEvent) => { - refreshWorkspaceState() - if (event.key && event.key !== scopedKey(STORAGE_SESSION_KEY, currentWorkspaceId)) return - handler(readSessionId()) - } - const eventListener = (event: Event) => { - const detail = (event as CustomEvent).detail - handler(detail ?? 
readSessionId()) - } - window.addEventListener('storage', storageListener) - window.addEventListener(GIT_SESSION_EVENT, eventListener) - return () => { - window.removeEventListener('storage', storageListener) - window.removeEventListener(GIT_SESSION_EVENT, eventListener) - } -} diff --git a/app/src/features/git-sync/lib/git-credentials.ts b/app/src/features/git-sync/lib/git-credentials.ts new file mode 100644 index 00000000..f4ecefb7 --- /dev/null +++ b/app/src/features/git-sync/lib/git-credentials.ts @@ -0,0 +1,142 @@ +/** + * Git Credentials Manager for KeyVault + * + * Handles encrypted storage of Git authentication credentials. + * Credentials are encrypted with workspace KEK and stored on server. + * This enables cross-device sync while maintaining E2EE security. + */ + +import { createOrUpdateConfig, getConfig, deleteConfig } from '@/entities/git' + +import { getKeyVaultService, encrypt, decrypt, getSodium } from '@/features/security' + +export interface GitCredentials { + repositoryUrl: string + branchName: string + authType: 'https-pat' | 'ssh' + // HTTPS + token?: string + // SSH + privateKey?: string + passphrase?: string +} + +interface E2EEAuthData { + e2ee: true + ciphertext: string // base64 encoded + nonce: string // base64 encoded +} + +/** + * Save Git credentials encrypted with workspace KEK to server + */ +export async function saveGitCredentials( + workspaceId: string, + credentials: GitCredentials +): Promise { + const service = getKeyVaultService() + service.ensureUnlocked() + + // Get or create workspace KEK + const kek = await service.keyManager.getOrCreateWorkspaceKek(workspaceId) + + // Prepare auth data to encrypt + const authDataPlain = { + token: credentials.token, + privateKey: credentials.privateKey, + passphrase: credentials.passphrase, + } + + // Encrypt with KEK + const sodium = await getSodium() + const plaintext = new TextEncoder().encode(JSON.stringify(authDataPlain)) + const { ciphertext, nonce } = await encrypt(kek, plaintext) + + // Encode to base64 + const ciphertextB64 = sodium.to_base64(ciphertext, sodium.base64_variants.ORIGINAL) + const nonceB64 = sodium.to_base64(nonce, sodium.base64_variants.ORIGINAL) + + // Create E2EE auth data structure + const e2eeAuthData: E2EEAuthData = { + e2ee: true, + ciphertext: ciphertextB64, + nonce: nonceB64, + } + + // Save to server (server stores as-is, doesn't re-encrypt) + await createOrUpdateConfig({ + requestBody: { + repository_url: credentials.repositoryUrl, + branch_name: credentials.branchName || 'main', + auth_type: credentials.authType === 'ssh' ? 'ssh' : 'token', + auth_data: e2eeAuthData, + auto_sync: false, + }, + }) +} + +/** + * Load Git credentials from server and decrypt with workspace KEK + */ +export async function loadGitCredentials( + workspaceId: string +): Promise { + const service = getKeyVaultService() + service.ensureUnlocked() + + // Get config from server + const config = await getConfig() + if (!config) { + return null + } + + // Check if we have encrypted auth data + const rawAuthData = (config as any).encrypted_auth_data + if (!rawAuthData || !rawAuthData.e2ee) { + // Legacy non-encrypted config - return without auth data + return { + repositoryUrl: config.repository_url, + branchName: config.branch_name, + authType: config.auth_type === 'ssh' ? 
'ssh' : 'https-pat', + } + } + + // Get or create workspace KEK + const kek = await service.keyManager.getOrCreateWorkspaceKek(workspaceId) + + // Decrypt auth data + const sodium = await getSodium() + const ciphertext = sodium.from_base64(rawAuthData.ciphertext, sodium.base64_variants.ORIGINAL) + const nonce = sodium.from_base64(rawAuthData.nonce, sodium.base64_variants.ORIGINAL) + + const plaintext = await decrypt(kek, ciphertext, nonce) + const authData = JSON.parse(new TextDecoder().decode(plaintext)) + + return { + repositoryUrl: config.repository_url, + branchName: config.branch_name, + authType: config.auth_type === 'ssh' ? 'ssh' : 'https-pat', + token: authData.token, + privateKey: authData.privateKey, + passphrase: authData.passphrase, + } +} + +/** + * Delete Git credentials from server + */ +export async function deleteGitCredentials(): Promise { + await deleteConfig() +} + +/** + * Check if Git credentials exist for a workspace + */ +export async function hasGitCredentials(): Promise { + try { + const config = await getConfig() + return config !== null + } catch { + return false + } +} diff --git a/app/src/features/git-sync/lib/history.ts b/app/src/features/git-sync/lib/history.ts new file mode 100644 index 00000000..34ca41b8 --- /dev/null +++ b/app/src/features/git-sync/lib/history.ts @@ -0,0 +1,366 @@ +/** + * Git History and Diff for E2EE Git Sync + * + * Uses isomorphic-git for client-side history and diff operations. + */ + +import * as git from 'isomorphic-git' +import type { FsClient } from 'isomorphic-git' + +import { calculateDirtyFiles } from './dirty-calculator' +import { GitClient } from './git-client' +import { fetchDecryptedDocumentContent } from './sync' + +export interface GitCommitItem { + hash: string + message: string + author_name: string + author_email: string + time: string +} + +export type TextDiffLineType = 'added' | 'deleted' | 'context' + +export interface DiffLine { + line_type: TextDiffLineType + content: string + old_line_number: number | null + new_line_number: number | null +} + +export interface TextDiffResult { + file_path: string + diff_lines: DiffLine[] +} + +/** + * Get commit history for workspace + */ +export async function getHistory(workspaceId: string, depth: number = 50): Promise { + const gitClient = new GitClient(workspaceId) + + const isInitialized = await gitClient.isInitialized() + if (!isInitialized) { + return [] + } + + try { + const commits = await gitClient.log(depth) + + return commits.map((commit) => ({ + hash: commit.oid, + message: commit.commit.message, + author_name: commit.commit.author.name, + author_email: commit.commit.author.email, + time: new Date(commit.commit.author.timestamp * 1000).toISOString(), + })) + } catch { + return [] + } +} + +/** + * Simple line-based diff algorithm + */ +function computeLineDiff(oldContent: string, newContent: string): DiffLine[] { + const oldLines = oldContent.split('\n') + const newLines = newContent.split('\n') + + const diffLines: DiffLine[] = [] + + // Simple diff: use LCS (Longest Common Subsequence) algorithm + const lcs = computeLCS(oldLines, newLines) + + let oldIdx = 0 + let newIdx = 0 + let lcsIdx = 0 + + while (oldIdx < oldLines.length || newIdx < newLines.length) { + if (lcsIdx < lcs.length && oldIdx < oldLines.length && oldLines[oldIdx] === lcs[lcsIdx]) { + // Common line + if (newIdx < newLines.length && newLines[newIdx] === lcs[lcsIdx]) { + diffLines.push({ + line_type: 'context', + content: oldLines[oldIdx], + old_line_number: oldIdx + 1, + new_line_number: newIdx + 
diff --git a/app/src/features/git-sync/lib/history.ts b/app/src/features/git-sync/lib/history.ts
new file mode 100644
index 00000000..34ca41b8
--- /dev/null
+++ b/app/src/features/git-sync/lib/history.ts
@@ -0,0 +1,366 @@
+/**
+ * Git History and Diff for E2EE Git Sync
+ *
+ * Uses isomorphic-git for client-side history and diff operations.
+ */
+
+import * as git from 'isomorphic-git'
+import type { FsClient } from 'isomorphic-git'
+
+import { calculateDirtyFiles } from './dirty-calculator'
+import { GitClient } from './git-client'
+import { fetchDecryptedDocumentContent } from './sync'
+
+export interface GitCommitItem {
+  hash: string
+  message: string
+  author_name: string
+  author_email: string
+  time: string
+}
+
+export type TextDiffLineType = 'added' | 'deleted' | 'context'
+
+export interface DiffLine {
+  line_type: TextDiffLineType
+  content: string
+  old_line_number: number | null
+  new_line_number: number | null
+}
+
+export interface TextDiffResult {
+  file_path: string
+  diff_lines: DiffLine[]
+}
+
+/**
+ * Get commit history for a workspace
+ */
+export async function getHistory(workspaceId: string, depth: number = 50): Promise<GitCommitItem[]> {
+  const gitClient = new GitClient(workspaceId)
+
+  const isInitialized = await gitClient.isInitialized()
+  if (!isInitialized) {
+    return []
+  }
+
+  try {
+    const commits = await gitClient.log(depth)
+
+    return commits.map((commit) => ({
+      hash: commit.oid,
+      message: commit.commit.message,
+      author_name: commit.commit.author.name,
+      author_email: commit.commit.author.email,
+      time: new Date(commit.commit.author.timestamp * 1000).toISOString(),
+    }))
+  } catch {
+    return []
+  }
+}
+
+/**
+ * Simple line-based diff algorithm
+ */
+function computeLineDiff(oldContent: string, newContent: string): DiffLine[] {
+  const oldLines = oldContent.split('\n')
+  const newLines = newContent.split('\n')
+
+  const diffLines: DiffLine[] = []
+
+  // Simple diff: use LCS (Longest Common Subsequence) algorithm
+  const lcs = computeLCS(oldLines, newLines)
+
+  let oldIdx = 0
+  let newIdx = 0
+  let lcsIdx = 0
+
+  while (oldIdx < oldLines.length || newIdx < newLines.length) {
+    if (lcsIdx < lcs.length && oldIdx < oldLines.length && oldLines[oldIdx] === lcs[lcsIdx]) {
+      // Common line
+      if (newIdx < newLines.length && newLines[newIdx] === lcs[lcsIdx]) {
+        diffLines.push({
+          line_type: 'context',
+          content: oldLines[oldIdx],
+          old_line_number: oldIdx + 1,
+          new_line_number: newIdx + 1,
+        })
+        oldIdx++
+        newIdx++
+        lcsIdx++
+      } else {
+        // Addition in new
+        diffLines.push({
+          line_type: 'added',
+          content: newLines[newIdx],
+          old_line_number: null,
+          new_line_number: newIdx + 1,
+        })
+        newIdx++
+      }
+    } else if (oldIdx < oldLines.length) {
+      // Deletion from old
+      diffLines.push({
+        line_type: 'deleted',
+        content: oldLines[oldIdx],
+        old_line_number: oldIdx + 1,
+        new_line_number: null,
+      })
+      oldIdx++
+    } else if (newIdx < newLines.length) {
+      // Addition in new
+      diffLines.push({
+        line_type: 'added',
+        content: newLines[newIdx],
+        old_line_number: null,
+        new_line_number: newIdx + 1,
+      })
+      newIdx++
+    }
+  }
+
+  return diffLines
+}
+
+/**
+ * Compute the Longest Common Subsequence of two line arrays
+ */
+function computeLCS(a: string[], b: string[]): string[] {
+  const m = a.length
+  const n = b.length
+
+  // DP table
+  const dp: number[][] = Array(m + 1)
+    .fill(null)
+    .map(() => Array(n + 1).fill(0))
+
+  for (let i = 1; i <= m; i++) {
+    for (let j = 1; j <= n; j++) {
+      if (a[i - 1] === b[j - 1]) {
+        dp[i][j] = dp[i - 1][j - 1] + 1
+      } else {
+        dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1])
+      }
+    }
+  }
+
+  // Backtrack to find LCS
+  const lcs: string[] = []
+  let i = m
+  let j = n
+
+  while (i > 0 && j > 0) {
+    if (a[i - 1] === b[j - 1]) {
+      lcs.unshift(a[i - 1])
+      i--
+      j--
+    } else if (dp[i - 1][j] > dp[i][j - 1]) {
+      i--
+    } else {
+      j--
+    }
+  }
+
+  return lcs
+}
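As a concreteness check on the LCS walk above (illustration only — `computeLineDiff` is module-private): a one-line replacement emits the deletion before the matching addition, which is the conventional unified-diff ordering.

```ts
// Type import path assumes the feature barrel re-exports DiffLine,
// as the UI files later in this diff do for TextDiffResult.
import type { DiffLine } from '@/features/git-sync'

// computeLineDiff('alpha\nbeta\ngamma', 'alpha\nBETA\ngamma') produces:
const expected: DiffLine[] = [
  { line_type: 'context', content: 'alpha', old_line_number: 1, new_line_number: 1 },
  { line_type: 'deleted', content: 'beta', old_line_number: 2, new_line_number: null },
  { line_type: 'added', content: 'BETA', old_line_number: null, new_line_number: 2 },
  { line_type: 'context', content: 'gamma', old_line_number: 3, new_line_number: 3 },
]
```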
+
+/**
+ * Get working tree diff (uncommitted changes)
+ */
+export async function getWorkingDiff(workspaceId: string): Promise<TextDiffResult[]> {
+  const gitClient = new GitClient(workspaceId)
+
+  const isInitialized = await gitClient.isInitialized()
+  if (!isInitialized) {
+    return []
+  }
+
+  const dirtyFiles = await calculateDirtyFiles(workspaceId, gitClient)
+
+  const diffs: TextDiffResult[] = []
+
+  for (const dirty of dirtyFiles) {
+    try {
+      let oldContent = ''
+      let newContent = ''
+
+      if (dirty.status === 'deleted') {
+        oldContent = await gitClient.readFile(dirty.path).catch(() => '')
+        newContent = ''
+      } else if (dirty.status === 'added') {
+        oldContent = ''
+        newContent = await fetchDecryptedDocumentContent(dirty.documentId, workspaceId)
+      } else {
+        oldContent = await gitClient.readFile(dirty.path).catch(() => '')
+        newContent = await fetchDecryptedDocumentContent(dirty.documentId, workspaceId)
+      }
+
+      diffs.push({
+        file_path: dirty.path,
+        diff_lines: computeLineDiff(oldContent, newContent),
+      })
+    } catch {
+      // Skip files that can't be diffed
+    }
+  }
+
+  return diffs
+}
+
+/**
+ * Get diff between two commits
+ */
+export async function getCommitDiff(
+  workspaceId: string,
+  fromCommit: string,
+  toCommit: string
+): Promise<TextDiffResult[]> {
+  const gitClient = new GitClient(workspaceId)
+
+  const isInitialized = await gitClient.isInitialized()
+  if (!isInitialized) {
+    return []
+  }
+
+  try {
+    // Get files changed between commits
+    const changedFiles = await getChangedFilesBetweenCommits(gitClient, fromCommit, toCommit)
+
+    const diffs: TextDiffResult[] = []
+
+    for (const file of changedFiles) {
+      const oldContent = await readFileAtCommit(gitClient, fromCommit, file).catch(() => '')
+      const newContent = await readFileAtCommit(gitClient, toCommit, file).catch(() => '')
+
+      diffs.push({
+        file_path: file,
+        diff_lines: computeLineDiff(oldContent, newContent),
+      })
+    }
+
+    return diffs
+  } catch {
+    return []
+  }
+}
+
+/**
+ * Get list of changed files between two commits
+ */
+async function getChangedFilesBetweenCommits(
+  gitClient: GitClient,
+  fromCommit: string,
+  toCommit: string
+): Promise<string[]> {
+  const fs = gitClient.fs
+  const dir = gitClient.dir
+
+  const fromTree = await git.readTree({
+    fs,
+    dir,
+    oid: fromCommit,
+  })
+
+  const toTree = await git.readTree({
+    fs,
+    dir,
+    oid: toCommit,
+  })
+
+  const fromFiles = new Map<string, string>()
+  const toFiles = new Map<string, string>()
+
+  // Collect files from both trees
+  await collectFilesFromTree(fs, dir, fromTree.tree, '', fromFiles)
+  await collectFilesFromTree(fs, dir, toTree.tree, '', toFiles)
+
+  // Find changed files
+  const changedFiles = new Set<string>()
+
+  for (const [path, oid] of fromFiles) {
+    if (!toFiles.has(path) || toFiles.get(path) !== oid) {
+      changedFiles.add(path)
+    }
+  }
+
+  for (const path of toFiles.keys()) {
+    if (!fromFiles.has(path)) {
+      changedFiles.add(path)
+    }
+  }
+
+  return Array.from(changedFiles)
+}
+
+/**
+ * Recursively collect files from a tree
+ */
+async function collectFilesFromTree(
+  fs: FsClient,
+  dir: string,
+  tree: git.TreeEntry[],
+  prefix: string,
+  files: Map<string, string>
+): Promise<void> {
+  for (const entry of tree) {
+    const path = prefix ? `${prefix}/${entry.path}` : entry.path
+
+    if (entry.type === 'blob') {
+      files.set(path, entry.oid)
+    } else if (entry.type === 'tree') {
+      const subtree = await git.readTree({
+        fs,
+        dir,
+        oid: entry.oid,
+      })
+      await collectFilesFromTree(fs, dir, subtree.tree, path, files)
+    }
+  }
+}
+
+/**
+ * Read file content at a specific commit
+ */
+async function readFileAtCommit(
+  gitClient: GitClient,
+  commitOid: string,
+  filepath: string
+): Promise<string> {
+  const fs = gitClient.fs
+  const dir = gitClient.dir
+
+  // Get the commit
+  const commit = await git.readCommit({
+    fs,
+    dir,
+    oid: commitOid,
+  })
+
+  // Walk the tree to find the file
+  const pathParts = filepath.split('/')
+  let currentOid = commit.commit.tree
+
+  for (let i = 0; i < pathParts.length; i++) {
+    const tree = await git.readTree({
+      fs,
+      dir,
+      oid: currentOid,
+    })
+
+    const entry = tree.tree.find((e) => e.path === pathParts[i])
+    if (!entry) {
+      throw new Error(`File not found: ${filepath}`)
+    }
+
+    currentOid = entry.oid
+  }
+
+  // Read the blob
+  const blob = await git.readBlob({
+    fs,
+    dir,
+    oid: currentOid,
+  })
+
+  return new TextDecoder().decode(blob.blob)
+}
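A short usage sketch of the exported history API (assumes `workspaceId` is in scope and that the `@/features/git-sync` barrel re-exports these functions, as the UI imports later in this diff suggest):

```ts
import { getHistory, getCommitDiff } from '@/features/git-sync'

// Diff the two most recent commits and print per-file change counts.
const commits = await getHistory(workspaceId, 2) // newest first
if (commits.length === 2) {
  const [newest, previous] = commits
  const diffs = await getCommitDiff(workspaceId, previous.hash, newest.hash)
  for (const d of diffs) {
    const added = d.diff_lines.filter((l) => l.line_type === 'added').length
    const deleted = d.diff_lines.filter((l) => l.line_type === 'deleted').length
    console.log(`${d.file_path}: +${added} -${deleted}`)
  }
}
```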
diff --git a/app/src/features/git-sync/lib/import.ts b/app/src/features/git-sync/lib/import.ts
new file mode 100644
index 00000000..7c37c57c
--- /dev/null
+++ b/app/src/features/git-sync/lib/import.ts
@@ -0,0 +1,344 @@
+/**
+ * Git Import for KeyVault Git Sync
+ *
+ * Imports a Git repository into the workspace.
+ */
+
+import * as Y from 'yjs'
+
+import {
+  updateDocumentContent as apiUpdateDocumentContent,
+} from '@/shared/api/client'
+
+import { createDocument } from '@/entities/document'
+
+import {
+  getKeyVaultService,
+  fetchDocumentKeys,
+  createDocumentDek,
+  encrypt,
+  getSodium,
+} from '@/features/security'
+
+import { GitClient } from './git-client'
+import type { GitCredentials } from './git-credentials'
+
+export interface ImportResult {
+  success: boolean
+  message: string
+  docsCreated: number
+  attachmentsFound: number
+}
+
+export interface ImportProgress {
+  phase: 'cloning' | 'scanning' | 'importing' | 'done'
+  current: number
+  total: number
+  currentFile?: string
+}
+
+export type ProgressCallback = (progress: ImportProgress) => void
+
+/**
+ * Import a Git repository into the workspace.
+ *
+ * This function clones the repository and creates documents from markdown files.
+ */
+export async function importFromGit(
+  workspaceId: string,
+  repositoryUrl: string,
+  credentials: GitCredentials,
+  onProgress?: ProgressCallback
+): Promise<ImportResult> {
+  const service = getKeyVaultService()
+  if (!service.isUnlocked) {
+    return {
+      success: false,
+      message: 'KeyVault is locked. Please unlock first.',
+      docsCreated: 0,
+      attachmentsFound: 0,
+    }
+  }
+
+  const gitClient = new GitClient(workspaceId)
+
+  // 1. Clone repository
+  onProgress?.({
+    phase: 'cloning',
+    current: 0,
+    total: 1,
+  })
+
+  try {
+    await gitClient.clone(repositoryUrl, credentials)
+  } catch (error) {
+    return {
+      success: false,
+      message: `Failed to clone repository: ${error instanceof Error ? error.message : 'Unknown error'}`,
+      docsCreated: 0,
+      attachmentsFound: 0,
+    }
+  }
+
+  // 2. Scan files
+  onProgress?.({
+    phase: 'scanning',
+    current: 0,
+    total: 1,
+  })
+
+  let docsCreated = 0
+  let attachmentsFound = 0
+
+  try {
+    const allFiles = await listAllFiles(gitClient, '')
+
+    const markdownFiles: string[] = []
+    const attachmentFiles: string[] = []
+
+    for (const file of allFiles) {
+      if (file.endsWith('.md') || file.endsWith('.markdown')) {
+        markdownFiles.push(file)
+      } else if (isAttachment(file)) {
+        attachmentFiles.push(file)
+      }
+    }
+
+    attachmentsFound = attachmentFiles.length
+
+    // 3. Import markdown files as documents
+    onProgress?.({
+      phase: 'importing',
+      current: 0,
+      total: markdownFiles.length,
+    })
+
+    for (let i = 0; i < markdownFiles.length; i++) {
+      const filePath = markdownFiles[i]
+
+      onProgress?.({
+        phase: 'importing',
+        current: i,
+        total: markdownFiles.length,
+        currentFile: filePath,
+      })
+
+      try {
+        // Read file content
+        const rawContent = await gitClient.readFile(filePath)
+
+        // Strip frontmatter from markdown
+        const content = stripFrontmatter(rawContent)
+
+        // Extract title from filename (remove extension and path)
+        const fileName = filePath.split('/').pop() || filePath
+        const title = fileName.replace(/\.(md|markdown)$/i, '') || 'Untitled'
+
+        // Create document
+        const doc = await createDocument({ title, parent_id: null })
+
+        // Create DEK for the document
+        await createDocumentDek(doc.id, workspaceId)
+
+        // Get DEK for encryption
+        const { dek } = await fetchDocumentKeys(doc.id, workspaceId)
+
+        // Create Yjs doc and set content
+        const ydoc = new Y.Doc()
+        ydoc.getText('content').insert(0, content)
+
+        // Get Yjs state as bytes
+        const yjsState = Y.encodeStateAsUpdateV2(ydoc)
+        ydoc.destroy()
+
+        // Encrypt content with DEK
+        const { ciphertext, nonce } = await encrypt(dek, yjsState)
+
+        // Convert to base64 for API
+        const sodium = await getSodium()
+        const contentBase64 = sodium.to_base64(ciphertext, sodium.base64_variants.ORIGINAL)
+        const nonceBase64 = sodium.to_base64(nonce, sodium.base64_variants.ORIGINAL)
+
+        // Update document content with encrypted data
+        await apiUpdateDocumentContent({
+          id: doc.id,
+          requestBody: {
+            content: contentBase64,
+            nonce: nonceBase64,
+          },
+        })
+
+        docsCreated++
+      } catch (error) {
+        console.error(`[git-import] Failed to import ${filePath}:`, error)
+        // Continue with other files
+      }
+    }
+
+    onProgress?.({
+      phase: 'done',
+      current: docsCreated,
+      total: markdownFiles.length,
+    })
+  } catch (error) {
+    return {
+      success: false,
+      message: `Failed to import repository: ${error instanceof Error ? error.message : 'Unknown error'}`,
+      docsCreated,
+      attachmentsFound,
+    }
+  }
+
+  return {
+    success: true,
+    message: `Repository imported. Created ${docsCreated} documents.`,
+    docsCreated,
+    attachmentsFound,
+  }
+}
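A hedged usage sketch of the import flow above; `workspaceId`, `repoUrl`, and `credentials` are assumed to come from the Git settings dialog, which is not part of this excerpt:

```ts
import { importFromGit } from '@/features/git-sync'

const result = await importFromGit(workspaceId, repoUrl, credentials, (p) => {
  // Progress callback fires per phase; during 'importing' it includes the file name.
  if (p.phase === 'importing' && p.currentFile) {
    console.log(`Importing ${p.current + 1}/${p.total}: ${p.currentFile}`)
  }
})

console.log(result.success
  ? `Created ${result.docsCreated} documents (${result.attachmentsFound} attachments found)`
  : result.message)
```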
+
+/**
+ * Get the lists of markdown and attachment files from the cloned repository
+ */
+export async function getImportableFiles(workspaceId: string): Promise<{
+  markdownFiles: string[]
+  attachmentFiles: string[]
+}> {
+  const gitClient = new GitClient(workspaceId)
+
+  const isInitialized = await gitClient.isInitialized()
+  if (!isInitialized) {
+    return { markdownFiles: [], attachmentFiles: [] }
+  }
+
+  try {
+    const allFiles = await listAllFiles(gitClient, '')
+
+    const markdownFiles: string[] = []
+    const attachmentFiles: string[] = []
+
+    for (const file of allFiles) {
+      if (file.endsWith('.md') || file.endsWith('.markdown')) {
+        markdownFiles.push(file)
+      } else if (isAttachment(file)) {
+        attachmentFiles.push(file)
+      }
+    }
+
+    return { markdownFiles, attachmentFiles }
+  } catch {
+    return { markdownFiles: [], attachmentFiles: [] }
+  }
+}
+
+/**
+ * Read file content from the cloned repository
+ */
+export async function readImportFile(
+  workspaceId: string,
+  filePath: string
+): Promise<string> {
+  const gitClient = new GitClient(workspaceId)
+  return gitClient.readFile(filePath)
+}
+
+/**
+ * Read a binary file from the cloned repository
+ */
+export async function readImportBinaryFile(
+  workspaceId: string,
+  filePath: string
+): Promise<Uint8Array> {
+  const gitClient = new GitClient(workspaceId)
+  const content = await gitClient.readFile(filePath)
+  // For binary files, we need to read as raw bytes.
+  // This is a simplified version - a real implementation would need proper binary handling.
+  return new TextEncoder().encode(content)
+}
+
+/**
+ * List all files recursively
+ */
+async function listAllFiles(gitClient: GitClient, dirPath: string): Promise<string[]> {
+  const files: string[] = []
+
+  try {
+    const entries = await gitClient.listFiles(dirPath)
+
+    for (const entry of entries) {
+      if (entry === '.git') continue
+
+      const fullPath = dirPath ? `${dirPath}/${entry}` : entry
+
+      try {
+        // Try to list as directory
+        const subEntries = await gitClient.listFiles(fullPath)
+        if (Array.isArray(subEntries) && subEntries.length > 0) {
+          // It's a directory
+          const subFiles = await listAllFiles(gitClient, fullPath)
+          files.push(...subFiles)
+        } else {
+          // Empty directory or file
+          files.push(fullPath)
+        }
+      } catch {
+        // It's a file
+        files.push(fullPath)
+      }
+    }
+  } catch {
+    // Return empty on error
+  }
+
+  return files
+}
+
+/**
+ * Strip YAML frontmatter from markdown content.
+ * Frontmatter is delimited by --- at the start and end.
+ */
+function stripFrontmatter(content: string): string {
+  // Remove BOM if present
+  const trimmed = content.replace(/^\uFEFF/, '')
+
+  // Check if content starts with the opening frontmatter delimiter
+  const openMatch = trimmed.match(/^---\r?\n/)
+  if (!openMatch) {
+    return trimmed
+  }
+
+  // Find the closing delimiter
+  const afterOpen = trimmed.slice(openMatch[0].length)
+  const closeMatch = afterOpen.match(/\n---\r?\n/)
+  if (!closeMatch) {
+    return trimmed
+  }
+
+  // Extract body after frontmatter
+  const bodyStart = closeMatch.index! + closeMatch[0].length
+  return afterOpen.slice(bodyStart)
+}
+
+/**
+ * Check if file is an attachment (image, etc.)
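Behavior notes for `stripFrontmatter`, as a small illustration (the function is module-private, so this is documentation rather than a test):

```ts
// BOM-tolerant, and only strips when the file *starts* with a frontmatter fence.
const input = '---\ntitle: Notes\ntags: [a, b]\n---\n\n# Body'
// stripFrontmatter(input) === '\n# Body'  (everything after the closing ---)
// stripFrontmatter('# No frontmatter')    // returned unchanged (no opening fence)
// stripFrontmatter('---\nunclosed')       // returned unchanged (no closing fence)
```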
+ */
+function isAttachment(filePath: string): boolean {
+  const attachmentExtensions = [
+    '.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg',
+    '.pdf', '.doc', '.docx', '.xls', '.xlsx',
+    '.mp3', '.mp4', '.wav', '.ogg',
+    '.zip', '.tar', '.gz',
+  ]
+
+  const ext = filePath.substring(filePath.lastIndexOf('.')).toLowerCase()
+  return attachmentExtensions.includes(ext)
+}
+
+/**
+ * Clear the cloned repository
+ */
+export async function clearImportedRepository(workspaceId: string): Promise<void> {
+  const gitClient = new GitClient(workspaceId)
+  await gitClient.clear()
+}
diff --git a/app/src/features/git-sync/lib/pull-session-manager.ts b/app/src/features/git-sync/lib/pull-session-manager.ts
deleted file mode 100644
index 1c88bde0..00000000
--- a/app/src/features/git-sync/lib/pull-session-manager.ts
+++ /dev/null
@@ -1,178 +0,0 @@
-import { ApiError, type GitPullConflictItem, type GitPullResolution, type GitPullSessionResponse } from '@/shared/api'
-
-import { finalizePullSession, resolvePullSession, startPullSession } from '@/entities/git'
-
-import { clearAllConflicts, clearSession, clearResolutions, readConflicts, readResolutions, readSessionId, setConflicts, setSessionId } from './git-conflict-store'
-
-export type PullSessionResult = {
-  status: 'merged' | 'conflicts' | 'stale' | 'error'
-  conflicts: GitPullConflictItem[]
-  sessionId?: string | null
-  message?: string | null
-  emptyConflictWarning?: boolean
-}
-
-const extractConflicts = (value: unknown): GitPullConflictItem[] => {
-  const toArr = (v: unknown): GitPullConflictItem[] => (Array.isArray(v) ? (v as GitPullConflictItem[]) : [])
-  if (!value) return []
-  if (Array.isArray(value)) return toArr(value)
-  if (typeof value === 'object') {
-    const maybe = (value as any)?.conflicts
-    if (Array.isArray(maybe)) return toArr(maybe)
-  }
-  return []
-}
-
-export const performPullSession = async (
-  resolutions?: GitPullResolution[],
-  options?: { sessionId?: string | null; autoFinalize?: boolean },
-): Promise<PullSessionResult> => {
-  const sessionIdFromStore = readSessionId()
-  const sessionId: string | undefined = (options?.sessionId ?? sessionIdFromStore) || undefined
-  let requestResolutions = resolutions ?? readResolutions()
-
-  if (!sessionId) {
-    requestResolutions = resolutions ?? []
-    clearResolutions()
-  }
-  try {
-    const res: GitPullSessionResponse = sessionId
-      ? await resolvePullSession({ id: sessionId, requestBody: { resolutions: requestResolutions } })
-      : await startPullSession()
-
-    if ((res as any)?.status === 'stale') {
-      clearAllConflicts()
-      return {
-        status: 'stale',
-        conflicts: [],
-        sessionId: undefined,
-        message: res.message,
-      }
-    }
-
-    const conflicts = res.conflicts ?? []
-    const sid: string | undefined = res.session_id || sessionId || undefined
-    const sessionChanged = Boolean(sid && sid !== sessionIdFromStore)
-    if (conflicts.length > 0) {
-      setSessionId(sid ?? null)
-      if (sessionChanged) {
-        clearResolutions()
-      }
-      setConflicts(conflicts)
-      return {
-        status: 'conflicts',
-        conflicts,
-        sessionId: sid,
-        message: res.message,
-      }
-    }
-
-    const finalizeIfNeeded = async (): Promise<PullSessionResult> => {
-      if (options?.autoFinalize === false) {
-        // Caller will explicitly finalize; keep session available to them.
-        return {
-          status: 'merged',
-          conflicts: [],
-          sessionId: sid,
-          message: res.message,
-        }
-      }
-      if (!sid) {
-        clearSession()
-        return {
-          status: 'merged',
-          conflicts: [],
-          sessionId: undefined,
-          message: res.message,
-        }
-      }
-
-      try {
-        const finalizeRes = await finalizePullSession({ id: sid })
-        const msg = finalizeRes.message || res.message || 'Finalize failed'
-        if (typeof msg === 'string' && msg.toLowerCase().includes('stale')) {
-          clearAllConflicts()
-          return { status: 'stale', conflicts: [], sessionId: undefined, message: msg }
-        }
-
-        if (finalizeRes.success) {
-          clearSession()
-          return {
-            status: 'merged',
-            conflicts: [],
-            sessionId: undefined,
-            message: msg,
-          }
-        }
-
-        const remaining = finalizeRes.conflicts ?? []
-        if (remaining.length > 0) {
-          setSessionId(sid ?? null)
-          setConflicts(remaining)
-          return {
-            status: 'conflicts',
-            conflicts: remaining,
-            sessionId: sid,
-            message: finalizeRes.message || res.message,
-          }
-        }
-
-        return {
-          status: 'error',
-          conflicts: readConflicts(),
-          sessionId: sid,
-          message: msg,
-        }
-      } catch (err: any) {
-        // Surface finalize errors so caller can prompt a retry.
-        const detail = err?.body?.message || err?.message || 'Finalize failed'
-        return {
-          status: 'error',
-          conflicts: readConflicts(),
-          sessionId: sid,
-          message: detail,
-        }
-      }
-    }
-
-    return await finalizeIfNeeded()
-  } catch (e: any) {
-    const bodyConflicts = extractConflicts(e?.body)
-    const statusField = (e as any)?.body?.status
-    const msg = (e as any)?.body?.message || e?.message || `${e}`
-
-    if (statusField === 'stale' || (typeof msg === 'string' && msg.toLowerCase().includes('stale'))) {
-      clearAllConflicts()
-      return { status: 'stale', conflicts: [], sessionId: undefined, message: msg }
-    }
-
-    if (e instanceof ApiError && e.status === 409) {
-      if (bodyConflicts.length > 0) {
-        const sid = (e as any)?.body?.session_id || sessionId || readSessionId() || undefined
-        setSessionId(sid ?? null)
-        if (!sessionId || sid !== sessionIdFromStore) {
-          clearResolutions()
-        }
-        setConflicts(bodyConflicts)
-        return { status: 'conflicts', conflicts: bodyConflicts, sessionId: sid, message: msg }
-      }
-      clearResolutions()
-      const fallback = readConflicts()
-      setConflicts(fallback)
-      return {
-        status: 'conflicts',
-        conflicts: fallback,
-        sessionId: readSessionId() || undefined,
-        message: msg || 'Conflicts reported but none returned.',
-        emptyConflictWarning: true,
-      }
-    }
-
-    return {
-      status: 'error',
-      conflicts: readConflicts(),
-      sessionId: readSessionId() || undefined,
-      message: msg,
-    }
-  }
-}
diff --git a/app/src/features/git-sync/lib/pull.ts b/app/src/features/git-sync/lib/pull.ts
new file mode 100644
index 00000000..653f94aa
--- /dev/null
+++ b/app/src/features/git-sync/lib/pull.ts
@@ -0,0 +1,423 @@
+/**
+ * Git Pull for KeyVault Git Sync
+ *
+ * Handles pulling changes from remote and detecting conflicts.
+ */
+
+import * as git from 'isomorphic-git'
+
+import { listDocuments, type Document } from '@/shared/api/client'
+
+import { getKeyVaultService } from '@/features/security'
+
+import { GitClient } from './git-client'
+import { loadGitCredentials } from './git-credentials'
+
+export interface PullResult {
+  success: boolean
+  message: string
+  conflicts: ConflictItem[]
+  filesUpdated: number
+}
+
+export interface ConflictItem {
+  path: string
+  ours: string
+  theirs: string
+  base: string
+  documentId?: string
+  is_binary: boolean
+}
+
+/**
+ * Pull changes from the remote repository
+ */
+export async function pullFromGit(workspaceId: string): Promise<PullResult> {
+  const service = getKeyVaultService()
+  if (!service.isUnlocked) {
+    return {
+      success: false,
+      message: 'KeyVault is locked. Please unlock first.',
+      conflicts: [],
+      filesUpdated: 0,
+    }
+  }
+
+  const credentials = await loadGitCredentials(workspaceId)
+  if (!credentials) {
+    return {
+      success: false,
+      message: 'Git credentials not configured.',
+      conflicts: [],
+      filesUpdated: 0,
+    }
+  }
+
+  const gitClient = new GitClient(workspaceId)
+
+  const isInitialized = await gitClient.isInitialized()
+  if (!isInitialized) {
+    return {
+      success: false,
+      message: 'Repository not initialized.',
+      conflicts: [],
+      filesUpdated: 0,
+    }
+  }
+
+  try {
+    // 1. Fetch latest changes
+    await gitClient.fetch(credentials)
+
+    // 2. Get current and remote HEAD
+    const currentBranch = (await gitClient.currentBranch()) || 'main'
+    const localHead = await git.resolveRef({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      ref: 'HEAD',
+    })
+    const remoteHead = await git.resolveRef({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      ref: `refs/remotes/origin/${currentBranch}`,
+    }).catch(() => null)
+
+    if (!remoteHead) {
+      return {
+        success: true,
+        message: 'No remote changes.',
+        conflicts: [],
+        filesUpdated: 0,
+      }
+    }
+
+    if (localHead === remoteHead) {
+      return {
+        success: true,
+        message: 'Already up to date.',
+        conflicts: [],
+        filesUpdated: 0,
+      }
+    }
+
+    // 3. Check for conflicts (compare changed files)
+    const conflicts = await detectConflicts(gitClient, workspaceId, localHead, remoteHead)
+
+    if (conflicts.length > 0) {
+      // Return conflicts for resolution
+      return {
+        success: false,
+        message: 'Conflicts detected. Please resolve them.',
+        conflicts,
+        filesUpdated: 0,
+      }
+    }
+
+    // 4. Fast-forward or merge (no conflicts)
+    await git.merge({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      ours: localHead,
+      theirs: remoteHead,
+      author: { name: 'RefMD', email: 'sync@refmd.app' },
+    })
+
+    // 5. Apply changes to documents
+    const filesUpdated = await applyGitFilesToDocuments(workspaceId, gitClient)
+
+    return {
+      success: true,
+      message: 'Pull completed successfully.',
+      conflicts: [],
+      filesUpdated,
+    }
+  } catch (error) {
+    if (error instanceof Error && error.message.includes('merge conflict')) {
+      // Handle merge conflict
+      const currentBranch = (await gitClient.currentBranch()) || 'main'
+      const localHead = await git.resolveRef({
+        fs: gitClient.fs,
+        dir: gitClient.dir,
+        ref: 'HEAD',
+      })
+      const remoteHead = await git.resolveRef({
+        fs: gitClient.fs,
+        dir: gitClient.dir,
+        ref: `refs/remotes/origin/${currentBranch}`,
+      })
+
+      const conflicts = await detectConflicts(gitClient, workspaceId, localHead, remoteHead)
+
+      return {
+        success: false,
+        message: 'Merge conflicts detected. Please resolve them.',
+        conflicts,
+        filesUpdated: 0,
+      }
+    }
+
+    return {
+      success: false,
+      message: `Pull failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
+      conflicts: [],
+      filesUpdated: 0,
+    }
+  }
+}
+
+/**
+ * Detect conflicts between local and remote changes
+ */
+async function detectConflicts(
+  gitClient: GitClient,
+  workspaceId: string,
+  localHead: string,
+  remoteHead: string
+): Promise<ConflictItem[]> {
+  const conflicts: ConflictItem[] = []
+
+  try {
+    // Find common ancestor
+    const [mergeBase] = await git.findMergeBase({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      oids: [localHead, remoteHead],
+    })
+
+    // Get changed files in local and remote
+    const localChanges = await getChangedFiles(gitClient, mergeBase, localHead)
+    const remoteChanges = await getChangedFiles(gitClient, mergeBase, remoteHead)
+
+    // Find files changed in both
+    const conflictPaths = new Set<string>()
+    for (const path of localChanges) {
+      if (remoteChanges.has(path)) {
+        conflictPaths.add(path)
+      }
+    }
+
+    // Get document mapping
+    const documents = await listDocuments({ state: 'active' })
+    const pathToDoc = new Map<string, Document>()
+    for (const doc of documents.items) {
+      if (doc.workspace_id === workspaceId) {
+        const path = buildFilePath(doc)
+        pathToDoc.set(path, doc)
+      }
+    }
+
+    // Build conflict items
+    for (const path of conflictPaths) {
+      const baseContent = await readFileAtCommit(gitClient, mergeBase, path).catch(() => '')
+      const oursContent = await readFileAtCommit(gitClient, localHead, path).catch(() => '')
+      const theirsContent = await readFileAtCommit(gitClient, remoteHead, path).catch(() => '')
+
+      // Only add if contents are actually different
+      if (oursContent !== theirsContent) {
+        const doc = pathToDoc.get(path)
+        conflicts.push({
+          path,
+          ours: oursContent,
+          theirs: theirsContent,
+          base: baseContent,
+          documentId: doc?.id,
+          is_binary: false,
+        })
+      }
+    }
+  } catch {
+    // If we can't detect conflicts, return an empty array
+  }
+
+  return conflicts
+}
+
+/**
+ * Get changed files between two commits
+ */
+async function getChangedFiles(
+  gitClient: GitClient,
+  fromCommit: string,
+  toCommit: string
+): Promise<Set<string>> {
+  const changed = new Set<string>()
+
+  try {
+    const fromTree = await git.readTree({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      oid: fromCommit,
+    })
+
+    const toTree = await git.readTree({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      oid: toCommit,
+    })
+
+    const fromFiles = new Map<string, string>()
+    const toFiles = new Map<string, string>()
+
+    await collectTreeFiles(gitClient, fromTree.tree, '', fromFiles)
+    await collectTreeFiles(gitClient, toTree.tree, '', toFiles)
+
+    // Find changed files
+    for (const [path, oid] of fromFiles) {
+      if (!toFiles.has(path) || toFiles.get(path) !== oid) {
+        changed.add(path)
+      }
+    }
+
+    for (const path of toFiles.keys()) {
+      if (!fromFiles.has(path)) {
+        changed.add(path)
+      }
+    }
+  } catch {
+    // Return empty set on error
+  }
+
+  return changed
+}
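A usage sketch of the conflict path; `openMergeEditor` is a hypothetical UI entry point, not part of this PR — the point is that each `ConflictItem` already carries the three-way inputs (`base`, `ours`, `theirs`) a merge view needs. Note the detection above is file-level: two edits to different lines of the same file still count as a conflict here, unlike `git merge`.

```ts
import { pullFromGit } from '@/features/git-sync'

declare function openMergeEditor(args: { path: string; base: string; ours: string; theirs: string }): void

const result = await pullFromGit(workspaceId)
if (!result.success && result.conflicts.length > 0) {
  for (const c of result.conflicts) {
    // Hand each three-way conflict to a resolver UI.
    openMergeEditor({ path: c.path, base: c.base, ours: c.ours, theirs: c.theirs })
  }
}
```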
+
+/**
+ * Collect all files from a tree
+ */
+async function collectTreeFiles(
+  gitClient: GitClient,
+  tree: git.TreeEntry[],
+  prefix: string,
+  files: Map<string, string>
+): Promise<void> {
+  for (const entry of tree) {
+    const path = prefix ? `${prefix}/${entry.path}` : entry.path
+
+    if (entry.type === 'blob') {
+      files.set(path, entry.oid)
+    } else if (entry.type === 'tree') {
+      const subtree = await git.readTree({
+        fs: gitClient.fs,
+        dir: gitClient.dir,
+        oid: entry.oid,
+      })
+      await collectTreeFiles(gitClient, subtree.tree, path, files)
+    }
+  }
+}
+
+/**
+ * Read file content at a specific commit
+ */
+async function readFileAtCommit(
+  gitClient: GitClient,
+  commitOid: string,
+  filepath: string
+): Promise<string> {
+  const commit = await git.readCommit({
+    fs: gitClient.fs,
+    dir: gitClient.dir,
+    oid: commitOid,
+  })
+
+  const pathParts = filepath.split('/')
+  let currentOid = commit.commit.tree
+
+  for (const part of pathParts) {
+    const tree = await git.readTree({
+      fs: gitClient.fs,
+      dir: gitClient.dir,
+      oid: currentOid,
+    })
+
+    const entry = tree.tree.find((e) => e.path === part)
+    if (!entry) {
+      throw new Error(`File not found: ${filepath}`)
+    }
+
+    currentOid = entry.oid
+  }
+
+  const blob = await git.readBlob({
+    fs: gitClient.fs,
+    dir: gitClient.dir,
+    oid: currentOid,
+  })
+
+  return new TextDecoder().decode(blob.blob)
+}
+
+/**
+ * Apply Git files to documents after a successful merge.
+ * Returns the count of files that could potentially be updated.
+ * Actual document updates happen through the editor's E2EE encryption flow.
+ */
+async function applyGitFilesToDocuments(
+  _workspaceId: string,
+  gitClient: GitClient
+): Promise<number> {
+  let filesUpdated = 0
+
+  try {
+    // Get all markdown files from Git
+    const allFiles = await listGitFiles(gitClient, '')
+
+    // Count markdown files
+    for (const filePath of allFiles) {
+      if (filePath.endsWith('.md')) {
+        filesUpdated++
+      }
+    }
+  } catch {
+    // Return 0 on error
+  }
+
+  return filesUpdated
+}
+
+/**
+ * List all files in the Git repository
+ */
+async function listGitFiles(gitClient: GitClient, dirPath: string): Promise<string[]> {
+  const files: string[] = []
+
+  try {
+    const entries = await gitClient.listFiles(dirPath)
+
+    for (const entry of entries) {
+      if (entry === '.git') continue
+
+      const fullPath = dirPath ? `${dirPath}/${entry}` : entry
+
+      try {
+        // Check if it's a directory by trying to list it
+        const subEntries = await gitClient.listFiles(fullPath)
+        if (Array.isArray(subEntries)) {
+          // It's a directory
+          const subFiles = await listGitFiles(gitClient, fullPath)
+          files.push(...subFiles)
+        }
+      } catch {
+        // It's a file
+        files.push(fullPath)
+      }
+    }
+  } catch {
+    // Return empty on error
+  }
+
+  return files
+}
+
+/**
+ * Build the repository file path for a document
+ */
+function buildFilePath(doc: Document): string {
+  const basePath = doc.desired_path || doc.slug || doc.id
+
+  if (doc.type === 'document' && !basePath.endsWith('.md')) {
+    return `${basePath}.md`
+  }
+
+  return basePath
+}
diff --git a/app/src/features/git-sync/lib/sync.ts b/app/src/features/git-sync/lib/sync.ts
new file mode 100644
index 00000000..3b5ca7e2
--- /dev/null
+++ b/app/src/features/git-sync/lib/sync.ts
@@ -0,0 +1,359 @@
+/**
+ * Git Sync Logic for KeyVault
+ *
+ * Handles synchronization between encrypted documents and the Git repository.
+ * All Git operations are performed client-side using isomorphic-git.
+ * Network operations go through the backend proxy (HTTPS) or tunnel (SSH).
+ */
+
+import * as Y from 'yjs'
+
+import {
+  getDocumentContent,
+  type Document,
+  type EncryptedUpdateEntry,
+} from '@/shared/api/client'
+
+import {
+  getKeyVaultService,
+  fetchDocumentKeys,
+  SessionLockedError,
+  decrypt,
+  getSodium,
+  decryptString,
+} from '@/features/security'
+
+import { calculateDirtyFiles } from './dirty-calculator'
+import { GitClient } from './git-client'
+import { loadGitCredentials, type GitCredentials } from './git-credentials'
+
+export interface SyncOptions {
+  message?: string
+  workspaceId: string
+}
+
+export interface SyncResult {
+  success: boolean
+  message: string
+  filesChanged: number
+  commitSha?: string
+}
+
+/**
+ * Decrypt a document title from the API response
+ */
+export async function decryptDocumentTitle(
+  doc: Document,
+  workspaceId: string
+): Promise<string> {
+  if (!doc.encryptedTitle || !doc.encryptedTitleNonce) {
+    return doc.title || 'Untitled'
+  }
+
+  try {
+    const { dek } = await fetchDocumentKeys(doc.id, workspaceId)
+
+    const sodium = await getSodium()
+    const ciphertext = sodium.from_base64(doc.encryptedTitle, sodium.base64_variants.ORIGINAL)
+    const nonce = sodium.from_base64(doc.encryptedTitleNonce, sodium.base64_variants.ORIGINAL)
+    return await decryptString(dek, ciphertext, nonce)
+  } catch (err) {
+    if (err instanceof SessionLockedError) {
+      return doc.title || 'Untitled'
+    }
+    return doc.title || 'Untitled'
+  }
+}
+
+/**
+ * Fetch and decrypt document content
+ */
+export async function fetchDecryptedDocumentContent(
+  documentId: string,
+  workspaceId: string
+): Promise<string> {
+  try {
+    const contentRes = await getDocumentContent({ id: documentId })
+
+    const hasSnapshot = contentRes.content && contentRes.content.length > 0
+    const hasUpdates = contentRes.updates && contentRes.updates.length > 0
+
+    if (!hasSnapshot && !hasUpdates) {
+      return ''
+    }
+
+    const sodium = await getSodium()
+    const doc = new Y.Doc()
+
+    // Get encryption keys
+    let dek: Uint8Array
+    try {
+      const keys = await fetchDocumentKeys(documentId, workspaceId)
+      dek = keys.dek
+    } catch (err) {
+      if (err instanceof SessionLockedError) {
+        doc.destroy()
+        return ''
+      }
+      throw err
+    }
+
+    // Apply snapshot if present
+    if (hasSnapshot) {
+      const encryptedContent = sodium.from_base64(contentRes.content, sodium.base64_variants.ORIGINAL)
+      const nonce = sodium.from_base64(contentRes.nonce!, sodium.base64_variants.ORIGINAL)
+      const yjsState = await decrypt(dek, encryptedContent, nonce)
+      Y.applyUpdateV2(doc, yjsState)
+    }
+
+    // Apply pending updates
+    if (hasUpdates) {
+      for (const update of contentRes.updates as EncryptedUpdateEntry[]) {
+        const encryptedData = sodium.from_base64(update.data, sodium.base64_variants.ORIGINAL)
+        const nonce = sodium.from_base64(update.nonce!, sodium.base64_variants.ORIGINAL)
+        const yjsUpdate = await decrypt(dek, encryptedData, nonce)
+        Y.applyUpdateV2(doc, yjsUpdate)
+      }
+    }
+
+    const text = doc.getText('content').toString()
+    doc.destroy()
+
+    return text
+  } catch {
+    return ''
+  }
+}
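A roundtrip sketch tying the two halves of the E2EE content flow together — `import.ts` produces exactly the ciphertext/nonce pair that `fetchDecryptedDocumentContent` consumes (snapshot first, then pending updates in order). Assumes an unlocked KeyVault and `documentId`/`workspaceId` in scope:

```ts
import * as Y from 'yjs'
import { fetchDocumentKeys, encrypt, decrypt } from '@/features/security'

const { dek } = await fetchDocumentKeys(documentId, workspaceId)

// Encode a Yjs doc the way import.ts does...
const ydoc = new Y.Doc()
ydoc.getText('content').insert(0, '# Hello')
const snapshot = Y.encodeStateAsUpdateV2(ydoc)
ydoc.destroy()

// ...encrypt it for storage, then undo both steps on the next read.
const { ciphertext, nonce } = await encrypt(dek, snapshot)
const restored = new Y.Doc()
Y.applyUpdateV2(restored, await decrypt(dek, ciphertext, nonce))
console.log(restored.getText('content').toString()) // '# Hello'
restored.destroy()
```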
+
+/**
+ * Sync workspace documents to the Git repository
+ */
+export async function syncWorkspaceToGit(options: SyncOptions): Promise<SyncResult> {
+  const { workspaceId, message } = options
+
+  // 1. Check KeyVault unlock status
+  const service = getKeyVaultService()
+  if (!service.isUnlocked) {
+    return {
+      success: false,
+      message: 'KeyVault is locked. Please unlock first.',
+      filesChanged: 0,
+    }
+  }
+
+  // 2. Load Git credentials
+  const credentials = await loadGitCredentials(workspaceId)
+  if (!credentials) {
+    return {
+      success: false,
+      message: 'Git credentials not configured.',
+      filesChanged: 0,
+    }
+  }
+
+  // 3. Initialize Git client
+  const git = new GitClient(workspaceId)
+
+  // 4. Check if the repository is initialized
+  const isInitialized = await git.isInitialized()
+  if (!isInitialized) {
+    // Clone the repository first
+    try {
+      await git.clone(credentials.repositoryUrl, credentials)
+    } catch (error) {
+      return {
+        success: false,
+        message: `Failed to clone repository: ${error instanceof Error ? error.message : 'Unknown error'}`,
+        filesChanged: 0,
+      }
+    }
+  }
+
+  // 5. Pull latest changes
+  try {
+    await git.pull(credentials)
+  } catch (error) {
+    // Pull might fail if there are conflicts - we'll handle that later
+    console.warn('Pull failed, continuing with sync:', error)
+  }
+
+  // 6. Calculate dirty files (on-demand comparison with Git HEAD)
+  const dirtyFiles = await calculateDirtyFiles(workspaceId, git)
+
+  if (dirtyFiles.length === 0) {
+    return {
+      success: true,
+      message: 'No changes to commit.',
+      filesChanged: 0,
+    }
+  }
+
+  // 7. Process each dirty file
+  let filesChanged = 0
+  for (const dirty of dirtyFiles) {
+    try {
+      if (dirty.status === 'deleted') {
+        // Remove file from Git
+        await git.remove(dirty.path)
+      } else {
+        // Added or modified - fetch and write content
+        const content = await fetchDecryptedDocumentContent(dirty.documentId, workspaceId)
+        await git.writeFile(dirty.path, content)
+        await git.add(dirty.path)
+      }
+      filesChanged++
+    } catch (error) {
+      console.error(`Failed to sync file ${dirty.path}:`, error)
+    }
+  }
+
+  // 8. Check if there are staged changes to commit
+  const status = await git.status()
+  const hasChanges = status.some(([, head, workdir, stage]) => {
+    // Check for any changes (modified, added, deleted)
+    return head !== workdir || head !== stage || workdir !== stage
+  })
+
+  if (!hasChanges) {
+    return {
+      success: true,
+      message: 'No changes to commit.',
+      filesChanged: 0,
+    }
+  }
+
+  // 9. Create commit
+  const commitMessage = message || `Sync from RefMD at ${new Date().toISOString()}`
+  let commitSha: string
+  try {
+    commitSha = await git.commit(commitMessage)
+  } catch (error) {
+    return {
+      success: false,
+      message: `Failed to commit: ${error instanceof Error ? error.message : 'Unknown error'}`,
+      filesChanged,
+    }
+  }
+
+  // 10. Push to remote
+  try {
+    await git.push(credentials)
+  } catch (error) {
+    return {
+      success: false,
+      message: `Committed locally but failed to push: ${error instanceof Error ? error.message : 'Unknown error'}`,
+      filesChanged,
+      commitSha,
+    }
+  }
+
+  return {
+    success: true,
+    message: 'Sync completed successfully.',
+    filesChanged,
+    commitSha,
+  }
+}
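The `hasChanges` predicate in step 8 destructures isomorphic-git `statusMatrix` rows; for reference, the row encoding from the isomorphic-git docs and an equivalent check:

```ts
// statusMatrix rows are [filepath, head, workdir, stage]:
//   HEAD:    0 = absent, 1 = present
//   WORKDIR: 0 = absent, 1 = identical to HEAD, 2 = different from HEAD
//   STAGE:   0 = absent, 1 = identical to HEAD, 2 = identical to WORKDIR, 3 = different from WORKDIR
// An unmodified tracked file is [path, 1, 1, 1]; for rows that occur in
// practice, "the three numbers are not all equal" means some change exists.
type StatusRow = [string, 0 | 1, 0 | 1 | 2, 0 | 1 | 2 | 3]

const isUnmodified = ([, head, workdir, stage]: StatusRow) =>
  head === 1 && workdir === 1 && stage === 1
```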
+
+/**
+ * Initialize the Git repository for a workspace
+ */
+export async function initGitRepository(
+  workspaceId: string,
+  repositoryUrl: string,
+  credentials: GitCredentials
+): Promise<{ success: boolean; message: string }> {
+  const service = getKeyVaultService()
+  if (!service.isUnlocked) {
+    return {
+      success: false,
+      message: 'KeyVault is locked. Please unlock first.',
+    }
+  }
+
+  const git = new GitClient(workspaceId)
+
+  try {
+    await git.clone(repositoryUrl, credentials)
+    return {
+      success: true,
+      message: 'Repository initialized successfully.',
+    }
+  } catch (error) {
+    return {
+      success: false,
+      message: `Failed to clone repository: ${error instanceof Error ? error.message : 'Unknown error'}`,
+    }
+  }
+}
+
+/**
+ * Get Git status for a workspace
+ */
+export async function getGitStatus(workspaceId: string): Promise<{
+  initialized: boolean
+  branch?: string
+  changes: number
+  ahead: number
+  behind: number
+}> {
+  const git = new GitClient(workspaceId)
+
+  const isInitialized = await git.isInitialized()
+  if (!isInitialized) {
+    return {
+      initialized: false,
+      changes: 0,
+      ahead: 0,
+      behind: 0,
+    }
+  }
+
+  const branch = await git.currentBranch()
+  const status = await git.status()
+
+  // Count changed files
+  const changes = status.filter(([, head, workdir, stage]) => {
+    return head !== workdir || head !== stage || workdir !== stage
+  }).length
+
+  return {
+    initialized: true,
+    branch: branch || undefined,
+    changes,
+    ahead: 0, // TODO: Calculate ahead/behind with fetch
+    behind: 0,
+  }
+}
+
+/**
+ * Get Git commit history for a workspace
+ */
+export async function getGitHistory(
+  workspaceId: string,
+  depth: number = 20
+): Promise<Array<{ sha: string; message: string; author: string; date: Date }>> {
+  const git = new GitClient(workspaceId)
+
+  const isInitialized = await git.isInitialized()
+  if (!isInitialized) {
+    return []
+  }
+
+  try {
+    const logs = await git.log(depth)
+    return logs.map((entry) => ({
+      sha: entry.oid,
+      message: entry.commit.message,
+      author: entry.commit.author.name,
+      date: new Date(entry.commit.author.timestamp * 1000),
+    }))
+  } catch {
+    return []
+  }
+}
diff --git a/app/src/features/git-sync/ui/commit-diff-panel.tsx b/app/src/features/git-sync/ui/commit-diff-panel.tsx
index 86b8a33c..c2de56ef 100644
--- a/app/src/features/git-sync/ui/commit-diff-panel.tsx
+++ b/app/src/features/git-sync/ui/commit-diff-panel.tsx
@@ -1,14 +1,18 @@
 import { RefreshCw } from 'lucide-react'
 import React from 'react'

-import type { TextDiffLineType, TextDiffResult } from '@/shared/api'
 import { cn } from '@/shared/lib/utils'
 import { Alert, AlertDescription } from '@/shared/ui/alert'
 import { Button } from '@/shared/ui/button'
 import { DiffViewer } from '@/shared/ui/diff-viewer'
 import { ScrollArea } from '@/shared/ui/scroll-area'

-import { fetchCommitDiff } from '@/entities/git'
+import { useAuthContext } from '@/features/auth'
+import {
+  getCommitDiff,
+  type TextDiffLineType,
+  type TextDiffResult,
+} from '@/features/git-sync'

 import { FileExpander } from './file-expander'

@@ -32,6 +36,7 @@ function getStats(diff: TextDiffResult): { additions: number; deletions: number
 }

 export function CommitDiffPanel({ commitId, className }: Props) {
+  const { activeWorkspaceId } = useAuthContext()
   const [loading, setLoading] = React.useState(true)
   const [error, setError] = React.useState<string | null>(null)
   const [diffs, setDiffs] = React.useState<TextDiffResult[]>([])
@@ -39,11 +44,12 @@ export function CommitDiffPanel({ commitId, className }: Props) {
   const [viewMode, setViewMode] = React.useState('unified')

   const load = React.useCallback(async () => {
+    if (!activeWorkspaceId) return
     try {
       setLoading(true)
       setError(null)
       const parent = commitId + '^'
-      const r = await fetchCommitDiff(parent, commitId)
+      const r = await getCommitDiff(activeWorkspaceId, parent, commitId)
       setDiffs(r)
       setExpanded(new Set(r.map((d) => d.file_path)))
     } catch (e: any) {
@@ -52,7 +58,7 @@ export function CommitDiffPanel({ commitId, className }: Props) {
     } finally {
       setLoading(false)
     }
-  }, [commitId])
+  }, [commitId, activeWorkspaceId])

   React.useEffect(() => { load() }, [load])
diff --git a/app/src/features/git-sync/ui/git-history-dialog.tsx b/app/src/features/git-sync/ui/git-history-dialog.tsx
index 52bbe852..fef25026 100644
--- a/app/src/features/git-sync/ui/git-history-dialog.tsx
+++ b/app/src/features/git-sync/ui/git-history-dialog.tsx
@@ -2,7 +2,6 @@
 import { useQuery, useQueryClient } from '@tanstack/react-query'
 import { GitCommit as GitCommitIcon, RefreshCw, User, Clock, AlignLeft, Columns2 } from 'lucide-react'
 import React from 'react'

-import type { GitCommitItem, TextDiffLineType, TextDiffResult } from '@/shared/api'
 import { useIsMobile } from '@/shared/hooks/use-mobile'
 import { overlayPanelClass } from '@/shared/lib/overlay-classes'
 import { cn } from '@/shared/lib/utils'
@@ -12,7 +11,14 @@ import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/shared/ui/di
 import { DiffViewer } from '@/shared/ui/diff-viewer'
 import { ScrollArea } from '@/shared/ui/scroll-area'

-import { getHistory, getCommitDiff } from '@/entities/git'
+import { useAuthContext } from '@/features/auth'
+import {
+  getHistory,
+  getCommitDiff,
+  type GitCommitItem,
+  type TextDiffLineType,
+  type TextDiffResult,
+} from '@/features/git-sync'

 import { FileExpander } from './file-expander'

@@ -25,6 +31,7 @@ const DIFF_LINE_TYPE = {
 export default function GitHistoryDialog({ open, onOpenChange }: Props) {
   const qc = useQueryClient()
+  const { activeWorkspaceId } = useAuthContext()
   const [selectedCommit, setSelectedCommit] = React.useState<GitCommitItem | null>(null)
   const [commitDiffs, setCommitDiffs] = React.useState<TextDiffResult[]>([])
   const [diffLoading, setDiffLoading] = React.useState(false)
@@ -34,16 +41,16 @@
   const isMobile = useIsMobile()
   const [mobileView, setMobileView] = React.useState<'list' | 'detail'>('list')

   React.useEffect(() => {
-    if (open) {
-      try { qc.removeQueries({ queryKey: ['git-history'] }) } catch {}
-      qc.prefetchQuery({ queryKey: ['git-history'], queryFn: () => getHistory() })
+    if (open && activeWorkspaceId) {
+      try { qc.removeQueries({ queryKey: ['git-history', activeWorkspaceId] }) } catch {}
+      qc.prefetchQuery({ queryKey: ['git-history', activeWorkspaceId], queryFn: () => getHistory(activeWorkspaceId) })
     }
-  }, [open, qc])
+  }, [open, qc, activeWorkspaceId])

   const { data, isLoading, isFetching, error } = useQuery({
-    queryKey: ['git-history'],
-    queryFn: () => getHistory(),
-    enabled: open,
+    queryKey: ['git-history', activeWorkspaceId],
+    queryFn: () => activeWorkspaceId ? getHistory(activeWorkspaceId) : [],
+    enabled: open && !!activeWorkspaceId,
     refetchOnMount: 'always',
     staleTime: 0,
     retry: false,
@@ -66,15 +73,16 @@
     }
   }, [isMobile, selectedCommit])

-  const commits: GitCommitItem[] = data?.commits ?? []
+  const commits: GitCommitItem[] = data ?? []

   const fetchCommitDiffs = React.useCallback(async (commit: GitCommitItem) => {
+    if (!activeWorkspaceId) return
     try {
       setDiffLoading(true)
       setDiffError(null)
       setCommitDiffs([])
       const parent = commit.hash + '^'
-      const r = await getCommitDiff({ _from: parent, to: commit.hash })
+      const r = await getCommitDiff(activeWorkspaceId, parent, commit.hash)
       setCommitDiffs(r)
       setExpanded(new Set(r.map((d) => d.file_path)))
     } catch (e: any) {
@@ -82,7 +90,7 @@
     } finally {
       setDiffLoading(false)
     }
-  }, [])
+  }, [activeWorkspaceId])

   const selectCommit = React.useCallback(
     (commit: GitCommitItem) => {
@@ -116,7 +124,7 @@ export default function GitHistoryDialog({ open, onOpenChange }: Props) {
 ) : (
   conflicts.map((conflict) => {
-    const docId = conflict.document_id
+    const docId = conflict.documentId
     const conflictLink = docId ? { id: docId } : null
     return (
-type GitConfig = Awaited<ReturnType<typeof getConfig>>
+interface GitStatusResult {
+  initialized: boolean
+  branch?: string
+  changes: number
+  ahead: number
+  behind: number
+}

 function useGitSyncController() {
   const qc = useQueryClient()
   const isMobile = useIsMobile()
+  const { activeWorkspaceId } = useAuthContext()
+
   const [showChanges, setShowChanges] = useState(false)
   const [showHistory, setShowHistory] = useState(false)
   const [showPullDialog, setShowPullDialog] = useState(false)
-  const [pullConflicts, setPullConflicts] = useState(() => readConflicts())
-  const [emptyConflictWarning, setEmptyConflictWarning] = useState(false)
-  const [polling, setPolling] = useState(false)
-  const [sessionId, setSessionIdState] = useState(() => readSessionId())
+  const [pullConflicts, setPullConflicts] = useState<ConflictItem[]>([])

-  useEffect(() => {
-    const unsubscribe = subscribeSessionId((sid) => setSessionIdState(sid))
-    return () => unsubscribe()
-  }, [])

+  // Check if git credentials exist
+  const {
+    data: hasCredentials,
+    isLoading: credentialsLoading,
+  } = useQuery({
+    queryKey: ['git-credentials', activeWorkspaceId],
+    queryFn: () => activeWorkspaceId ? hasGitCredentials() : false,
+    enabled: !!activeWorkspaceId,
+  })

+  // Get git status (client-side)
   const {
     data: status,
     isLoading: statusLoading,
     error: statusError,
-  } = useQuery({ queryKey: ['git-status'], queryFn: () => getStatus(), refetchInterval: 10000, retry: false })
-  const { data: config } = useQuery({ queryKey: ['git-config'], queryFn: () => getConfig(), retry: false })
+  } = useQuery({
+    queryKey: ['git-status', activeWorkspaceId],
+    queryFn: () => activeWorkspaceId ? getGitStatus(activeWorkspaceId) : { initialized: false, changes: 0, ahead: 0, behind: 0 },
+    enabled: !!activeWorkspaceId && !!hasCredentials,
+    refetchInterval: 30000,
+    retry: false,
+  })

+  // Sync mutation (client-side)
   const syncMutation = useMutation({
-    mutationFn: () => syncNow({ requestBody: { message: undefined } }),
-    onSuccess: (data: any) => {
-      const ok = !!data?.success
-      const changed = data?.files_changed ?? 0
-      const msg = data?.message || 'Sync completed'
-      if (ok) toast.success(`${msg}: ${changed} files changed`)
-      // Do not surface push failures in toast; rely on status panel instead
-      qc.invalidateQueries({ queryKey: ['git-status'] })
+    mutationFn: async () => {
+      if (!activeWorkspaceId) throw new Error('No workspace selected')
+      return syncWorkspaceToGit({ workspaceId: activeWorkspaceId })
     },
-    onError: (e: any) => {
-      const raw = e?.body?.message || e?.message || `${e}`
-      const lower = typeof raw === 'string' ? raw.toLowerCase() : ''
-      if (lower.includes('git_repo_not_found') || lower.includes('repo_not_found') || lower.includes('git_http_not_found')) {
-        toast.error('Git sync failed: repository URL or branch was not found. Please check the URL/branch and try again.')
-      } else if (lower.includes('git_auth_redirect') || lower.includes('too many redirects') || lower.includes('http (34)')) {
-        toast.error('Git sync failed: remote requires re-authentication. Please re-enter your token/SSH key and ensure SSO is approved.')
-      } else if (e?.status === 409) {
-        toast.error('Remote is ahead. Pull and resolve conflicts before syncing.')
-        clearResolutions()
-        const fallback = readConflicts()
-        setPullConflicts(fallback)
-        setConflicts(fallback)
-        if (!fallback.length) setEmptyConflictWarning(true)
-        setShowPullDialog(true)
-        pullMutation.mutate({ resolutions: [] })
+    onSuccess: (result) => {
+      if (result.success) {
+        toast.success(`${result.message}: ${result.filesChanged} files changed`)
       } else {
-        toast.error(`Sync failed: ${raw}`)
+        toast.error(result.message)
       }
-      qc.invalidateQueries({ queryKey: ['git-status'] })
+      qc.invalidateQueries({ queryKey: ['git-status', activeWorkspaceId] })
     },
-  })
-
-  const initMutation = useMutation({
-    mutationFn: () => initRepository(),
-    onSuccess: () => {
-      toast.success('Git repository initialized')
-      qc.invalidateQueries({ queryKey: ['git-status'] })
+    onError: (e: Error) => {
+      toast.error(`Sync failed: ${e.message}`)
+      qc.invalidateQueries({ queryKey: ['git-status', activeWorkspaceId] })
     },
-    onError: (e: any) => toast.error(`Initialization failed: ${e?.message || e}`),
   })

+  // Pull mutation (client-side)
   const pullMutation = useMutation({
-    mutationFn: async (payload?: { resolutions?: GitPullResolution[] }) =>
-      performPullSession(payload?.resolutions, { sessionId }),
+    mutationFn: async () => {
+      if (!activeWorkspaceId) throw new Error('No workspace selected')
+      return pullFromGit(activeWorkspaceId)
+    },
-    onSuccess: (result) => {
-      setSessionIdState(result.sessionId ?? null)
-      setPullConflicts(result.conflicts)
-      setEmptyConflictWarning(Boolean(result.emptyConflictWarning))
-
-      if (result.status === 'stale') {
-        toast.error('Pull session expired. Please pull again.')
-        qc.invalidateQueries({ queryKey: ['git-status'] })
-        return
-      }
-
-      if (result.status === 'conflicts') {
-        if (result.emptyConflictWarning) {
-          toast.error('Conflicts reported but server returned no list.')
-        }
+    onSuccess: (result) => {
+      if (result.success) {
+        toast.success(result.message)
+        qc.invalidateQueries({ queryKey: ['git-status', activeWorkspaceId] })
+      } else if (result.conflicts.length > 0) {
+        setPullConflicts(result.conflicts)
         setShowPullDialog(true)
-        return
-      }
-
-      if (result.status === 'merged') {
-        toast.success('Pull completed')
-        qc.invalidateQueries({ queryKey: ['git-status'] })
-        return
+      } else {
+        toast.error(result.message)
       }
-
-      toast.error(result.message || 'Pull failed')
+    },
+    onError: (e: Error) => {
+      toast.error(`Pull failed: ${e.message}`)
     },
   })

-  const syncPending = syncMutation.isPending || initMutation.isPending
-  const hasChanges = ((status?.uncommitted_changes || 0) + (status?.untracked_files || 0)) > 0
-  const isConfigured = Boolean(config) && Boolean(status?.repository_initialized)
+  const syncPending = syncMutation.isPending
+  const hasChanges = (status?.changes || 0) > 0
+  const isConfigured = !!hasCredentials && !!status?.initialized
   const canSync = isConfigured && !statusError
-  const showButton = statusLoading || statusError || Boolean(status?.repository_initialized)
+  const showButton = credentialsLoading || statusLoading || statusError || !!hasCredentials

   const handleSync = useCallback(() => {
-    if (!config || !status?.repository_initialized) return
+    if (!isConfigured) return
     syncMutation.mutate()
-  }, [config, status, syncMutation])
+  }, [isConfigured, syncMutation])
+
+  const handlePull = useCallback(() => {
+    if (!isConfigured) return
+    pullMutation.mutate()
+  }, [isConfigured, pullMutation])

   const openChanges = useCallback(() => {
-    if (!status?.repository_initialized) return
+    if (!status?.initialized) return
     setShowChanges(true)
   }, [status])
+
   const openHistory = useCallback(() => {
-    if (!status?.repository_initialized) return
+    if (!status?.initialized) return
     setShowHistory(true)
   }, [status])

   const statusErrorMessage = useMemo(() => {
     if (!statusError) return null
-    const raw = (statusError as any)?.body?.message || (statusError as any)?.message || `${statusError}`
-    return raw || 'Failed to load Git status'
+    return (statusError as Error)?.message || 'Failed to load Git status'
   }, [statusError])

   const statusText = useMemo(() => {
-    if (statusLoading) return 'Loading…'
+    if (credentialsLoading || statusLoading) return 'Loading…'
     if (statusError) return 'Status unavailable'
-    if (!config) return 'Configuration required'
-    if (!status?.repository_initialized) return 'Repository not initialized'
-    if (hasChanges) return `${(status?.uncommitted_changes || 0) + (status?.untracked_files || 0)} changes`
-    if (status?.has_remote && status?.last_sync_status === 'error') return 'Push failed'
+    if (!hasCredentials) return 'Configuration required'
+    if (!status?.initialized) return 'Repository not initialized'
+    if (hasChanges) return `${status.changes} changes`
     return 'Up to date'
-  }, [config, hasChanges, status, statusError, statusLoading])
+  }, [hasCredentials, hasChanges, status, statusError, credentialsLoading, statusLoading])

   const tooltipText = useMemo(() => {
     if (statusError) return statusErrorMessage || 'Failed to load Git status'
-    if (!config) return 'Git configuration required'
-    if (!status?.repository_initialized) return 'Click to configure Git'
+    if (!hasCredentials) return 'Git configuration required'
+    if (!status?.initialized) return 'Click to configure Git'
     if (hasChanges) return 'Click to sync changes'
-    if (status?.has_remote && status?.last_sync_status === 'error') return status?.last_sync_message || 'Last push failed'
     return 'Repository is up to date'
-  }, [config, hasChanges, status, statusError, statusErrorMessage])
+  }, [hasCredentials, hasChanges, status, statusError, statusErrorMessage])

   const icon = useMemo(() => {
-    if (syncPending || statusLoading) return
+    if (syncPending || credentialsLoading || statusLoading) return
     if (statusError) return
-    if (!config || !status?.repository_initialized) return
-    if (!hasChanges && status?.has_remote && status?.last_sync_status === 'error') return
+    if (!hasCredentials || !status?.initialized) return
     if (hasChanges) return
     return
-  }, [config, hasChanges, status, statusError, statusLoading, syncPending])
-
-  useEffect(() => {
-    const sid = sessionId ?? readSessionId()
-    if (!sid) return
-    setPolling(true)
-    const timer = window.setInterval(() => {
-      getPullSession({ id: sid })
-        .then((session) => {
-          if ((session as any)?.status === 'stale') {
-            clearSession()
-            clearResolutions()
-            setConflicts([])
-            setPullConflicts([])
-            setEmptyConflictWarning(true)
-            toast.error('Pull session expired. Please pull again.')
-            return
-          }
-          setSessionId(session.session_id)
-          const conflicts = session.conflicts ?? []
-          setConflicts(conflicts)
-          setPullConflicts(conflicts)
-          setEmptyConflictWarning(false)
-        })
-        .catch(() => {})
-    }, 10000)
-    return () => {
-      window.clearInterval(timer)
-      setPolling(false)
-    }
-  }, [sessionId])
+  }, [hasCredentials, hasChanges, status, statusError, credentialsLoading, statusLoading, syncPending])

   return {
-    sessionId,
-    polling,
     isMobile,
     syncPending,
     canSync,
@@ -217,6 +173,7 @@
     statusText,
     tooltipText,
     handleSync,
+    handlePull,
     openChanges,
     openHistory,
     showChanges,
@@ -230,16 +187,12 @@
     pullMutation,
     pullConflicts,
     setPullConflicts,
-    setEmptyConflictWarning,
-    emptyConflictWarning,
   }
 }

 export default function GitSyncButton({ className, compact = false }: Props) {
   const controller = useGitSyncController()
   const {
-    sessionId,
-    polling,
     isMobile,
     syncPending,
     canSync,
@@ -247,6 +200,7 @@ export default function GitSyncButton({ className, compact = false }: Props) {
     statusText,
     tooltipText,
     handleSync,
+    handlePull,
     openChanges,
     openHistory,
     showChanges,
@@ -259,9 +213,6 @@ export default function GitSyncButton({ className, compact = false }: Props) {
     setShowPullDialog,
     pullMutation,
     pullConflicts,
-    setPullConflicts,
-    setEmptyConflictWarning,
-    emptyConflictWarning,
   } = controller

   const [menuOpen, setMenuOpen] = useState(false)
@@ -295,16 +246,14 @@
-
- {icon} -
-

Git Sync

-

- {polling ? 'Synchronizing conflicts…' : statusText} -

-
+
+
+ {icon} +
+

Git Sync

+

{statusText}

+
 {
-                  clearResolutions()
-                  const stored = readConflicts()
-                  setPullConflicts(stored)
-                  setConflicts(stored)
-                  setEmptyConflictWarning(!stored.length)
-                  setShowPullDialog(true)
-                  pullMutation.mutate({ resolutions: [] })
+                  handlePull()
                   setMenuOpen(false)
                 }}
                 disabled={!isConfigured || pullMutation.isPending}
@@ -339,7 +282,7 @@
               ) : (
               )}
-              Pull (resolve conflicts)
+              Pull
 {
@@ -361,10 +304,6 @@
               View History
-
-              Git Ignore
-
             setMenuOpen(false)}>
@@ -382,9 +321,7 @@
         onOpenChange={setShowPullDialog}
         conflicts={pullConflicts}
         isLoading={pullMutation.isPending}
-        emptyWarning={emptyConflictWarning}
-        sessionId={sessionId}
-        onRetry={() => pullMutation.mutate({ resolutions: [] })}
+        onRetry={() => pullMutation.mutate()}
       />
     )
 }
diff --git a/app/src/features/git-sync/ui/working-diff-panel.tsx b/app/src/features/git-sync/ui/working-diff-panel.tsx
index 33eca199..7601b42a 100644
--- a/app/src/features/git-sync/ui/working-diff-panel.tsx
+++ b/app/src/features/git-sync/ui/working-diff-panel.tsx
@@ -1,14 +1,18 @@
 import { RefreshCw, GitBranch, AlignLeft, Columns2 } from 'lucide-react'
 import React from 'react'

-import type { TextDiffLineType, TextDiffResult } from '@/shared/api'
 import { cn } from '@/shared/lib/utils'
 import { Alert, AlertDescription } from '@/shared/ui/alert'
 import { Button } from '@/shared/ui/button'
 import { DiffViewer } from '@/shared/ui/diff-viewer'
 import { ScrollArea } from '@/shared/ui/scroll-area'

-import { getWorkingDiff } from '@/entities/git'
+import { useAuthContext } from '@/features/auth'
+import {
+  getWorkingDiff,
+  type TextDiffLineType,
+  type TextDiffResult,
+} from '@/features/git-sync'

 import { FileExpander } from './file-expander'

@@ -32,6 +36,7 @@ function getStats(diff: TextDiffResult): { additions: number; deletions: number
 }

 export function WorkingDiffPanel({ documentPath, className }: Props) {
+  const { activeWorkspaceId } = useAuthContext()
   const [loading, setLoading] = React.useState(true)
   const [error, setError] = React.useState<string | null>(null)
   const [diffs, setDiffs] = React.useState<TextDiffResult[]>([])
@@ -39,10 +44,11 @@ export function WorkingDiffPanel({ documentPath, className }: Props) {
   const [expanded, setExpanded] = React.useState<Set<string>>(new Set())

   const load = React.useCallback(async () => {
+    if (!activeWorkspaceId) return
     try {
       setLoading(true)
       setError(null)
-      const r = await getWorkingDiff()
+      const r = await getWorkingDiff(activeWorkspaceId)
       setDiffs(r)
       if (documentPath) {
         const match = r.filter((d) => d.file_path === documentPath).map((d) => d.file_path)
@@ -54,7 +60,7 @@ export function WorkingDiffPanel({ documentPath, className }: Props) {
     } finally {
       setLoading(false)
     }
-  }, [documentPath])
+  }, [documentPath, activeWorkspaceId])

   React.useEffect(() => { load() }, [load])
diff --git a/app/src/features/markdown/__tests__/renderer.test.ts b/app/src/features/markdown/__tests__/renderer.test.ts
new file mode 100644
index 00000000..e2f47d39
--- /dev/null
+++ b/app/src/features/markdown/__tests__/renderer.test.ts
@@ -0,0 +1,183 @@
+/**
+ * Tests for the client-side Markdown renderer
+ */
+
+import { describe, it, expect, beforeEach } from 'vitest'
+
+import { resetPlaceholderCounter } from '../plugins'
+import { renderMarkdown } from '../renderer'
+
+describe('Markdown Renderer', () => {
+  beforeEach(() => {
+    resetPlaceholderCounter()
+  })
+
+  describe('Basic Markdown', () => {
+    it('should render paragraphs', async () => {
+      const result = await renderMarkdown('Hello, world!')
+      expect(result.html).toContain('<p>')
+    })
+
+    it('should render headings', async () => {
+      const result = await renderMarkdown('# Heading 1\n## Heading 2')
+      expect(result.html).toContain('<h1')
+      expect(result.html).toContain('<h2')
+    })
+
+    it('should render links', async () => {
+      const result = await renderMarkdown('[Example](https://example.com)')
+      expect(result.html).toContain('<a')
+      expect(result.html).toContain('https://example.com')
+    })
+
+    it('should render code blocks', async () => {
+      const result = await renderMarkdown('```javascript\nconst x = 1;\n```')
+      expect(result.html).toContain('<pre')
+    })
+  })
+
+  describe('GFM', () => {
+    it('should render tables', async () => {
+      const md = `| A | B |
+|---|---|
+| 1 | 2 |`
+      const result = await renderMarkdown(md)
+      expect(result.html).toContain('<table')
+    })
+
+    it('should render strikethrough', async () => {
+      const result = await renderMarkdown('~~deleted~~')
+      expect(result.html).toContain('<del')
+    })
+
+    it('should render task list checkboxes', async () => {
+      const result = await renderMarkdown('- [x] Done\n- [ ] Todo')
+      expect(result.html).toContain('type="checkbox"')
+    })
+  })
+
+  describe('Wikilinks', () => {
+    it('should transform [[target]] to refmd-wikilink', async () => {
+      const result = await renderMarkdown('Link to [[document]]')
+      expect(result.html).toContain('<refmd-wikilink')
+    })
+
+    it('should use the alias as the display label', async () => {
+      const result = await renderMarkdown('Link to [[doc-id|My Document]]')
+      expect(result.html).toContain('My Document')
+    })
+
+    it('should support the inline variant', async () => {
+      const result = await renderMarkdown('Link to [[doc-id|alias|inline]]')
+      expect(result.html).toContain('variant="inline"')
+    })
+
+    it('should transform #wiki:target syntax', async () => {
+      const result = await renderMarkdown('Link to #wiki:document')
+      expect(result.html).toContain('<refmd-wikilink')
+    })
+  })
+
+  describe('Hashtags', () => {
+    it('should transform #tag to anchor', async () => {
+      const result = await renderMarkdown('This is #test tag')
+      expect(result.html).toContain('class="hashtag"')
+      expect(result.html).toContain('#test')
+    })
+
+    it('should not transform hashtags in URLs', async () => {
+      const result = await renderMarkdown('Visit https://example.com#section')
+      expect(result.html).not.toContain('class="hashtag"')
+    })
+
+    it('should not transform hashtags after alphanumeric', async () => {
+      const result = await renderMarkdown('Issue#123')
+      expect(result.html).not.toContain('class="hashtag"')
+    })
+  })
+
+  describe('Mentions', () => {
+    it('should transform #mention:user to anchor', async () => {
+      const result = await renderMarkdown('Hello #mention:john')
+      expect(result.html).toContain('data-mention-target="john"')
+    })
+  })
+
+  describe('Placeholders', () => {
+    it('should convert code blocks to placeholders when kind matches', async () => {
+      const result = await renderMarkdown('```mermaid\ngraph TD\n```', {
+        placeholderKinds: ['mermaid'],
+      })
+      expect(result.html).toContain('data-refmd-placeholder="true"')
+      expect(result.html).toContain('data-placeholder-kind="mermaid"')
+      expect(result.placeholders).toHaveLength(1)
+      expect(result.placeholders[0].kind).toBe('mermaid')
+    })
+
+    it('should not convert code blocks when kind does not match', async () => {
+      const result = await renderMarkdown('```javascript\nconst x = 1;\n```', {
+        placeholderKinds: ['mermaid'],
+      })
+      expect(result.html).not.toContain('data-refmd-placeholder')
+      expect(result.placeholders).toHaveLength(0)
+    })
+  })
+
+  describe('Sourcepos', () => {
+    it('should add data-sourcepos attributes', async () => {
+      const result = await renderMarkdown('# Heading')
+      expect(result.html).toContain('data-sourcepos')
+    })
+  })
+
+  describe('Hash', () => {
+    it('should return consistent hash for same input', async () => {
+      const result1 = await renderMarkdown('Hello')
+      const result2 = await renderMarkdown('Hello')
+      expect(result1.hash).toBe(result2.hash)
+    })
+
+    it('should return different hash for different input', async () => {
+      const result1 = await renderMarkdown('Hello')
+      const result2 = await renderMarkdown('World')
+      expect(result1.hash).not.toBe(result2.hash)
+    })
+  })
+
+  describe('Sanitization', () => {
+    it('should sanitize script tags by default', async () => {
+      const result = await renderMarkdown('<script>alert(1)</script>')
+      expect(result.html).not.toContain('<script')
+    })
+
+    it('should keep raw HTML when sanitize is false', async () => {
+      const result = await renderMarkdown('<div>Content</div>', {
+        sanitize: false,
+      })
+      expect(result.html).toContain('<div>
') + }) + }) +}) + +describe('Syntax Highlighting', () => { + it('should highlight code when highlight feature is enabled', async () => { + const result = await renderMarkdown('```javascript\nconst x = 1;\n```', { + features: ['gfm', 'highlight'], + }) + expect(result.html).toContain('not-prose') + expect(result.html).toContain('shiki') + expect(result.html).toContain('') + }) +}) diff --git a/app/src/features/markdown/index.ts b/app/src/features/markdown/index.ts new file mode 100644 index 00000000..2af4cb10 --- /dev/null +++ b/app/src/features/markdown/index.ts @@ -0,0 +1,13 @@ +/** + * Client-side Markdown rendering feature + * Replaces server-side Comrak rendering for E2EE support + */ + +export { renderMarkdown, renderMarkdownMany, initHighlighter } from './renderer' +export type { + RenderOptions, + RenderResponse, + PlaceholderItem, + RenderRequest, + requestToOptions, +} from './types' diff --git a/app/src/features/markdown/lib/add-placeholder-hydration.ts b/app/src/features/markdown/lib/add-placeholder-hydration.ts new file mode 100644 index 00000000..659bf379 --- /dev/null +++ b/app/src/features/markdown/lib/add-placeholder-hydration.ts @@ -0,0 +1,197 @@ +/** + * Add hydration attributes to placeholder elements + * This is needed because the client-side markdown renderer creates placeholders + * but doesn't add the hydration metadata that tells the hydrator where to load modules from. + */ + +import type { ManifestItem } from '@/shared/api' +import { API_BASE_URL } from '@/shared/lib/config' + +/** + * Get the API origin for plugin asset URLs + */ +function getApiOrigin(): string { + try { + if (API_BASE_URL) { + return new URL(API_BASE_URL).origin + } + } catch { + // Fallback to current origin + } + if (typeof window !== 'undefined') { + return window.location.origin + } + return '' +} + +interface RendererSpec { + kind: string + pluginId: string + pluginVersion: string + scope: string + hydrate?: { + /** Signed URL path from the server */ + module: string + export?: string + } +} + +/** + * Extract renderer specs from plugin manifests + * The manifest's renderers[].hydrate.module contains pre-signed URLs from the server + */ +export function collectRendererSpecs(manifests: ManifestItem[]): RendererSpec[] { + const specs: RendererSpec[] = [] + + for (const manifest of manifests) { + const renderers = (manifest as any)?.renderers + if (!Array.isArray(renderers)) continue + + for (const renderer of renderers) { + const kind = renderer?.kind + if (typeof kind !== 'string' || !kind.trim()) continue + + const hydrate = renderer?.hydrate + if (!hydrate || typeof hydrate !== 'object') continue + + // The module URL should be pre-signed by the server + const module = hydrate?.module + if (typeof module !== 'string' || !module.trim()) continue + + specs.push({ + kind: kind.trim().toLowerCase(), + pluginId: manifest.id, + pluginVersion: manifest.version || 'dev', + scope: (manifest as any)?.scope || 'global', + hydrate: { + module: module.trim(), + export: typeof hydrate.export === 'string' ? hydrate.export : undefined, + }, + }) + } + } + + return specs +} + +/** + * Get the hydration module URL (pre-signed by server) + */ +function getHydrateUrl(spec: RendererSpec): string { + const modulePath = spec.hydrate!.module + + // If it's already an absolute URL, return as-is + if (modulePath.startsWith('http://') || modulePath.startsWith('https://')) { + return modulePath + } + + // Prepend API origin to the signed path + const apiOrigin = getApiOrigin() + return apiOrigin ? 
`${apiOrigin}${modulePath}` : modulePath +} + +/** + * Build hydration context as base64-encoded JSON + */ +function buildHydrateContext( + placeholderId: string, + kind: string, + code: string, + spec: RendererSpec, + options?: { theme?: string; docId?: string; token?: string } +): string { + const context = { + request: { + kind, + id: placeholderId, + code, + options: { + theme: options?.theme, + doc_id: options?.docId, + token: options?.token, + }, + }, + plugin: { + id: spec.pluginId, + version: spec.pluginVersion, + scope: spec.scope, + }, + } + + try { + return btoa(JSON.stringify(context)) + } catch { + return '' + } +} + +/** + * Add hydration attributes to placeholder elements in HTML string + * + * @param html - The rendered HTML with placeholder divs + * @param placeholders - Array of placeholder items with code content + * @param specs - Renderer specs from plugin manifests + * @param options - Additional options for context + * @returns Modified HTML with hydration attributes added + */ +export function addPlaceholderHydration( + html: string, + placeholders: Array<{ kind: string; id: string; code: string }>, + specs: RendererSpec[], + options?: { theme?: string; docId?: string; token?: string } +): string { + if (!placeholders.length || !specs.length) return html + + // Build a map of kind -> spec (first matching spec wins) + const specByKind = new Map() + for (const spec of specs) { + if (!specByKind.has(spec.kind)) { + specByKind.set(spec.kind, spec) + } + } + + let result = html + + for (const placeholder of placeholders) { + const spec = specByKind.get(placeholder.kind.toLowerCase()) + if (!spec || !spec.hydrate) continue + + const hydrateUrl = getHydrateUrl(spec) + const exportName = spec.hydrate.export || 'default' + const context = buildHydrateContext( + placeholder.id, + placeholder.kind, + placeholder.code, + spec, + options + ) + + // Find the placeholder div and add attributes + const needle = `data-placeholder-id="${placeholder.id}"` + const idx = result.indexOf(needle) + if (idx === -1) continue + + // Find the closing > of the opening tag + const afterNeedle = result.slice(idx + needle.length) + const closeIdx = afterNeedle.indexOf('>') + if (closeIdx === -1) continue + + const insertPos = idx + needle.length + closeIdx + + // Build attribute string + const attrs = ` data-placeholder-hydrate="${escapeHtml(hydrateUrl)}" data-placeholder-hydrate-export="${escapeHtml(exportName)}" data-placeholder-hydrate-context="${escapeHtml(context)}" data-placeholder-plugin="${escapeHtml(spec.pluginId)}" data-placeholder-version="${escapeHtml(spec.pluginVersion)}" data-placeholder-scope="${escapeHtml(spec.scope)}"` + + result = result.slice(0, insertPos) + attrs + result.slice(insertPos) + } + + return result +} + +function escapeHtml(str: string): string { + return str + .replace(/&/g, '&') + .replace(//g, '>') + .replace(/"/g, '"') + .replace(/'/g, ''') +} diff --git a/app/src/features/markdown/plugins/attachments.ts b/app/src/features/markdown/plugins/attachments.ts new file mode 100644 index 00000000..2326f28f --- /dev/null +++ b/app/src/features/markdown/plugins/attachments.ts @@ -0,0 +1,124 @@ +/** + * Attachments plugin for rehype + * Rewrites attachment URLs to absolute API paths + * + * Comrak compatibility: + * - ./attachments/xxx -> /api/uploads/{docId}/attachments/xxx + * - attachments/xxx -> /api/uploads/{docId}/attachments/xxx + * - Adds ?token=xxx if share token provided + * - Adds class="file-attachment" to attachment links + */ + +import type { Root, Element } 
from 'hast' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +export interface AttachmentOptions { + /** Document ID for URL rewriting */ + docId?: string + /** Share token for authentication */ + token?: string + /** Base origin for absolute URLs */ + baseOrigin?: string + /** Enable absolute URL rewriting (default: false) */ + absoluteAttachments?: boolean +} + +function isAttachmentUrl(url: string): boolean { + return url.startsWith('./attachments/') || url.startsWith('attachments/') +} + +function isUploadsUrl(url: string): boolean { + return url.startsWith('/api/uploads/') +} + +function rewriteAttachmentUrl(url: string, options: AttachmentOptions): string | null { + const { docId, token, baseOrigin, absoluteAttachments } = options + + if (!absoluteAttachments || !docId) { + return null + } + + let path: string + + if (url.startsWith('./attachments/')) { + path = `/api/uploads/${docId}/${url.slice(2)}` // Remove './' + } else if (url.startsWith('attachments/')) { + path = `/api/uploads/${docId}/${url}` + } else if (url.startsWith('/api/uploads/')) { + path = url + } else { + return null + } + + // Add token if provided + if (token) { + const encodedToken = encodeURIComponent(token) + if (path.includes('?')) { + path += `&token=${encodedToken}` + } else { + path += `?token=${encodedToken}` + } + } + + // Add base origin if provided + if (baseOrigin) { + const origin = baseOrigin.replace(/\/$/, '') + return `${origin}${path}` + } + + return path +} + + +export const rehypeAttachments: Plugin<[AttachmentOptions?], Root> = (options = {}) => { + return (tree) => { + visit(tree, 'element', (node: Element, index: number | undefined, parent) => { + if (index === undefined || !parent) return + + // Handle links + if (node.tagName === 'a') { + const href = node.properties?.href + if (typeof href !== 'string') return + + if (isAttachmentUrl(href) || isUploadsUrl(href)) { + const newUrl = rewriteAttachmentUrl(href, options) + + if (newUrl) { + node.properties = node.properties || {} + node.properties.href = newUrl + } + + // Add file-attachment class + const existingClass = node.properties?.className + if (Array.isArray(existingClass)) { + if (!existingClass.includes('file-attachment')) { + existingClass.push('file-attachment') + } + } else if (typeof existingClass === 'string') { + node.properties.className = [existingClass, 'file-attachment'] + } else { + node.properties.className = ['file-attachment'] + } + } + } + + // Handle images + if (node.tagName === 'img') { + const src = node.properties?.src + if (typeof src !== 'string') return + + if (isAttachmentUrl(src) || isUploadsUrl(src)) { + const newUrl = rewriteAttachmentUrl(src, options) + + if (newUrl) { + node.properties = node.properties || {} + node.properties.src = newUrl + } + } + } + }) + } +} + +export default rehypeAttachments diff --git a/app/src/features/markdown/plugins/hashtag.ts b/app/src/features/markdown/plugins/hashtag.ts new file mode 100644 index 00000000..8d37dd80 --- /dev/null +++ b/app/src/features/markdown/plugins/hashtag.ts @@ -0,0 +1,124 @@ +/** + * Hashtag plugin for rehype + * Transforms #tag syntax to elements + * + * Comrak compatibility: + * - #tag -> #tag + * - Pattern: #[A-Za-z0-9_]{1,50} + * - Skip if preceded by alphanumeric or /:@.-_+~=?&% + */ + +import type { Root, Element, Text } from 'hast' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +// Characters that prevent hashtag recognition when immediately preceding # +const SKIP_PREV_CHARS 
= /[A-Za-z0-9\/:@.\-_+~=?&%]$/ + +// Hashtag pattern: # followed by 1-50 alphanumeric or underscore characters +const TAG_PATTERN = /#([A-Za-z0-9_]{1,50})(?![A-Za-z0-9_])/g + +function createHashtagElement(tag: string): Element { + const href = `#tag-${tag}` + const text = `#${tag}` + + return { + type: 'element', + tagName: 'a', + properties: { + href: href, + class: 'hashtag', + }, + children: [{ type: 'text', value: text }], + } +} + +interface ProcessedNode { + type: 'text' | 'element' + node: Text | Element +} + +function processText(text: string): ProcessedNode[] { + const result: ProcessedNode[] = [] + let lastIndex = 0 + + // Reset regex state + TAG_PATTERN.lastIndex = 0 + + let match: RegExpExecArray | null + while ((match = TAG_PATTERN.exec(text)) !== null) { + const matchIndex = match.index + const tag = match[1] + + // Check if preceded by a character that should prevent matching + if (matchIndex > 0) { + const prevChar = text[matchIndex - 1] + if (SKIP_PREV_CHARS.test(prevChar)) { + // Skip this match, continue searching + continue + } + } + + // Add text before match + if (matchIndex > lastIndex) { + result.push({ + type: 'text', + node: { type: 'text', value: text.slice(lastIndex, matchIndex) } + }) + } + + // Add hashtag element + result.push({ + type: 'element', + node: createHashtagElement(tag), + }) + + lastIndex = matchIndex + match[0].length + } + + // Add remaining text + if (lastIndex < text.length) { + result.push({ + type: 'text', + node: { type: 'text', value: text.slice(lastIndex) } + }) + } + + return result +} + +export const rehypeHashtag: Plugin<[], Root> = () => { + return (tree) => { + visit(tree, 'text', (node: Text, index: number | undefined, parent) => { + if (index === undefined || !parent || parent.type !== 'element') return + + const text = node.value + if (!text.includes('#')) { + return + } + + // Skip if inside a link + const parentElement = parent as Element + if (parentElement.tagName === 'a') { + return + } + + const processed = processText(text) + + if (processed.length === 0) { + return + } + + if (processed.length === 1 && processed[0].type === 'text' && (processed[0].node as Text).value === text) { + // No changes needed + return + } + + // Replace the text node with the processed nodes + const newNodes = processed.map((p) => p.node) + parentElement.children.splice(index, 1, ...newNodes) + }) + } +} + +export default rehypeHashtag diff --git a/app/src/features/markdown/plugins/highlight.ts b/app/src/features/markdown/plugins/highlight.ts new file mode 100644 index 00000000..ad4dbf84 --- /dev/null +++ b/app/src/features/markdown/plugins/highlight.ts @@ -0,0 +1,199 @@ +/** + * Syntax highlighting plugin for rehype using Shiki + * Replaces code blocks with highlighted HTML + * + * Comrak compatibility: + * - Wraps highlighted code in
<div class="not-prose"><pre>...</pre></div>
+ * - Default theme: one-dark-pro (similar to OneHalfDark in Syntect) + * + * Bundle optimization: + * - Uses fine-grained imports to avoid bundling all languages/themes + * - Languages and themes are loaded on-demand via dynamic imports + */ + +import type { Root, Element } from 'hast' +import { createHighlighterCore, type HighlighterCore } from 'shiki/core' +import { createOnigurumaEngine } from 'shiki/engine/oniguruma' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +export interface HighlightOptions { + /** Theme name (default: 'one-dark-pro') */ + theme?: string + /** Skip highlighting for these languages (e.g., placeholder kinds) */ + skipLanguages?: Set +} + +// Singleton highlighter instance +let highlighterPromise: Promise | null = null +let loadedLanguages = new Set() + +async function getHighlighter(): Promise { + if (!highlighterPromise) { + highlighterPromise = createHighlighterCore({ + engine: createOnigurumaEngine(import('shiki/wasm')), + // Load themes via dynamic imports (fine-grained bundle) + themes: [ + import('shiki/themes/one-dark-pro.mjs'), + import('shiki/themes/github-light.mjs'), + import('shiki/themes/github-dark.mjs'), + ], + // Load common languages via dynamic imports (fine-grained bundle) + langs: [ + import('shiki/langs/javascript.mjs'), + import('shiki/langs/typescript.mjs'), + import('shiki/langs/jsx.mjs'), + import('shiki/langs/tsx.mjs'), + import('shiki/langs/json.mjs'), + import('shiki/langs/html.mjs'), + import('shiki/langs/css.mjs'), + import('shiki/langs/markdown.mjs'), + import('shiki/langs/python.mjs'), + import('shiki/langs/rust.mjs'), + import('shiki/langs/go.mjs'), + import('shiki/langs/bash.mjs'), + import('shiki/langs/shell.mjs'), + import('shiki/langs/yaml.mjs'), + import('shiki/langs/toml.mjs'), + import('shiki/langs/sql.mjs'), + import('shiki/langs/graphql.mjs'), + ], + }) + } + return highlighterPromise +} + +async function ensureLanguage(highlighter: HighlighterCore, lang: string): Promise { + if (loadedLanguages.has(lang)) { + return true + } + + try { + const languages = highlighter.getLoadedLanguages() + if (languages.includes(lang)) { + loadedLanguages.add(lang) + return true + } + + // Try to load the language dynamically via dynamic import + try { + const langModule = await import(`shiki/langs/${lang}.mjs`) + await highlighter.loadLanguage(langModule.default) + loadedLanguages.add(lang) + return true + } catch { + // Language module not found + return false + } + } catch { + // Language not available, will use plaintext + return false + } +} + +function getCodeContent(node: Element): string { + return node.children + .map((child) => { + if (child.type === 'text') return child.value + if (child.type === 'element') return getCodeContent(child) + return '' + }) + .join('') +} + +function getLanguageFromClass(className: unknown): string | null { + if (!className || !Array.isArray(className)) return null + + const langClass = className.find( + (c): c is string => typeof c === 'string' && c.startsWith('language-') + ) + + if (!langClass) return null + return langClass.slice('language-'.length).toLowerCase() +} + +export const rehypeHighlight: Plugin<[HighlightOptions?], Root> = (options) => { + const theme = options?.theme || 'one-dark-pro' + const skipLanguages = options?.skipLanguages || new Set() + + return async (tree) => { + const highlighter = await getHighlighter() + + // Collect all code blocks to process + const codeBlocks: Array<{ + node: Element + parent: Element + index: number + lang: string 
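+      // code: raw text content of the <code> child, collected by getCodeContent below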
+ code: string + }> = [] + + visit(tree, 'element', (node: Element, index: number | undefined, parent) => { + if (index === undefined || !parent) return + if (node.tagName !== 'pre') return + + const codeChild = node.children.find( + (child): child is Element => child.type === 'element' && child.tagName === 'code' + ) + + if (!codeChild) return + + const lang = getLanguageFromClass(codeChild.properties?.className) + if (!lang) return + + // Skip if this language is handled by placeholders + if (skipLanguages.has(lang)) return + + const code = getCodeContent(codeChild) + + codeBlocks.push({ + node, + parent: parent as Element, + index, + lang, + code, + }) + }) + + // Process each code block + for (const block of codeBlocks) { + const langAvailable = await ensureLanguage(highlighter, block.lang) + const effectiveLang = langAvailable ? block.lang : 'text' + + try { + // Use codeToHast to get proper HAST nodes instead of raw HTML + const hast = highlighter.codeToHast(block.code, { + lang: effectiveLang, + theme, + }) + + // The result is a root node with a single pre element + // Extract the pre element and wrap it in a not-prose div + const preElement = hast.children[0] as Element + + // Wrap in not-prose div like Comrak does + const wrapper: Element = { + type: 'element', + tagName: 'div', + properties: { class: 'not-prose' }, + children: [preElement], + } + + // Replace the original pre element with the wrapped highlighted version + block.parent.children[block.index] = wrapper + } catch (error) { + // If highlighting fails, leave the code block as-is + console.warn(`Failed to highlight ${block.lang}:`, error) + } + } + } +} + +/** + * Pre-initialize the highlighter (call on app startup for better UX) + */ +export async function initHighlighter(): Promise { + await getHighlighter() +} + +export default rehypeHighlight diff --git a/app/src/features/markdown/plugins/index.ts b/app/src/features/markdown/plugins/index.ts new file mode 100644 index 00000000..84ce4b07 --- /dev/null +++ b/app/src/features/markdown/plugins/index.ts @@ -0,0 +1,23 @@ +/** + * Markdown plugins for unified/remark/rehype pipeline + */ + +// Remark plugins (mdast) +export { remarkSourcepos } from './sourcepos' + +// Rehype plugins (hast) +export { rehypeWikilink } from './wikilink' +export { rehypeHashtag } from './hashtag' +export { rehypeMention } from './mention' + +export { rehypePlaceholder, resetPlaceholderCounter } from './placeholder' +export type { PlaceholderOptions } from './placeholder' + +export { rehypeHighlight, initHighlighter } from './highlight' +export type { HighlightOptions } from './highlight' + +export { rehypeAttachments } from './attachments' +export type { AttachmentOptions } from './attachments' + +// Sanitization schema +export { refmdSanitizeSchema } from './sanitize' diff --git a/app/src/features/markdown/plugins/mention.ts b/app/src/features/markdown/plugins/mention.ts new file mode 100644 index 00000000..6c655bc5 --- /dev/null +++ b/app/src/features/markdown/plugins/mention.ts @@ -0,0 +1,121 @@ +/** + * Mention plugin for rehype + * Transforms #mention:target syntax to elements + * + * Comrak compatibility: + * - #mention:user -> #mention:user + * - Pattern: #mention:[A-Za-z0-9-_:@.]+ + */ + +import type { Root, Element, Text } from 'hast' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +// Characters that prevent mention recognition when immediately preceding # +const SKIP_PREV_CHARS = /[A-Za-z0-9\/:@.\-_+~=?&%]$/ + +// Mention pattern: #mention: 
followed by valid characters +const MENTION_PATTERN = /#mention:([A-Za-z0-9\-_:@.]+)/g + +function createMentionElement(target: string): Element { + const href = `#mention:${target}` + const text = `#mention:${target}` + + return { + type: 'element', + tagName: 'a', + properties: { + href: href, + class: 'mention', + 'data-mention-target': target, + }, + children: [{ type: 'text', value: text }], + } +} + +interface ProcessedNode { + type: 'text' | 'element' + node: Text | Element +} + +function processText(text: string): ProcessedNode[] { + const result: ProcessedNode[] = [] + let lastIndex = 0 + + // Reset regex state + MENTION_PATTERN.lastIndex = 0 + + let match: RegExpExecArray | null + while ((match = MENTION_PATTERN.exec(text)) !== null) { + const matchIndex = match.index + const target = match[1] + + // Check if preceded by a character that should prevent matching + if (matchIndex > 0) { + const prevChar = text[matchIndex - 1] + if (SKIP_PREV_CHARS.test(prevChar)) { + continue + } + } + + // Add text before match + if (matchIndex > lastIndex) { + result.push({ + type: 'text', + node: { type: 'text', value: text.slice(lastIndex, matchIndex) } + }) + } + + // Add mention element + result.push({ + type: 'element', + node: createMentionElement(target), + }) + + lastIndex = matchIndex + match[0].length + } + + // Add remaining text + if (lastIndex < text.length) { + result.push({ + type: 'text', + node: { type: 'text', value: text.slice(lastIndex) } + }) + } + + return result +} + +export const rehypeMention: Plugin<[], Root> = () => { + return (tree) => { + visit(tree, 'text', (node: Text, index: number | undefined, parent) => { + if (index === undefined || !parent || parent.type !== 'element') return + + const text = node.value + if (!text.includes('#mention:')) { + return + } + + // Skip if inside a link + const parentElement = parent as Element + if (parentElement.tagName === 'a') { + return + } + + const processed = processText(text) + + if (processed.length === 0) { + return + } + + if (processed.length === 1 && processed[0].type === 'text' && (processed[0].node as Text).value === text) { + return + } + + const newNodes = processed.map((p) => p.node) + parentElement.children.splice(index, 1, ...newNodes) + }) + } +} + +export default rehypeMention diff --git a/app/src/features/markdown/plugins/placeholder.ts b/app/src/features/markdown/plugins/placeholder.ts new file mode 100644 index 00000000..00d61d16 --- /dev/null +++ b/app/src/features/markdown/plugins/placeholder.ts @@ -0,0 +1,106 @@ +/** + * Placeholder plugin for rehype + * Transforms specific code blocks into placeholder divs for plugin rendering + * + * Comrak compatibility: + * - Code blocks with specific languages become: + *
+ *   <div data-refmd-placeholder="true" data-placeholder-id="p1" data-placeholder-kind="mermaid"></div>
+ */ + +import type { Root, Element } from 'hast' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +import type { PlaceholderItem } from '../types' + +export interface PlaceholderOptions { + /** Set of code block languages to convert to placeholders */ + kinds: Set + /** Callback to collect placeholder items */ + onPlaceholder?: (item: PlaceholderItem) => void +} + +let placeholderCounter = 0 + +export const rehypePlaceholder: Plugin<[PlaceholderOptions?], Root> = (options) => { + const kinds = options?.kinds || new Set() + const onPlaceholder = options?.onPlaceholder + + return (tree) => { + if (kinds.size === 0) { + return + } + + visit(tree, 'element', (node: Element, index: number | undefined, parent) => { + if (index === undefined || !parent) return + + // Look for
<pre><code class="language-xxx">...</code></pre>
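+      // Illustrative example (values hypothetical): a ```mermaid fence arrives here as
+      //   <pre><code class="language-mermaid">graph TD; A --> B;</code></pre>
+      // and is replaced below by
+      //   <div data-refmd-placeholder="true" data-placeholder-id="p1" data-placeholder-kind="mermaid"></div>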
+ if (node.tagName !== 'pre') return + + const codeChild = node.children.find( + (child): child is Element => child.type === 'element' && child.tagName === 'code' + ) + + if (!codeChild) return + + // Get language from class + const className = codeChild.properties?.className + if (!className || !Array.isArray(className)) return + + const langClass = className.find( + (c): c is string => typeof c === 'string' && c.startsWith('language-') + ) + + if (!langClass) return + + const lang = langClass.slice('language-'.length).toLowerCase() + + if (!kinds.has(lang)) return + + // Extract code content + const codeContent = codeChild.children + .map((child) => { + if (child.type === 'text') return child.value + return '' + }) + .join('') + + // Generate unique ID + placeholderCounter++ + const id = `p${placeholderCounter}` + + // Report placeholder + if (onPlaceholder) { + onPlaceholder({ + kind: lang, + id, + code: codeContent, + }) + } + + // Replace with placeholder div + const placeholderDiv: Element = { + type: 'element', + tagName: 'div', + properties: { + 'data-refmd-placeholder': 'true', + 'data-placeholder-id': id, + 'data-placeholder-kind': lang, + }, + children: [], + } + + // Replace the pre element with the placeholder div + ;(parent as Element).children[index] = placeholderDiv + }) + } +} + +/** + * Reset placeholder counter (useful for testing) + */ +export function resetPlaceholderCounter(): void { + placeholderCounter = 0 +} + +export default rehypePlaceholder diff --git a/app/src/features/markdown/plugins/sanitize.ts b/app/src/features/markdown/plugins/sanitize.ts new file mode 100644 index 00000000..1fa17a83 --- /dev/null +++ b/app/src/features/markdown/plugins/sanitize.ts @@ -0,0 +1,122 @@ +/** + * Sanitization schema for rehype-sanitize + * Compatible with Ammonia settings used in Comrak backend + * + * Ammonia compatibility: + * - Allow common attributes: class, id, title, data-* + * - Allow placeholder metadata attributes + * - Allow code-related tags with style for syntax highlighting + * - Allow input for tasklist checkboxes + * - Allow custom elements like refmd-wikilink + */ + +import { defaultSchema } from 'rehype-sanitize' + +/** + * Merge two arrays, avoiding duplicates + */ +function mergeArrays(a: T[] | null | undefined, b: T[]): T[] { + const set = new Set([...(a || []), ...b]) + return Array.from(set) +} + +/** + * Sanitization schema compatible with Ammonia + */ +export const refmdSanitizeSchema = { + ...defaultSchema, + + // Allow custom elements + tagNames: mergeArrays(defaultSchema.tagNames, [ + // Custom components + 'refmd-wikilink', + // Form elements for tasklist + 'input', + // Keep existing allowed tags + 'pre', + 'code', + 'span', + 'div', + ]), + + attributes: { + ...defaultSchema.attributes, + + // Global attributes allowed on all elements + '*': mergeArrays(defaultSchema.attributes?.['*'], [ + 'className', + 'class', + 'id', + 'title', + // Placeholder attributes + 'data-refmd-placeholder', + 'data-placeholder-id', + 'data-placeholder-kind', + 'data-placeholder-hydrate', + 'data-placeholder-hydrate-export', + 'data-placeholder-hydrate-context', + 'data-placeholder-plugin', + 'data-placeholder-version', + 'data-placeholder-scope', + // Editor sync + 'data-sourcepos', + // Link metadata + 'data-wiki-target', + 'data-mention-target', + 'data-embed-target', + // Syntax highlighting (inline styles) + 'style', + ]), + + // Custom wikilink element + 'refmd-wikilink': ['target', 'href', 'variant', 'class'], + + // Tasklist checkbox + input: ['type', 
'checked', 'disabled', 'class'], + + // Links + a: mergeArrays(defaultSchema.attributes?.a, [ + 'href', + 'rel', + 'target', + 'class', + 'data-wiki-target', + 'data-mention-target', + ]), + + // Images + img: mergeArrays(defaultSchema.attributes?.img, ['src', 'alt', 'title', 'class']), + + // Code blocks with syntax highlighting + pre: ['class', 'style', 'data-language'], + code: ['class', 'style', 'data-language'], + span: ['class', 'style'], + + // Divs for placeholders and not-prose wrapper + div: ['class', 'style', 'data-refmd-placeholder', 'data-placeholder-id', 'data-placeholder-kind'], + }, + + // Allow relative URLs and hash URLs + protocols: { + ...defaultSchema.protocols, + href: ['http', 'https', 'mailto', '#', 'data'], + src: ['http', 'https', 'data'], + }, + + // Strip certain elements completely + strip: ['script', 'style'], + + // Allow clobbering for specific attributes (needed for some functionality) + clobberPrefix: '', + clobber: [], + + // Required attributes + required: { + input: { + type: 'checkbox', + disabled: true, + }, + }, +} + +export default refmdSanitizeSchema diff --git a/app/src/features/markdown/plugins/sourcepos.ts b/app/src/features/markdown/plugins/sourcepos.ts new file mode 100644 index 00000000..1562a288 --- /dev/null +++ b/app/src/features/markdown/plugins/sourcepos.ts @@ -0,0 +1,37 @@ +/** + * Sourcepos plugin for remark + * Adds data-sourcepos attributes to elements for editor<->preview sync + * + * Comrak compatibility: + * - Adds data-sourcepos="startLine:startCol-endLine:endCol" to all elements + */ + +import type { Root } from 'mdast' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +export const remarkSourcepos: Plugin<[], Root> = () => { + return (tree) => { + visit(tree, (node) => { + if (!node.position) { + return + } + + const { start, end } = node.position + const sourcepos = `${start.line}:${start.column}-${end.line}:${end.column}` + + // Create data object if it doesn't exist + const nodeWithData = node as typeof node & { + data?: { + hProperties?: Record + } + } + + nodeWithData.data = nodeWithData.data || {} + nodeWithData.data.hProperties = nodeWithData.data.hProperties || {} + nodeWithData.data.hProperties['data-sourcepos'] = sourcepos + }) + } +} + +export default remarkSourcepos diff --git a/app/src/features/markdown/plugins/wikilink.ts b/app/src/features/markdown/plugins/wikilink.ts new file mode 100644 index 00000000..7320f903 --- /dev/null +++ b/app/src/features/markdown/plugins/wikilink.ts @@ -0,0 +1,175 @@ +/** + * Wikilink plugin for rehype + * Transforms [[target]] and #wiki:target syntax to elements + * + * Comrak compatibility: + * - [[target]] -> target + * - [[target|alias]] -> uses alias as display text + * - [[target|alias|inline]] -> variant="inline" + * - #wiki:target -> same transformation + */ + +import type { Root, Element, Text } from 'hast' +import type { Plugin } from 'unified' +import { visit } from 'unist-util-visit' + +function normalizeWikilinkLabel(raw: string): { label: string; inline: boolean } { + let label = raw.trim() + if (!label) { + return { label: '', inline: false } + } + + let inline = false + const lower = label.toLowerCase() + + // Check for |inline suffix + const pipeInlinePos = lower.lastIndexOf('|inline') + if (pipeInlinePos !== -1 && lower.slice(pipeInlinePos).trim() === '|inline') { + label = label.slice(0, pipeInlinePos).trimEnd() + inline = true + } + + // Remove leading # if present + if (label.startsWith('#')) { + label = label.slice(1) + } + + // 
Remove wiki: prefix if present + if (label.toLowerCase().startsWith('wiki:')) { + label = label.slice(5).trim() + } + + return { label: label.trim(), inline } +} + +function createWikilinkElement(target: string, displayLabel: string, inline: boolean): Element { + const variant = inline ? 'inline' : 'embed' + const href = `#wiki:${target}` + + return { + type: 'element', + tagName: 'refmd-wikilink', + properties: { + class: 'wikilink', + target: target, + href: href, + variant: variant, + }, + children: [{ type: 'text', value: displayLabel }], + } +} + +interface ProcessedNode { + type: 'text' | 'element' + node: Text | Element +} + +function processText(text: string): ProcessedNode[] { + const result: ProcessedNode[] = [] + let lastIndex = 0 + + // Combine both patterns and process in order + const combined = /(\[\[[^\]]+\]\])|(#wiki:[A-Za-z0-9:\-\/_]+)/g + + let match: RegExpExecArray | null + while ((match = combined.exec(text)) !== null) { + // Add text before match + if (match.index > lastIndex) { + const textBefore = text.slice(lastIndex, match.index) + result.push({ + type: 'text', + node: { type: 'text', value: textBefore } + }) + } + + if (match[1]) { + // Bracket wikilink [[target]] or [[target|alias]] + const inside = match[1].slice(2, -2) // Remove [[ and ]] + const parts = inside.split('|') + const target = parts[0].trim() + + if (target) { + // Get alias (everything after first |) or use target + const aliasRaw = parts.length > 1 ? parts.slice(1).join('|') : target + const { label, inline } = normalizeWikilinkLabel(aliasRaw) + const displayLabel = label || target + + result.push({ + type: 'element', + node: createWikilinkElement(target, displayLabel, inline), + }) + } else { + // Empty target, keep as literal + result.push({ + type: 'text', + node: { type: 'text', value: match[0] } + }) + } + } else if (match[2]) { + // Hash wiki #wiki:target + const full = match[2] + const target = full.slice(6) // Remove #wiki: + + // Check previous character to avoid matching in URLs + if (match.index > 0) { + const prevChar = text[match.index - 1] + if (/[A-Za-z0-9\/:@.\-_+~=?&%]/.test(prevChar)) { + result.push({ + type: 'text', + node: { type: 'text', value: full } + }) + lastIndex = match.index + full.length + continue + } + } + + const { label, inline } = normalizeWikilinkLabel(full) + result.push({ + type: 'element', + node: createWikilinkElement(target, label || target, inline), + }) + } + + lastIndex = match.index + match[0].length + } + + // Add remaining text + if (lastIndex < text.length) { + result.push({ + type: 'text', + node: { type: 'text', value: text.slice(lastIndex) } + }) + } + + return result +} + +export const rehypeWikilink: Plugin<[], Root> = () => { + return (tree) => { + visit(tree, 'text', (node: Text, index: number | undefined, parent) => { + if (index === undefined || !parent || parent.type !== 'element') return + + const text = node.value + if (!text.includes('[[') && !text.includes('#wiki:')) { + return + } + + const processed = processText(text) + + if (processed.length === 0) { + return + } + + if (processed.length === 1 && processed[0].type === 'text') { + // No changes needed + return + } + + // Replace the text node with the processed nodes + const newNodes = processed.map((p) => p.node) + ;(parent as Element).children.splice(index, 1, ...newNodes) + }) + } +} + +export default rehypeWikilink diff --git a/app/src/features/markdown/renderer.ts b/app/src/features/markdown/renderer.ts new file mode 100644 index 00000000..0eb90174 --- /dev/null +++ 
b/app/src/features/markdown/renderer.ts @@ -0,0 +1,177 @@ +/** + * Client-side Markdown renderer using unified/remark/rehype + * Replaces server-side Comrak rendering for E2EE support + */ + +import rehypeKatex from 'rehype-katex' +import rehypeSanitize from 'rehype-sanitize' +import rehypeStringify from 'rehype-stringify' +import remarkBreaks from 'remark-breaks' +import remarkGfm from 'remark-gfm' +import remarkMath from 'remark-math' +import remarkParse from 'remark-parse' +import remarkRehype from 'remark-rehype' +import { unified } from 'unified' + +import { + remarkSourcepos, + rehypeWikilink, + rehypeHashtag, + rehypeMention, + rehypePlaceholder, + rehypeHighlight, + rehypeAttachments, + refmdSanitizeSchema, + resetPlaceholderCounter, +} from './plugins' +import type { RenderOptions, RenderResponse, PlaceholderItem } from './types' + +/** + * Compute SHA-256 hash for cache key + */ +async function computeHash(text: string, options: RenderOptions): Promise { + const optionsStr = JSON.stringify(options) + const canonical = `${text}\n${optionsStr}` + + const encoder = new TextEncoder() + const data = encoder.encode(canonical) + const hashBuffer = await crypto.subtle.digest('SHA-256', data) + const hashArray = Array.from(new Uint8Array(hashBuffer)) + return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('') +} + +/** + * Check if a feature is enabled + */ +function wantsFeature(options: RenderOptions, name: string): boolean { + if (options.features) { + return options.features.some((f) => f.toLowerCase() === name.toLowerCase()) + } + + // Default behavior matching Comrak + switch (name.toLowerCase()) { + case 'gfm': + // GFM enabled by default unless flavor is 'commonmark' + return options.flavor?.toLowerCase() !== 'commonmark' + case 'highlight': + return false + default: + return false + } +} + +/** + * Render Markdown to HTML + * + * @param markdown - Source Markdown text + * @param options - Rendering options + * @returns Rendered HTML, placeholders, and hash + */ +export async function renderMarkdown( + markdown: string, + options: RenderOptions = {} +): Promise { + // Reset placeholder counter for consistent IDs + resetPlaceholderCounter() + + const placeholders: PlaceholderItem[] = [] + + // Determine which features to enable + const enableGfm = wantsFeature(options, 'gfm') + const enableHighlight = wantsFeature(options, 'highlight') + const enableSanitize = options.sanitize ?? true + + // Build placeholder kinds set + const placeholderKinds = new Set(options.placeholderKinds || []) + + // Create unified processor pipeline + const processor = unified().use(remarkParse) + + // Add GFM support if enabled + if (enableGfm) { + processor.use(remarkGfm) + } + + // Add line break support (soft breaks become
<br>
) + processor.use(remarkBreaks) + + // Add math support + processor.use(remarkMath) + + // Add sourcepos (remark level, before conversion to hast) + processor.use(remarkSourcepos) + + // Convert to rehype (HTML AST) + processor.use(remarkRehype, { + allowDangerousHtml: true, + }) + + // Apply rehype plugins for custom transformations + // Order: mention before hashtag to catch #mention: first + // These must run BEFORE sanitization + processor.use(rehypeWikilink) + processor.use(rehypeMention) + processor.use(rehypeHashtag) + + // Add rehype plugins + // Placeholder must come before highlight to prevent highlighting placeholder code + if (placeholderKinds.size > 0) { + processor.use(rehypePlaceholder, { + kinds: placeholderKinds, + onPlaceholder: (item) => placeholders.push(item), + }) + } + + // Add syntax highlighting if enabled + if (enableHighlight) { + processor.use(rehypeHighlight, { + theme: options.theme || 'one-dark-pro', + skipLanguages: placeholderKinds, + }) + } + + // Add KaTeX for math rendering + processor.use(rehypeKatex) + + // Rewrite attachment URLs + processor.use(rehypeAttachments, { + docId: options.docId, + token: options.token, + baseOrigin: options.baseOrigin, + absoluteAttachments: options.absoluteAttachments, + }) + + // Sanitize HTML if enabled (must be last before stringify) + if (enableSanitize) { + processor.use(rehypeSanitize, refmdSanitizeSchema) + } + + // Stringify to HTML + processor.use(rehypeStringify, { + allowDangerousHtml: true, + }) + + // Process the markdown + const file = await processor.process(markdown) + const html = String(file) + + // Compute hash for caching + const hash = await computeHash(markdown, options) + + return { + html, + placeholders, + hash, + } +} + +/** + * Render multiple Markdown documents + */ +export async function renderMarkdownMany( + items: Array<{ text: string; options?: RenderOptions }> +): Promise { + return Promise.all(items.map((item) => renderMarkdown(item.text, item.options))) +} + +export { initHighlighter } from './plugins' diff --git a/app/src/features/markdown/types.ts b/app/src/features/markdown/types.ts new file mode 100644 index 00000000..92f103fd --- /dev/null +++ b/app/src/features/markdown/types.ts @@ -0,0 +1,74 @@ +/** + * Markdown rendering types + * Compatible with backend Comrak API + */ + +export interface RenderOptions { + /** Document flavor: 'doc' | 'commonmark' */ + flavor?: string + /** Enabled features: ['gfm', 'highlight'] */ + features?: string[] + /** Treat soft line breaks as
<br> 
*/ + hardbreaks?: boolean + /** Syntax highlight theme name */ + theme?: string + /** Document ID for attachment URL rewriting */ + docId?: string + /** Share token for attachment URLs */ + token?: string + /** Base origin for absolute URLs */ + baseOrigin?: string + /** Plugin placeholder kinds to detect */ + placeholderKinds?: string[] + /** Enable HTML sanitization (default: true) */ + sanitize?: boolean + /** Rewrite attachment URLs to absolute */ + absoluteAttachments?: boolean +} + +export interface PlaceholderItem { + kind: string + id: string + code: string +} + +export interface RenderResponse { + html: string + placeholders: PlaceholderItem[] + hash: string +} + +/** + * API-compatible request type + */ +export interface RenderRequest { + text: string + flavor?: string + features?: string[] + hardbreaks?: boolean + theme?: string + doc_id?: string + token?: string + base_origin?: string + placeholder_kinds?: string[] + sanitize?: boolean + absolute_attachments?: boolean +} + +/** + * Convert API request to internal options + */ +export function requestToOptions(request: RenderRequest): RenderOptions { + return { + flavor: request.flavor, + features: request.features, + hardbreaks: request.hardbreaks, + theme: request.theme, + docId: request.doc_id, + token: request.token, + baseOrigin: request.base_origin, + placeholderKinds: request.placeholder_kinds, + sanitize: request.sanitize, + absoluteAttachments: request.absolute_attachments, + } +} diff --git a/app/src/entities/plugin/hooks/usePluginExecutor.ts b/app/src/features/plugins/hooks/usePluginExecutor.ts similarity index 66% rename from app/src/entities/plugin/hooks/usePluginExecutor.ts rename to app/src/features/plugins/hooks/usePluginExecutor.ts index 4d0d3103..e21f6db1 100644 --- a/app/src/entities/plugin/hooks/usePluginExecutor.ts +++ b/app/src/features/plugins/hooks/usePluginExecutor.ts @@ -3,12 +3,17 @@ import { toast } from 'sonner' import { API_BASE_URL } from '@/shared/lib/config' -import { execPluginAction, getPluginKv } from '../api' -import type { PluginManifestItem } from '../api' +import { getPluginKv, type PluginManifestItem } from '@/entities/plugin/api' + +import { handleEffects as handleEffectsFull } from '@/features/plugins/lib/effect-handler' +import { loadPluginWasm, hasPluginWasm } from '@/features/plugins/lib/wasm-loader' +import { getWasmRuntime } from '@/features/plugins/lib/wasm-runtime' +import { getKeyVaultService, fetchDocumentDek } from '@/features/security' type Options = { plugins: PluginManifestItem[] shareToken?: string | null + workspaceId?: string | null refreshDocuments: () => void navigate: (to: string) => void getCurrentDocumentId: () => string | null @@ -23,9 +28,31 @@ type PluginModule = { const uuidPattern = /[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}/ +/** + * Get document DEK for E2EE encryption. + * Returns null if E2EE is not available or document has no DEK. 
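+ *
+ * Illustrative call (a sketch; the argument values are assumptions, not fixed API):
+ *   const dek = await getDocumentDEK('doc-123', 'ws-456')
+ *   // dek: Uint8Array DEK to pass as `documentDEK`, or null when the key vault
+ *   // is locked or not initialized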
+ */ +async function getDocumentDEK( + docId: string | null, + workspaceId: string | null +): Promise { + if (!docId || !workspaceId) return null + + try { + const service = getKeyVaultService() + if (!service.isInitialized || !service.isUnlocked) return null + + return await fetchDocumentDek(docId, workspaceId) + } catch { + // E2EE not available for this document + return null + } +} + export function usePluginExecutor({ plugins, shareToken, + workspaceId, refreshDocuments, navigate, getCurrentDocumentId, @@ -190,8 +217,21 @@ export function usePluginExecutor({ else toast(message) }, api: { - exec: (actionName: string, payload: any) => - execPluginAction(pluginId, actionName, payload, shareToken ?? undefined), + exec: async (actionName: string, payload: any) => { + const manifest = plugins.find((p) => p.id === pluginId) + if (!manifest) { + throw new Error(`Plugin ${pluginId} manifest not found`) + } + if (!hasPluginWasm(manifest)) { + throw new Error(`Plugin ${pluginId} has no WASM module`) + } + const runtime = getWasmRuntime() + if (!runtime.isLoaded(pluginId)) { + const wasmUrl = await loadPluginWasm(pluginId, manifest) + await runtime.loadPlugin(pluginId, wasmUrl) + } + return runtime.execute(pluginId, actionName, payload, { docId: null, userId: null }) + }, }, } @@ -215,58 +255,83 @@ export function usePluginExecutor({ } } - handleEffects(result?.effects, navigate) - if (!result?.effects || result.effects.length === 0) { - toast.success('Action executed') + // Handle effects with full effect handler + if (result?.effects && result.effects.length > 0) { + // Get document DEK for E2EE encryption + const effectDocId = selectedDocId || null + const documentDEK = await getDocumentDEK(effectDocId, workspaceId ?? null) + + await handleEffectsFull(result.effects, { + pluginId, + docId: effectDocId, + workspaceId: workspaceId ?? null, + documentDEK, + token: shareToken ?? null, + navigate, + }) + } else if (result?.ok === false && result?.error) { + toast.error(result.error.message || result.error.code || 'Action failed') } refreshDocuments() return } - let response = await execPluginAction(pluginId, action, defaultPayload, shareToken ?? undefined) - const errCode = (response as any)?.error?.code - const errMsg = String((response as any)?.error?.message || '') + // Fallback: use client-side WASM execution + const manifest = plugins.find((p) => p.id === pluginId) + if (!manifest) { + toast.error(`Plugin ${pluginId} not found`) + return + } + if (!hasPluginWasm(manifest)) { + toast.error(`Plugin ${pluginId} has no executable module`) + return + } + const runtime = getWasmRuntime() + if (!runtime.isLoaded(pluginId)) { + const wasmUrl = await loadPluginWasm(pluginId, manifest) + await runtime.loadPlugin(pluginId, wasmUrl) + } + let response = await runtime.execute(pluginId, action, defaultPayload, { docId: null, userId: null }) + const errCode = response?.error?.code + const errMsg = String(response?.error?.message || '') if (errCode === 'BAD_REQUEST' && errMsg.toLowerCase().includes('docid')) { const input = await resolveRequestDocumentId() if (input) { - response = await execPluginAction( + response = await runtime.execute( pluginId, action, { ...(defaultPayload || {}), docId: input }, - shareToken ?? 
undefined, + { docId: input, userId: null }, ) } else { toast.error('Select a document before running this command') return } } - handleEffects(response?.effects, navigate) - if (!response?.effects || response.effects.length === 0) { - toast.success('Action executed') + // Handle effects with full effect handler + const effectDocId = selectedDocId || null + if (response?.effects && response.effects.length > 0) { + // Get document DEK for E2EE encryption + const documentDEK = await getDocumentDEK(effectDocId, workspaceId ?? null) + + await handleEffectsFull(response.effects, { + pluginId, + docId: effectDocId, + workspaceId: workspaceId ?? null, + documentDEK, + token: shareToken ?? null, + navigate, + }) + } else if (response?.ok === false && response?.error) { + toast.error(response.error.message || response.error.code || 'Action failed') } refreshDocuments() } catch (err: any) { toast.error(err?.message || 'Failed to execute command') } }, - [apiOrigin, getCurrentDocumentId, importPluginModule, navigate, refreshDocuments, resolveRequestDocumentId], + [apiOrigin, getCurrentDocumentId, importPluginModule, navigate, plugins, refreshDocuments, resolveRequestDocumentId, shareToken, workspaceId], ) return { runPluginCommand, resolveDocRoute } } - -function handleEffects(effects: any[], navigate: (to: string) => void) { - if (!Array.isArray(effects)) return - for (const effect of effects) { - if (!effect || typeof effect !== 'object') continue - if (effect.type === 'navigate' && typeof effect.to === 'string') { - navigate(effect.to) - } else if (effect.type === 'showToast' && typeof effect.message === 'string') { - const level = effect.level || 'info' - if (level === 'success') toast.success(effect.message) - else if (level === 'warn' || level === 'warning') (toast as any).warning?.(effect.message) || toast(effect.message) - else if (level === 'error') toast.error(effect.message) - else toast(effect.message) - } - } -} diff --git a/app/src/features/plugins/lib/effect-handler.ts b/app/src/features/plugins/lib/effect-handler.ts new file mode 100644 index 00000000..53fc2f84 --- /dev/null +++ b/app/src/features/plugins/lib/effect-handler.ts @@ -0,0 +1,262 @@ +/** + * Effect Handler + * + * Processes effects returned by plugin WASM execution. + * Handles encryption for E2EE documents before sending to server. 
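 *
 * Minimal usage sketch (mirrors the call sites in usePluginExecutor; the
 * concrete values here are illustrative):
 *
 *   await handleEffects(result.effects, {
 *     pluginId,
 *     docId: selectedDocId ?? null,
 *     workspaceId: workspaceId ?? null,
 *     documentDEK,              // Uint8Array | null; required for E2EE writes
 *     token: shareToken ?? null,
 *     navigate,
 *   })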
+ */ + +import { toast } from 'sonner' + +import { + createDocument as apiCreateDocument, +} from '@/entities/document' +import { + createPluginRecord as apiCreateRecord, + updatePluginRecord as apiUpdateRecord, + deletePluginRecord as apiDeleteRecord, + putPluginKv as apiPutKv, +} from '@/entities/plugin/api' + +import { createDocumentDekIfNeeded, getDocumentDekForPlugin } from '@/features/security/lib/document-keys' +import { encryptRecordData, encryptKV } from '@/features/security/lib/plugins' + +import type { Effect } from './wasm-runtime' + +/** Context for effect handling */ +export interface EffectHandlerContext { + pluginId: string + docId: string | null + workspaceId: string | null + documentDEK: Uint8Array | null + token?: string | null + navigate: (to: string) => void +} + +/** + * Resolve :createdDocId placeholder in a string + */ +function resolveCreatedDocId(value: string, createdDocId: string | null): string { + if (!createdDocId) return value + return value.replace(/:createdDocId/g, createdDocId) +} + +/** + * Get the effective docId, resolving :createdDocId if present + */ +function resolveDocId( + effectDocId: unknown, + contextDocId: string | null, + createdDocId: string | null +): string | null { + if (typeof effectDocId === 'string') { + const resolved = resolveCreatedDocId(effectDocId, createdDocId) + if (resolved && resolved !== ':createdDocId') { + return resolved + } + } + return createdDocId ?? contextDocId +} + +/** + * Handle effects returned by plugin WASM execution. + * + * For E2EE documents, data is encrypted before being sent to the server. + * + * @param effects - Array of effects to process + * @param ctx - Effect handler context + */ +export async function handleEffects( + effects: Effect[], + ctx: EffectHandlerContext +): Promise { + // Track document ID created by createDocument effect + let createdDocId: string | null = null + // Track DEK for newly created document + let createdDocDEK: Uint8Array | null = null + + for (const effect of effects) { + if (!effect || typeof effect !== 'object') continue + + try { + switch (effect.type) { + case 'createDocument': { + const title = typeof effect.title === 'string' ? effect.title : 'Untitled' + const docType = effect.docType as string | undefined + const parentId = effect.parentId as string | undefined + + // Create the document + const response = await apiCreateDocument({ + title, + parent_id: parentId ?? null, + type: docType === 'folder' ? 'folder' : 'document', + }) + + // Store the created document ID for subsequent effects + const newDocId = (response as any)?.id + if (typeof newDocId === 'string') { + createdDocId = newDocId + + // Create and fetch DEK for the new document if E2EE is enabled + if (ctx.workspaceId) { + try { + await createDocumentDekIfNeeded(newDocId, ctx.workspaceId) + // Fetch the DEK for subsequent effects + createdDocDEK = await getDocumentDekForPlugin(newDocId, ctx.workspaceId) + } catch (err) { + console.warn('[effect-handler] Failed to create/fetch document DEK:', err) + } + } + } + break + } + + case 'createRecord': { + const kind = effect.kind as string + if (!kind) { + console.warn('createRecord effect missing kind') + break + } + + let data = effect.data + const docId = resolveDocId(effect.docId, ctx.docId, createdDocId) + + if (!docId) { + console.warn('createRecord effect: no docId available') + break + } + + // E2EE encryption - use createdDocDEK for newly created documents + const effectDEK = (docId === createdDocId && createdDocDEK) ? 
createdDocDEK : ctx.documentDEK + if (data !== undefined) { + if (!effectDEK) { + throw new Error(`E2EE: DEK not available for createRecord on document ${docId}`) + } + data = await encryptRecordData(data, effectDEK, ctx.pluginId) + } + + await apiCreateRecord(ctx.pluginId, docId, kind, data, ctx.token ?? undefined) + break + } + + case 'updateRecord': { + const recordId = effect.recordId as string + if (!recordId) { + console.warn('updateRecord effect missing recordId') + break + } + + let patch = effect.patch + + // E2EE encryption (updateRecord doesn't have createdDocId context) + if (patch !== undefined) { + if (!ctx.documentDEK) { + throw new Error(`E2EE: DEK not available for updateRecord ${recordId}`) + } + patch = await encryptRecordData(patch, ctx.documentDEK, ctx.pluginId) + } + + await apiUpdateRecord(ctx.pluginId, recordId, patch) + break + } + + case 'deleteRecord': { + const recordId = effect.recordId as string + if (!recordId) { + console.warn('deleteRecord effect missing recordId') + break + } + + await apiDeleteRecord(ctx.pluginId, recordId) + break + } + + case 'putKv': { + const key = effect.key as string + if (!key) { + console.warn('putKv effect missing key') + break + } + + let value = effect.value + const docId = resolveDocId(effect.docId, ctx.docId, createdDocId) + + if (!docId) { + console.warn('putKv effect: no docId available') + break + } + + // E2EE encryption - use createdDocDEK for newly created documents + const effectDEK = (docId === createdDocId && createdDocDEK) ? createdDocDEK : ctx.documentDEK + if (value !== null && value !== undefined) { + if (!effectDEK) { + throw new Error(`E2EE: DEK not available for putKv on document ${docId}`) + } + value = await encryptKV(value, effectDEK, ctx.pluginId) + } + + await apiPutKv(ctx.pluginId, docId, key, value, ctx.token ?? undefined) + break + } + + case 'showToast': { + const message = effect.message as string + if (!message) break + + const level = (effect.level as string) ?? 'info' + + switch (level) { + case 'success': + toast.success(message) + break + case 'warn': + case 'warning': + toast.warning?.(message) ?? toast(message) + break + case 'error': + toast.error(message) + break + default: + toast(message) + } + break + } + + case 'navigate': { + let to = effect.to as string + if (!to) break + + // Resolve :createdDocId in navigation path + to = resolveCreatedDocId(to, createdDocId) + ctx.navigate(to) + break + } + + case 'log': { + const message = effect.message as string ?? '' + const level = (effect.level as string) ?? 
'info' + + switch (level) { + case 'debug': + console.debug('[plugin]', message) + break + case 'warn': + case 'warning': + console.warn('[plugin]', message) + break + case 'error': + console.error('[plugin]', message) + break + default: + console.log('[plugin]', message) + } + break + } + + default: + console.warn(`Unknown effect type: ${effect.type}`) + } + } catch (err) { + console.error(`Failed to handle effect ${effect.type}:`, err) + } + } +} diff --git a/app/src/features/plugins/lib/resolution.ts b/app/src/features/plugins/lib/resolution.ts index d6cc4627..be9917fe 100644 --- a/app/src/features/plugins/lib/resolution.ts +++ b/app/src/features/plugins/lib/resolution.ts @@ -382,13 +382,14 @@ export async function mountResolvedPlugin( match: DocumentPluginMatch, container: HTMLElement, mode: 'primary' | 'secondary', - options: { tweakHost?: (host: any) => void } = {}, + options: { tweakHost?: (host: any) => void; workspaceId?: string | null } = {}, ) { const host = await createPluginHost(match.manifest, { docId: match.docId, route: match.route, token: match.token ?? undefined, mode, + workspaceId: options.workspaceId ?? null, }) try { options.tweakHost?.(host) @@ -416,9 +417,10 @@ export async function mountRoutePlugin( setDocumentStatus?: (status?: string | null) => void setDocumentBadge?: (badge?: string | null) => void setDocumentActions?: (actions: DocumentHeaderAction[]) => void + workspaceId?: string | null } = {}, ) { - const { navigate, setDocumentId, setDocumentTitle, setDocumentStatus, setDocumentBadge, setDocumentActions } = options + const { navigate, setDocumentId, setDocumentTitle, setDocumentStatus, setDocumentBadge, setDocumentActions, workspaceId } = options const host = await createPluginHost(match.manifest, { mode: 'primary', navigate, @@ -426,6 +428,7 @@ export async function mountRoutePlugin( setDocumentStatus, setDocumentBadge, setDocumentActions, + workspaceId: workspaceId ?? 
null, }) try { ;(match.module as any).__host__ = host diff --git a/app/src/features/plugins/lib/runtime.ts b/app/src/features/plugins/lib/runtime.ts index 773dde58..5654f97e 100644 --- a/app/src/features/plugins/lib/runtime.ts +++ b/app/src/features/plugins/lib/runtime.ts @@ -3,8 +3,6 @@ import { toast } from 'sonner' import { - renderMarkdown, - renderMarkdownMany, uploadFile, me as fetchMe, OpenAPI, @@ -12,18 +10,27 @@ import { } from '@/shared/api' import type { DocumentHeaderAction } from '@/shared/types/document' +import * as documentWc from '@/entities/document/wc' import { - execPluginAction as apiExecPluginAction, getPluginKv as apiGetPluginKv, listPluginRecords as apiListPluginRecords, putPluginKv as apiPutPluginKv, } from '@/entities/plugin/api' +import { renderMarkdown, renderMarkdownMany } from '@/features/markdown' import { mountSplitEditorStage, type SplitEditorPreviewDelegate, type SplitEditorDocumentApi, } from '@/features/plugins/ui/SplitEditorHost' +import { getKeyVaultService, fetchDocumentDek } from '@/features/security' +import { decryptRecords, decryptKV, encryptKV } from '@/features/security/lib/plugins' + +import { handleEffects } from './effect-handler' +import { loadPluginWasm, hasPluginWasm } from './wasm-loader' +import { getWasmRuntime } from './wasm-runtime' + + export type HostMode = 'primary' | 'secondary' @@ -37,6 +44,10 @@ export type PluginHostContext = { setDocumentStatus?: (status?: string | null) => void setDocumentBadge?: (badge?: string | null) => void setDocumentActions?: (actions: DocumentHeaderAction[]) => void + /** Document DEK for E2EE encryption/decryption */ + documentDEK?: Uint8Array | null + /** Workspace ID for E2EE key management */ + workspaceId?: string | null } const pluginModuleCache = new Map>() @@ -128,6 +139,42 @@ export function extractDocIdFromRoute(route?: string | null) { return null } +/** + * Lazy DEK getter that caches the result after first fetch. + */ +function createLazyDEKGetter( + docId: string | null, + workspaceId: string | null, + initialDEK: Uint8Array | null +): () => Promise { + let cachedDEK: Uint8Array | null = initialDEK + let fetched = initialDEK !== null + + return async () => { + if (fetched) return cachedDEK + + if (!docId || !workspaceId) { + fetched = true + return null + } + + try { + const service = getKeyVaultService() + if (!service.isInitialized || !service.isUnlocked) { + fetched = true + return null + } + + cachedDEK = await fetchDocumentDek(docId, workspaceId) + fetched = true + return cachedDEK + } catch { + fetched = true + return null + } + } +} + export async function createPluginHost(manifest: ManifestItem, ctx: PluginHostContext) { const fallbackRoute = ctx.route ?? getWindowRoute() const resolvedDocId = ctx.docId ?? (fallbackRoute ? extractDocIdFromRoute(fallbackRoute) : null) @@ -163,24 +210,66 @@ export async function createPluginHost(manifest: ManifestItem, ctx: PluginHostCo } fallbackNavigate(target) } + + // Create lazy DEK getter that fetches on first use + const getDEK = createLazyDEKGetter( + resolvedDocId, + ctx.workspaceId ?? null, + ctx.documentDEK ?? 
null + ) + const host = { exec: async (action: string, args: any = {}) => { + // Get DEK lazily for E2EE operations + const documentDEK = await getDEK() + const hostHandled = await executeHostAction(action, args, { pluginId: manifest.id, docId: resolvedDocId, token: resolvedToken, navigate: performNavigate, + documentDEK, }) if (hostHandled) return hostHandled - const json = await apiExecPluginAction( - manifest.id, - action, - args, - resolvedToken ?? undefined, - ) - if (json?.effects) applyEffects(json.effects, performNavigate) - return json + // Client-side WASM execution (E2EE compatible) + + // Check if plugin has backend WASM + if (!hasPluginWasm(manifest)) { + return { + ok: false, + data: null, + effects: [], + error: { code: 'NO_BACKEND', message: `Plugin ${manifest.id} has no backend WASM` }, + } + } + + // Load WASM if not already loaded + const runtime = getWasmRuntime() + if (!runtime.isLoaded(manifest.id)) { + const wasmUrl = await loadPluginWasm(manifest.id, manifest) + await runtime.loadPlugin(manifest.id, wasmUrl) + } + + // Execute action in client-side WASM + const result = await runtime.execute(manifest.id, action, args, { + docId: resolvedDocId, + userId: null, + }) + + // Handle effects (with E2EE encryption) + if (result.effects && result.effects.length > 0) { + await handleEffects(result.effects, { + pluginId: manifest.id, + docId: resolvedDocId, + workspaceId: ctx.workspaceId ?? null, + documentDEK, + token: resolvedToken, + navigate: performNavigate, + }) + } + + return result }, navigate: performNavigate, toast: (level: string, message: string) => { @@ -191,31 +280,28 @@ export async function createPluginHost(manifest: ManifestItem, ctx: PluginHostCo origin: apiOrigin, api: { me: () => fetchMe(), - renderMarkdown: (text: string, options: any) => - renderMarkdown({ requestBody: { text, options } }), - renderMarkdownMany: (items: Array<{ text: string; options: any }>) => - renderMarkdownMany({ requestBody: { items } }), + renderMarkdown: (text: string, options?: any) => + renderMarkdown(text, options), + renderMarkdownMany: (items: Array<{ text: string; options?: any }>) => + renderMarkdownMany(items), }, ui: { hydrateAttachments: async (root: Element) => { if (!root) return - const wc = await import('@/entities/document/wc') try { - wc.upgradeAttachments(root) + documentWc.upgradeAttachments(root, ctx.docId ?? undefined) } catch {} }, hydrateWikiLinks: async (root: Element) => { if (!root) return - const wc = await import('@/entities/document/wc') try { - wc.upgradeWikiLinks(root) + documentWc.upgradeWikiLinks(root) } catch {} }, hydrateAll: async (root: Element) => { if (!root) return - const wc = await import('@/entities/document/wc') try { - wc.upgradeAll(root) + documentWc.upgradeAll(root, ctx.docId ?? 
undefined) } catch {} }, setDocumentTitle: (title?: string | null) => { @@ -413,32 +499,6 @@ function extractQueryParam(route: string, key: string) { } } -function applyEffects(effects: any[], navigate?: (to: string) => void) { - for (const effect of effects || []) { - if (!effect || typeof effect !== 'object') continue - if (effect.type === 'navigate' && typeof effect.to === 'string') { - if (navigate) { - navigate(effect.to) - } else { - try { - window.history.pushState({}, '', effect.to) - window.dispatchEvent(new PopStateEvent('popstate')) - } catch { - window.location.href = effect.to - } - } - } - if (effect.type === 'showToast' && typeof effect.message === 'string') { - const level = effect.level || 'info' - if (level === 'success') toast.success(effect.message) - else if (level === 'warn' || level === 'warning') - toast.warning?.(effect.message) || toast(effect.message) - else if (level === 'error') toast.error(effect.message) - else toast(effect.message) - } - } -} - async function loadHostYjs() { if (!sharedYjsImport) { sharedYjsImport = import('yjs') @@ -458,6 +518,8 @@ type HostActionContext = { docId: string | null token: string | null navigate: (to: string) => void + /** Document DEK for E2EE encryption/decryption */ + documentDEK: Uint8Array | null } async function executeHostAction( @@ -485,24 +547,45 @@ async function executeHostAction( const docId = ensureDocId(args?.docId) const kind = args?.kind if (typeof kind !== 'string' || !kind) throw fail('BAD_REQUEST', 'kind required') + if (!ctx.documentDEK) throw fail('E2EE_REQUIRED', 'DEK not available for records.list') const token = (args?.token ?? ctx.token) || undefined const response = await apiListPluginRecords(ctx.pluginId, docId, kind, token) + + // E2EE decryption + if (response?.items) { + const decryptedItems = await decryptRecords(response.items, ctx.documentDEK, ctx.pluginId) + return ok({ ...response, items: decryptedItems }) + } return ok(response) } case 'host.kv.get': { const docId = ensureDocId(args?.docId) const key = args?.key if (typeof key !== 'string' || !key) throw fail('BAD_REQUEST', 'key required') + if (!ctx.documentDEK) throw fail('E2EE_REQUIRED', 'DEK not available for kv.get') const token = (args?.token ?? ctx.token) || undefined const response = await apiGetPluginKv(ctx.pluginId, docId, key, token) + + // E2EE decryption + if (response?.value !== undefined) { + const decryptedValue = await decryptKV(response.value, ctx.documentDEK, ctx.pluginId) + return ok({ ...response, value: decryptedValue }) + } return ok(response) } case 'host.kv.put': { const docId = ensureDocId(args?.docId) const key = args?.key if (typeof key !== 'string' || !key) throw fail('BAD_REQUEST', 'key required') - const value = args?.value ?? null + let value = args?.value ?? null const token = (args?.token ?? 
ctx.token) || undefined + + // E2EE encryption (required) + if (value !== null) { + if (!ctx.documentDEK) throw fail('E2EE_REQUIRED', 'DEK not available for kv.put') + value = await encryptKV(value, ctx.documentDEK, ctx.pluginId) + } + const response = await apiPutPluginKv(ctx.pluginId, docId, key, value, token) return ok(response) } @@ -511,7 +594,8 @@ async function executeHostAction( const file: File | undefined = args?.file if (!(file instanceof File)) throw fail('BAD_REQUEST', 'file required') const response = await uploadFile({ - formData: { document_id: docId, file } as any, + docId, + formData: { file }, }) return ok(response) } diff --git a/app/src/features/plugins/lib/sandbox.ts b/app/src/features/plugins/lib/sandbox.ts new file mode 100644 index 00000000..c82cdcbd --- /dev/null +++ b/app/src/features/plugins/lib/sandbox.ts @@ -0,0 +1,122 @@ +/** + * Plugin Sandbox Configuration + * + * Provides CSP (Content Security Policy) settings for plugin iframes + * to prevent malicious plugins from exfiltrating decrypted data. + * + * ## Current Security Model + * + * Plugin frontends currently run in the main browser context (not in iframes). + * Security is enforced through: + * + * 1. **E2EE Encryption**: All plugin KV/Records data is transparently + * encrypted/decrypted using document-specific DEKs. Plugins never see + * encryption keys directly. + * + * 2. **Client-side WASM**: Plugin backend logic runs in the browser via + * Extism WASM runtime. No plaintext is sent to the server. + * + * 3. **Host API Gateway**: Plugins interact with the system only through + * the `host` object. Direct API calls are not exposed. + * + * 4. **Effect Processing**: WASM outputs effects that are processed by + * the host, with E2EE encryption applied before server communication. + * + * ## Future Iframe Isolation + * + * The CSP configurations below are prepared for future iframe-based + * plugin isolation. When implemented: + * - Plugins will run in sandboxed iframes + * - `connect-src: 'none'` will block all network requests + * - Communication will be via postMessage only + */ + +/** CSP directives for plugin sandboxes */ +export const PLUGIN_SANDBOX_CSP = { + 'default-src': ["'self'"], + 'connect-src': ["'none'"], // Block all network requests + 'script-src': ["'self'"], + 'style-src': ["'self'", "'unsafe-inline'"], + 'img-src': ["'self'", 'data:', 'blob:'], + 'font-src': ["'self'"], + 'object-src': ["'none'"], + 'base-uri': ["'self'"], + 'form-action': ["'none'"], + 'frame-ancestors': ["'none'"], +} as const + +/** + * Build CSP header string from directives. + */ +export function buildCspString( + directives: Record = PLUGIN_SANDBOX_CSP +): string { + return Object.entries(directives) + .map(([key, values]) => `${key} ${values.join(' ')}`) + .join('; ') +} + +/** + * Create a sandboxed iframe for plugin execution. 
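+ *
+ * Hypothetical usage sketch (not part of this diff; the plugin id and mount
+ * point are illustrative):
+ *
+ *   const frame = createPluginIframe('example-plugin')
+ *   document.body.appendChild(frame) // hidden via display:none until styled by the caller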
+ * + * The iframe has restricted permissions: + * - allow-scripts: Allow JavaScript execution + * - NO allow-same-origin: Prevents access to parent document + * - NO allow-forms: Prevents form submission + * - NO allow-popups: Prevents opening new windows + * + * @param pluginId - Plugin identifier for debugging + * @returns Configured iframe element + */ +export function createPluginIframe(pluginId: string): HTMLIFrameElement { + const iframe = document.createElement('iframe') + + // Set sandbox attribute with minimal permissions + iframe.sandbox.add('allow-scripts') + // Intentionally NOT adding: + // - allow-same-origin (would allow access to parent) + // - allow-forms (would allow form submission) + // - allow-popups (would allow opening windows) + // - allow-top-navigation (would allow navigation) + + // Set CSP via attribute (some browsers support this) + iframe.setAttribute('csp', buildCspString()) + + // Set referrer policy + iframe.referrerPolicy = 'no-referrer' + + // Add plugin ID for debugging + iframe.dataset.pluginId = pluginId + + // Style for visibility + iframe.style.display = 'none' + + return iframe +} + +/** + * Sandbox attributes for plugin iframes. + * + * These are the recommended attributes for maximum security + * while still allowing plugin functionality. + */ +export const SANDBOX_ATTRIBUTES = [ + 'allow-scripts', // Required for JavaScript +] as const + +/** + * Forbidden sandbox attributes that should never be added. + */ +export const FORBIDDEN_SANDBOX_ATTRIBUTES = [ + 'allow-same-origin', // Would bypass sandbox + 'allow-top-navigation', // Could navigate away + 'allow-top-navigation-by-user-activation', + 'allow-forms', // Could submit data externally + 'allow-popups', // Could open external windows + 'allow-popups-to-escape-sandbox', + 'allow-modals', // Could block user + 'allow-orientation-lock', + 'allow-pointer-lock', + 'allow-presentation', + 'allow-downloads', // Could trigger downloads +] as const diff --git a/app/src/features/plugins/lib/wasm-loader.ts b/app/src/features/plugins/lib/wasm-loader.ts new file mode 100644 index 00000000..964e8a69 --- /dev/null +++ b/app/src/features/plugins/lib/wasm-loader.ts @@ -0,0 +1,139 @@ +/** + * WASM Module Loader + * + * Loads plugin WASM modules from the server with caching support. + */ + +import type { ManifestItem } from '@/shared/api' +import { API_BASE_URL } from '@/shared/lib/config' + +/** Cache name for plugin WASM modules */ +const WASM_CACHE_NAME = 'plugin-wasm-v1' + +/** + * Get the API origin for plugin asset URLs + */ +function getApiOrigin(): string { + try { + if (API_BASE_URL) { + return new URL(API_BASE_URL).origin + } + } catch { + // Fallback to current origin + } + if (typeof window !== 'undefined') { + return window.location.origin + } + return '' +} + +/** + * Get the WASM URL for a plugin. + * + * The backend field contains a pre-signed URL path for the WASM module. + * We prepend the API origin to make it a full URL. 
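+ *
+ * Illustrative resolution (values are made up):
+ *   backend.wasm = "/plugins/example/backend.wasm?sig=abc"
+ *   → "https://api.example.com/plugins/example/backend.wasm?sig=abc"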
+ * + * @param _pluginId - Plugin identifier (unused, kept for API compatibility) + * @param manifest - Plugin manifest + * @returns URL to fetch WASM from + */ +export function getPluginWasmUrl(_pluginId: string, manifest: ManifestItem): string | null { + // Extract backend from manifest - backend.wasm contains the signed URL path + const backend = manifest.backend as { wasm?: string } | undefined | null + const wasmPath = backend?.wasm + + if (!wasmPath) { + return null + } + + // If it's already an absolute URL, return as-is + if (wasmPath.startsWith('http://') || wasmPath.startsWith('https://')) { + return wasmPath + } + + // Prepend API origin to the signed path + const apiOrigin = getApiOrigin() + return apiOrigin ? `${apiOrigin}${wasmPath}` : wasmPath +} + +/** + * Load plugin WASM with Service Worker caching. + * + * @param pluginId - Plugin identifier + * @param manifest - Plugin manifest + * @returns URL to the WASM module (from cache or network) + */ +export async function loadPluginWasm( + pluginId: string, + manifest: ManifestItem +): Promise { + const wasmUrl = getPluginWasmUrl(pluginId, manifest) + + if (!wasmUrl) { + throw new Error(`Plugin ${pluginId} does not have a backend WASM module`) + } + + // Use Service Worker cache if available + if (typeof window !== 'undefined' && 'caches' in window) { + try { + const cache = await caches.open(WASM_CACHE_NAME) + const cached = await cache.match(wasmUrl) + + if (cached) { + // Return cached URL + return wasmUrl + } + + // Fetch and cache + const response = await fetch(wasmUrl) + if (response.ok) { + await cache.put(wasmUrl, response.clone()) + } + } catch { + // Cache API not available or failed, continue without caching + } + } + + return wasmUrl +} + +/** + * Check if a plugin has a backend WASM module. + */ +export function hasPluginWasm(manifest: ManifestItem): boolean { + const backend = manifest.backend as { wasm?: string } | undefined | null + return typeof backend?.wasm === 'string' && backend.wasm.length > 0 +} + +/** + * Clear the WASM cache. + */ +export async function clearWasmCache(): Promise { + if (typeof window !== 'undefined' && 'caches' in window) { + try { + await caches.delete(WASM_CACHE_NAME) + } catch { + // Ignore errors + } + } +} + +/** + * Clear cached WASM for a specific plugin. + */ +export async function clearPluginWasmCache( + pluginId: string, + manifest: ManifestItem +): Promise { + const wasmUrl = getPluginWasmUrl(pluginId, manifest) + if (!wasmUrl) return + + if (typeof window !== 'undefined' && 'caches' in window) { + try { + const cache = await caches.open(WASM_CACHE_NAME) + await cache.delete(wasmUrl) + } catch { + // Ignore errors + } + } +} diff --git a/app/src/features/plugins/lib/wasm-runtime.ts b/app/src/features/plugins/lib/wasm-runtime.ts new file mode 100644 index 00000000..871f2e08 --- /dev/null +++ b/app/src/features/plugins/lib/wasm-runtime.ts @@ -0,0 +1,192 @@ +/** + * Client-side WASM Runtime + * + * Executes plugin WASM modules in the browser using Extism JS SDK. + * This replaces server-side WASM execution for E2EE compatibility. 
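+ *
+ * Hypothetical end-to-end sketch (the URL and action name are illustrative):
+ *
+ *   const runtime = getWasmRuntime()
+ *   await runtime.loadPlugin('example-plugin', 'https://api.example.com/example.wasm')
+ *   const out = await runtime.execute('example-plugin', 'ping', {}, { docId: null, userId: null })
+ *   if (!out.ok) console.error(out.error)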
+ */ + +import { createPlugin, type Plugin } from '@extism/extism' + +/** Plugin execution context */ +export interface ExecContext { + docId: string | null + userId: string | null +} + +/** Plugin execution output */ +export interface ExecOutput { + ok: boolean + data: unknown + effects: Effect[] + error: { code: string; message: string } | null +} + +/** Effect types that plugins can emit */ +export interface Effect { + type: string + [key: string]: unknown +} + +/** + * Client-side WASM runtime for plugin execution. + * + * Manages plugin lifecycle and execution in the browser. + */ +export class ClientWasmRuntime { + private plugins: Map = new Map() + private loadingPromises: Map> = new Map() + + /** + * Load a plugin WASM module. + * + * @param pluginId - Unique plugin identifier + * @param wasmUrl - URL to fetch WASM module from + */ + async loadPlugin(pluginId: string, wasmUrl: string): Promise { + // Return existing loading promise if already loading + const existing = this.loadingPromises.get(pluginId) + if (existing) { + return existing + } + + // Return immediately if already loaded + if (this.plugins.has(pluginId)) { + return + } + + const loadPromise = (async () => { + try { + const response = await fetch(wasmUrl) + if (!response.ok) { + throw new Error(`Failed to fetch WASM: ${response.status} ${response.statusText}`) + } + + const wasmBytes = await response.arrayBuffer() + + // Extism requires a manifest with wasm key + const manifest = { + wasm: [{ data: new Uint8Array(wasmBytes) }], + } + + const plugin = await createPlugin(manifest, { + useWasi: true, + // runInWorker requires crossOriginIsolated which needs COOP/COEP headers + runInWorker: false, + }) + + this.plugins.set(pluginId, plugin) + } finally { + this.loadingPromises.delete(pluginId) + } + })() + + this.loadingPromises.set(pluginId, loadPromise) + return loadPromise + } + + /** + * Check if a plugin is loaded. + */ + isLoaded(pluginId: string): boolean { + return this.plugins.has(pluginId) + } + + /** + * Execute a plugin action. + * + * @param pluginId - Plugin to execute + * @param action - Action name + * @param payload - Action payload + * @param ctx - Execution context + * @returns Execution result with effects + */ + async execute( + pluginId: string, + action: string, + payload: unknown, + ctx: ExecContext + ): Promise { + const plugin = this.plugins.get(pluginId) + if (!plugin) { + return { + ok: false, + data: null, + effects: [], + error: { code: 'PLUGIN_NOT_LOADED', message: `Plugin ${pluginId} is not loaded` }, + } + } + + try { + const input = JSON.stringify({ + action, + payload: payload ?? {}, + ctx: { + doc_id: ctx.docId, + user_id: ctx.userId, + }, + }) + + const output = await plugin.call('exec', input) + if (!output) { + return { + ok: false, + data: null, + effects: [], + error: { code: 'WASM_NO_OUTPUT', message: 'Plugin returned no output' }, + } + } + const result = output.json() + + return { + ok: result.ok ?? true, + data: result.data ?? null, + effects: result.effects ?? [], + error: result.error ?? null, + } + } catch (err) { + const message = err instanceof Error ? err.message : String(err) + return { + ok: false, + data: null, + effects: [], + error: { code: 'WASM_EXECUTION_ERROR', message }, + } + } + } + + /** + * Unload a plugin and free resources. 
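+ *
+ * e.g. `await getWasmRuntime().unload('example-plugin')` (illustrative id);
+ * this is a no-op if the plugin was never loaded.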
+ */ + async unload(pluginId: string): Promise { + const plugin = this.plugins.get(pluginId) + if (plugin) { + try { + await plugin.close() + } catch { + // Ignore close errors + } + this.plugins.delete(pluginId) + } + } + + /** + * Unload all plugins. + */ + async unloadAll(): Promise { + const pluginIds = Array.from(this.plugins.keys()) + await Promise.all(pluginIds.map((id) => this.unload(id))) + } +} + +/** Singleton instance for shared use */ +let sharedRuntime: ClientWasmRuntime | null = null + +/** + * Get the shared WASM runtime instance. + */ +export function getWasmRuntime(): ClientWasmRuntime { + if (!sharedRuntime) { + sharedRuntime = new ClientWasmRuntime() + } + return sharedRuntime +} diff --git a/app/src/features/plugins/model/usePluginExecutor.ts b/app/src/features/plugins/model/usePluginExecutor.ts index 872c8f6b..02fb2cee 100644 --- a/app/src/features/plugins/model/usePluginExecutor.ts +++ b/app/src/features/plugins/model/usePluginExecutor.ts @@ -1 +1 @@ -export { usePluginExecutor } from '@/entities/plugin/hooks/usePluginExecutor' +export { usePluginExecutor } from '@/features/plugins/hooks/usePluginExecutor' diff --git a/app/src/features/plugins/ui/PluginDocumentMount.tsx b/app/src/features/plugins/ui/PluginDocumentMount.tsx index 4afd96a2..de9e83c8 100644 --- a/app/src/features/plugins/ui/PluginDocumentMount.tsx +++ b/app/src/features/plugins/ui/PluginDocumentMount.tsx @@ -88,41 +88,44 @@ export function PluginDocumentMount({ match, container, mode, - variant === 'preview' - ? { - tweakHost: (host) => { - if (!host || typeof host !== 'object') return - if (!host.ui || typeof host.ui !== 'object') host.ui = {} - ;(host.ui as any).mountSplitEditor = (target: Element, options?: any) => { - if (typeof window === 'undefined') return undefined - if (!target) return undefined - const el = target as HTMLElement - const previewDelegate = options?.preview?.delegate - const onDocumentReady = options?.document?.onReady - const nextDocId = options?.docId ?? host?.context?.docId ?? null - const nextToken = options?.token ?? host?.context?.token ?? null - if (typeof nextDocId === 'string' && nextDocId.trim()) { - try { - window.dispatchEvent( - new CustomEvent<{ docId: string }>(PLUGIN_USES_SPLIT_EDITOR_EVENT, { - detail: { docId: nextDocId.trim() }, - }), - ) - } catch { - /* noop */ + { + workspaceId: activeWorkspaceId ?? null, + ...(variant === 'preview' + ? { + tweakHost: (host) => { + if (!host || typeof host !== 'object') return + if (!host.ui || typeof host.ui !== 'object') host.ui = {} + ;(host.ui as any).mountSplitEditor = (target: Element, options?: any) => { + if (typeof window === 'undefined') return undefined + if (!target) return undefined + const el = target as HTMLElement + const previewDelegate = options?.preview?.delegate + const onDocumentReady = options?.document?.onReady + const nextDocId = options?.docId ?? host?.context?.docId ?? null + const nextToken = options?.token ?? host?.context?.token ?? 
null + if (typeof nextDocId === 'string' && nextDocId.trim()) { + try { + window.dispatchEvent( + new CustomEvent<{ docId: string }>(PLUGIN_USES_SPLIT_EDITOR_EVENT, { + detail: { docId: nextDocId.trim() }, + }), + ) + } catch { + /* noop */ + } } + return mountSplitEditorPreviewStage(el, { + docId: nextDocId, + token: nextToken, + host, + previewDelegate, + onDocumentReady, + }) } - return mountSplitEditorPreviewStage(el, { - docId: nextDocId, - token: nextToken, - host, - previewDelegate, - onDocumentReady, - }) - } - }, - } - : {}, + }, + } + : {}), + }, )) as any if (cancelled) { diff --git a/app/src/features/plugins/ui/SplitEditorHost.tsx b/app/src/features/plugins/ui/SplitEditorHost.tsx index 848fd7b0..d355ba46 100644 --- a/app/src/features/plugins/ui/SplitEditorHost.tsx +++ b/app/src/features/plugins/ui/SplitEditorHost.tsx @@ -282,11 +282,10 @@ function useDocContent(doc: any) { } function PluginSplitEditorStageInner({ docId, token, host, previewDelegate, onDocumentReady }: StageInnerProps) { - const { user } = useAuthContext() + const { user, activeWorkspaceId } = useAuthContext() const { status, doc, awareness, isReadOnly, error } = useCollaborativeDocument(docId, token ?? undefined, { contributeToRealtimeContext: false, useUrlShareTokenFallback: false, - disablePersistence: true, }) const [anonIdentity] = useState(() => { if (user) return null @@ -358,6 +357,7 @@ function PluginSplitEditorStageInner({ docId, token, host, previewDelegate, onDo userId={user?.id || anonIdentity?.id} userName={user?.name || anonIdentity?.name} documentId={docId} + workspaceId={activeWorkspaceId} readOnly={isReadOnly} extraRight={undefined} renderPreview={renderPreview} @@ -371,7 +371,6 @@ function PluginSplitPreviewStageInner({ docId, token, host, previewDelegate, onD const { status, doc, error } = useCollaborativeDocument(docId, token ?? undefined, { contributeToRealtimeContext: false, useUrlShareTokenFallback: false, - disablePersistence: true, }) const shouldShowOverlay = Boolean(error) || !doc diff --git a/app/src/features/realtime/context/index.ts b/app/src/features/realtime/context/index.ts new file mode 100644 index 00000000..4ff26957 --- /dev/null +++ b/app/src/features/realtime/context/index.ts @@ -0,0 +1,7 @@ +/** + * Realtime Context + * + * React context for realtime connection state and document metadata. 
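+ *
+ * Typical wiring sketch (the page component is illustrative):
+ *
+ *   <RealtimeProvider>
+ *     <DocumentPage />
+ *   </RealtimeProvider>
+ *
+ *   // in any descendant component:
+ *   const { connected, setDocumentTitle } = useRealtime()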
+ */ + +export { RealtimeProvider, useRealtime } from './realtime-context' diff --git a/app/src/features/realtime/context/realtime-context.tsx b/app/src/features/realtime/context/realtime-context.tsx new file mode 100644 index 00000000..0fd6ab22 --- /dev/null +++ b/app/src/features/realtime/context/realtime-context.tsx @@ -0,0 +1,107 @@ +import React, { createContext, useContext, useMemo, useState, useCallback } from 'react' + +import type { DocumentHeaderAction } from '@/shared/types/document' + +type RealtimeState = { + connected: boolean + userCount: number + onlineUsers: Array<{ id: string; name: string; color?: string; clientId?: number }> + documentTitle?: string + documentPath?: string + documentId?: string + documentPluginId?: string + showEditorFeatures: boolean + documentStatus?: string + documentBadge?: string + documentActions: DocumentHeaderAction[] + setConnected: (v: boolean) => void + setUserCount: (n: number) => void + setOnlineUsers: (list: Array<{ id: string; name: string; color?: string; clientId?: number }>) => void + setDocumentTitle: (t?: string | null) => void + setDocumentPath: (p?: string | null) => void + setDocumentId: (id?: string | null) => void + setDocumentPluginId: (id?: string | null) => void + setShowEditorFeatures: (v: boolean) => void + setDocumentStatus: (status?: string | null) => void + setDocumentBadge: (badge?: string | null) => void + setDocumentActions: (actions: DocumentHeaderAction[]) => void +} + +const Ctx = createContext(null) + +export function RealtimeProvider({ children }: { children: React.ReactNode }) { + const [connected, setConnected] = useState(false) + const [userCount, setUserCount] = useState(0) + const [onlineUsers, setOnlineUsers] = useState>([]) + const [documentTitle, setDocumentTitleState] = useState(undefined) + const [documentPath, setDocumentPathState] = useState(undefined) + const [documentId, setDocumentIdState] = useState(undefined) + const [documentPluginId, setDocumentPluginIdState] = useState(undefined) + const [showEditorFeatures, setShowEditorFeaturesState] = useState(false) + const [documentStatus, setDocumentStatusState] = useState(undefined) + const [documentBadge, setDocumentBadgeState] = useState(undefined) + const [documentActions, setDocumentActionsState] = useState([]) + const setDocumentTitle = useCallback((title?: string | null) => setDocumentTitleState(title ?? undefined), []) + const setDocumentPath = useCallback((pathValue?: string | null) => setDocumentPathState(pathValue ?? undefined), []) + const setDocumentId = useCallback((identifier?: string | null) => setDocumentIdState(identifier ?? undefined), []) + const setDocumentPluginId = useCallback((identifier?: string | null) => setDocumentPluginIdState(identifier ?? undefined), []) + const setShowEditorFeatures = useCallback((value: boolean) => setShowEditorFeaturesState(value), []) + const setDocumentStatus = useCallback((status?: string | null) => setDocumentStatusState(status ?? undefined), []) + const setDocumentBadge = useCallback((badge?: string | null) => setDocumentBadgeState(badge ?? 
undefined), [])
+  const setDocumentActions = useCallback((actions: DocumentHeaderAction[]) => setDocumentActionsState(actions), [])
+
+  const value = useMemo(() => ({
+    connected,
+    userCount,
+    onlineUsers,
+    documentTitle,
+    documentPath,
+    documentId,
+    documentPluginId,
+    showEditorFeatures,
+    documentStatus,
+    documentBadge,
+    documentActions,
+    setConnected,
+    setUserCount,
+    setOnlineUsers,
+    setDocumentTitle,
+    setDocumentPath,
+    setDocumentId,
+    setDocumentPluginId,
+    setShowEditorFeatures,
+    setDocumentStatus,
+    setDocumentBadge,
+    setDocumentActions,
+  }), [
+    connected,
+    userCount,
+    onlineUsers,
+    documentTitle,
+    documentPath,
+    documentId,
+    documentPluginId,
+    showEditorFeatures,
+    documentStatus,
+    documentBadge,
+    documentActions,
+    setConnected,
+    setUserCount,
+    setOnlineUsers,
+    setDocumentTitle,
+    setDocumentPath,
+    setDocumentId,
+    setDocumentPluginId,
+    setShowEditorFeatures,
+    setDocumentStatus,
+    setDocumentBadge,
+    setDocumentActions,
+  ])
+  return <Ctx.Provider value={value}>{children}</Ctx.Provider>
+}
+
+export function useRealtime() {
+  const v = useContext(Ctx)
+  if (!v) throw new Error('useRealtime must be used within RealtimeProvider')
+  return v
+}
diff --git a/app/src/features/realtime/index.ts b/app/src/features/realtime/index.ts
new file mode 100644
index 00000000..cd07b0f2
--- /dev/null
+++ b/app/src/features/realtime/index.ts
@@ -0,0 +1,8 @@
+/**
+ * Realtime Feature
+ *
+ * Provides encrypted Yjs synchronization over WebSocket.
+ */
+
+export * from './lib'
+export * from './context'
diff --git a/app/src/features/realtime/lib/ephemeral.ts b/app/src/features/realtime/lib/ephemeral.ts
new file mode 100644
index 00000000..18fdd304
--- /dev/null
+++ b/app/src/features/realtime/lib/ephemeral.ts
@@ -0,0 +1,542 @@
+/**
+ * E2EE Ephemeral Message Handlers
+ *
+ * Implements the ephemeral message protocol for Yjs Awareness.
+ * Uses the DEK directly for encryption, with a 4-step session handshake.
+ */
+
+import {
+  encrypt,
+  decrypt,
+  sign,
+  verify,
+  toBase64,
+  fromBase64,
+  fromBase64Json,
+  canonicalizeAndToBase64,
+  getSodium,
+  SIGNATURE_DOMAINS,
+  type Ed25519KeyPair,
+  type SigningMessage,
+} from '@/shared/lib/crypto'
+
+// ============================================================================
+// Constants
+// ============================================================================
+
+/** Session ID length in bytes */
+export const SESSION_ID_LENGTH = 24
+
+/** Counter length in bytes */
+export const COUNTER_LENGTH = 4
+
+/** Max value for the initial random counter */
+const MAX_INITIAL_COUNTER = 2147483647 // Math.floor(0xffffffff / 2)
+
+// ============================================================================
+// Message Types
+// ============================================================================
+
+/**
+ * Ephemeral message types for the session handshake.
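+ *
+ * Handshake order, as implemented in verifyAndDecryptEphemeralMessage below:
+ *   1. A sends `initialize` to announce itself
+ *   2. B replies with `proofAndRequestProof`
+ *   3. A replies with `proof`, completing the handshake
+ *   4. both sides exchange `message` frames carrying awareness data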
+ */ +export const messageTypes = { + /** New client announces presence */ + initialize: 1, + /** Send proof and request proof from remote */ + proofAndRequestProof: 2, + /** Send proof only */ + proof: 3, + /** Actual awareness data */ + message: 4, +} as const + +export type MessageType = keyof typeof messageTypes + +// ============================================================================ +// Types +// ============================================================================ + +/** Validated sessions from other clients */ +export type ValidSessions = { + [authorPublicKey: string]: { + sessionId: string + sessionCounter: number + } +} + +/** Ephemeral session state */ +export interface EphemeralSession { + /** Own session ID (24 bytes, Base64) */ + id: string + /** Own counter (incremented on each send) */ + counter: number + /** Validated sessions from other clients */ + validSessions: ValidSessions +} + +/** Ephemeral message public data (not encrypted, used as AAD) */ +export interface EphemeralPublicData { + docId: string + pubKey: string // Ed25519 signing public key (Base64) +} + +/** Wire format for ephemeral messages */ +export interface EphemeralMessage { + ciphertext: string // Base64 + nonce: string // Base64 + signature: string // Base64 Ed25519 + publicData: string // Base64-encoded canonicalized JSON +} + +/** Result of verifying and decrypting an ephemeral message */ +export interface VerifyResult { + /** Updated valid sessions */ + validSessions?: ValidSessions + /** Proof to send back (if requested) */ + proof?: Uint8Array + /** Whether to request proof from remote */ + requestProof?: boolean + /** Decrypted content (only for message type) */ + content?: Uint8Array + /** Error if verification failed */ + error?: Error +} + +// ============================================================================ +// Session Management +// ============================================================================ + +/** + * Generate a random session ID. + * + * @returns Base64-encoded session ID (24 bytes) + */ +export async function generateSessionId(): Promise { + const bytes = new Uint8Array(SESSION_ID_LENGTH) + crypto.getRandomValues(bytes) + return toBase64(bytes) +} + +/** + * Generate a random initial counter. + * + * Using a large random initial value hides metadata about when the session started. + * + * @returns Random counter between 0 and MAX_INITIAL_COUNTER + */ +function generateInitialCounter(): number { + const array = new Uint32Array(1) + crypto.getRandomValues(array) + return array[0] % MAX_INITIAL_COUNTER +} + +/** + * Create a new ephemeral session. + * + * @returns New session with random ID and counter + */ +export async function createEphemeralSession(): Promise { + return { + id: await generateSessionId(), + counter: generateInitialCounter(), + validSessions: {}, + } +} + +// ============================================================================ +// Message Creation +// ============================================================================ + +/** + * Create an ephemeral message. 
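+ *
+ * Illustrative call (the DEK, key pair, and awareness bytes are assumed to be
+ * available from the caller):
+ *
+ *   const session = await createEphemeralSession()
+ *   const { message, updatedSession } = await createEphemeralMessage(
+ *     awarenessBytes, 'message', { docId, pubKey }, dek, keyPair, session)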
+ * + * Message structure (before encryption): + * [type (1 byte)] + [sessionId (24 bytes)] + [counter (4 bytes)] + [content] + * + * @param content - Message content (awareness update or proof) + * @param messageType - Type of message + * @param publicData - Public data (docId, pubKey) used as AAD + * @param dek - Document encryption key + * @param signatureKeyPair - Ed25519 signing key pair + * @param session - Current session state + * @returns Encrypted and signed ephemeral message + */ +export async function createEphemeralMessage( + content: Uint8Array, + messageType: MessageType, + publicData: EphemeralPublicData, + dek: Uint8Array, + signatureKeyPair: Ed25519KeyPair, + session: EphemeralSession +): Promise<{ message: EphemeralMessage; updatedSession: EphemeralSession }> { + // Increment counter for this message + const newCounter = session.counter + 1 + const updatedSession = { ...session, counter: newCounter } + + // Build prefixed content: [type] + [sessionId] + [counter] + [content] + const sessionIdBytes = await fromBase64(session.id) + const prefixedContent = prefixWithSessionInfo( + content, + messageTypes[messageType], + sessionIdBytes, + newCounter + ) + + // Encrypt with DEK + const { ciphertext, nonce } = await encrypt(dek, prefixedContent) + + // Encode to Base64 + const ciphertextBase64 = await toBase64(ciphertext) + const nonceBase64 = await toBase64(nonce) + const publicDataBase64 = await canonicalizeAndToBase64(publicData) + + // Build signing message + const signingMessage: SigningMessage = { + ciphertext: ciphertextBase64, + nonce: nonceBase64, + publicData: publicDataBase64, + } + + // Sign the message + const signature = await sign(signatureKeyPair.privateKey, SIGNATURE_DOMAINS.EPHEMERAL, signingMessage) + const signatureBase64 = await toBase64(signature) + + const message: EphemeralMessage = { + ciphertext: ciphertextBase64, + nonce: nonceBase64, + signature: signatureBase64, + publicData: publicDataBase64, + } + + return { message, updatedSession } +} + +/** + * Create an initialize message to announce presence. + */ +export async function createInitializeMessage( + publicData: EphemeralPublicData, + dek: Uint8Array, + signatureKeyPair: Ed25519KeyPair, + session: EphemeralSession +): Promise<{ message: EphemeralMessage; updatedSession: EphemeralSession }> { + // Initialize message has empty content + return createEphemeralMessage( + new Uint8Array(0), + 'initialize', + publicData, + dek, + signatureKeyPair, + session + ) +} + +// ============================================================================ +// Message Verification and Decryption +// ============================================================================ + +/** + * Verify and decrypt an ephemeral message. 
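+ *
+ * Callers typically branch on the result fields (sketch; the proof sender and
+ * awareness applier are hypothetical helpers):
+ *
+ *   const res = await verifyAndDecryptEphemeralMessage(msg, dek, docId, session, keys)
+ *   if (res.validSessions) session = { ...session, validSessions: res.validSessions }
+ *   if (res.proof) sendProofMessage(res.proof, res.requestProof)
+ *   if (res.content) applyAwarenessUpdate(res.content)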
+ * + * @param message - Received ephemeral message + * @param dek - Document encryption key + * @param currentDocId - Current document ID (for validation) + * @param session - Current session state + * @param signatureKeyPair - Own Ed25519 key pair (for creating proofs) + * @returns Verification result with updated sessions and/or content + */ +export async function verifyAndDecryptEphemeralMessage( + message: EphemeralMessage, + dek: Uint8Array, + currentDocId: string, + session: EphemeralSession, + signatureKeyPair: Ed25519KeyPair +): Promise { + try { + // Decode publicData from Base64 string to object + const publicData = await fromBase64Json(message.publicData) + + // Validate document ID + if (publicData.docId !== currentDocId) { + return { validSessions: session.validSessions } + } + + const senderPublicKey = await fromBase64(publicData.pubKey) + + // Build signing message for verification + // Use message.publicData directly since it's already Base64-encoded + const signingMessage: SigningMessage = { + ciphertext: message.ciphertext, + nonce: message.nonce, + publicData: message.publicData, + } + + // Verify signature + const signature = await fromBase64(message.signature) + const isValid = await verify(senderPublicKey, signature, SIGNATURE_DOMAINS.EPHEMERAL, signingMessage) + if (!isValid) { + return { error: new Error('EPHEMERAL_ERROR_308') } // Invalid signature + } + + // Decrypt content + const ciphertext = await fromBase64(message.ciphertext) + const nonce = await fromBase64(message.nonce) + let decrypted: Uint8Array + try { + decrypted = await decrypt(dek, ciphertext, nonce) + } catch { + return { error: new Error('EPHEMERAL_ERROR_301') } // Decryption failed + } + + // Parse prefix: [type (1)] + [sessionId (24)] + [counter (4)] + [content] + const { type, sessionId, counter, content } = await parsePrefix(decrypted) + const senderPublicKeyBase64 = publicData.pubKey + + // Handle by message type + switch (type) { + case messageTypes.initialize: + // Create proof and request proof back + const initProof = await createEphemeralSessionProof( + sessionId, + session.id, + signatureKeyPair + ) + return { + proof: initProof, + requestProof: true, + validSessions: session.validSessions, + } + + case messageTypes.proofAndRequestProof: + case messageTypes.proof: { + // Verify the proof + const isProofValid = await verifyEphemeralSessionProof( + content, + session.id, + sessionId, + senderPublicKey + ) + + if (isProofValid) { + // Session established + const newValidSessions: ValidSessions = { + ...session.validSessions, + [senderPublicKeyBase64]: { + sessionId, + sessionCounter: counter, + }, + } + + // Create response proof if requested + let responseProof: Uint8Array | undefined + if (type === messageTypes.proofAndRequestProof) { + responseProof = await createEphemeralSessionProof( + sessionId, + session.id, + signatureKeyPair + ) + } + + return { + validSessions: newValidSessions, + proof: responseProof, + requestProof: false, + } + } else { + return { validSessions: session.validSessions } + } + } + + case messageTypes.message: { + const existingSession = session.validSessions[senderPublicKeyBase64] + + // Check if session is valid + if (!existingSession || existingSession.sessionId !== sessionId) { + // Unknown session - treat as initialize + const msgProof = await createEphemeralSessionProof( + sessionId, + session.id, + signatureKeyPair + ) + return { + proof: msgProof, + requestProof: true, + validSessions: session.validSessions, + error: new Error('EPHEMERAL_ERROR_302'), // 
Session not established + } + } + + // Replay attack check + if (existingSession.sessionCounter >= counter) { + return { error: new Error('EPHEMERAL_ERROR_303') } // Replay attack + } + + // Update counter and return content + const newValidSessions: ValidSessions = { + ...session.validSessions, + [senderPublicKeyBase64]: { + sessionId, + sessionCounter: counter, + }, + } + + return { + content, + validSessions: newValidSessions, + } + } + + default: + return { error: new Error('EPHEMERAL_ERROR_305') } // Unknown message type + } + } catch (err) { + console.error('[ephemeral] Error processing message:', err) + return { error: new Error('EPHEMERAL_ERROR_307') } // General error + } +} + +// ============================================================================ +// Session Proof +// ============================================================================ + +/** + * Create a session proof. + * + * The proof is a signature over: + * { remoteClientSessionId, currentClientSessionId } + * + * @param remoteSessionId - Remote client's session ID (Base64) + * @param currentSessionId - Own session ID (Base64) + * @param signatureKeyPair - Own Ed25519 key pair + * @returns Proof as signature bytes + */ +export async function createEphemeralSessionProof( + remoteSessionId: string, + currentSessionId: string, + signatureKeyPair: Ed25519KeyPair +): Promise { + const sodium = await getSodium() + + // Create proof data as JSON + const proofData = JSON.stringify({ + currentClientSessionId: currentSessionId, + remoteClientSessionId: remoteSessionId, + }) + + // Domain + proof data + const domain = 'refmd_ephemeral_session_proof' + const encoder = new TextEncoder() + const domainBytes = encoder.encode(domain) + const dataBytes = encoder.encode(proofData) + + const messageBytes = new Uint8Array(domainBytes.length + dataBytes.length) + messageBytes.set(domainBytes, 0) + messageBytes.set(dataBytes, domainBytes.length) + + return sodium.crypto_sign_detached(messageBytes, signatureKeyPair.privateKey) +} + +/** + * Verify a session proof. + * + * @param proof - Proof bytes (signature) + * @param expectedCurrentSessionId - Expected value of currentClientSessionId in proof + * @param remoteSessionId - Remote client's session ID + * @param remotePublicKey - Remote client's Ed25519 public key + * @returns True if proof is valid + */ +export async function verifyEphemeralSessionProof( + proof: Uint8Array, + expectedCurrentSessionId: string, + remoteSessionId: string, + remotePublicKey: Uint8Array +): Promise { + const sodium = await getSodium() + + // Create expected proof data (note: currentClient is the remote, remoteClient is us) + const proofData = JSON.stringify({ + currentClientSessionId: remoteSessionId, + remoteClientSessionId: expectedCurrentSessionId, + }) + + // Domain + proof data + const domain = 'refmd_ephemeral_session_proof' + const encoder = new TextEncoder() + const domainBytes = encoder.encode(domain) + const dataBytes = encoder.encode(proofData) + + const messageBytes = new Uint8Array(domainBytes.length + dataBytes.length) + messageBytes.set(domainBytes, 0) + messageBytes.set(dataBytes, domainBytes.length) + + try { + return sodium.crypto_sign_verify_detached(proof, messageBytes, remotePublicKey) + } catch { + return false + } +} + +// ============================================================================ +// Utility Functions +// ============================================================================ + +/** + * Prefix content with session info. 
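+ *
+ * The 4-byte counter is encoded big-endian (see intToUint8Array below).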
+ * + * Format: [type (1)] + [sessionId (24)] + [counter (4)] + [content] + */ +function prefixWithSessionInfo( + content: Uint8Array, + type: number, + sessionId: Uint8Array, + counter: number +): Uint8Array { + const counterBytes = intToUint8Array(counter) + + const result = new Uint8Array(1 + SESSION_ID_LENGTH + COUNTER_LENGTH + content.length) + result[0] = type + result.set(sessionId, 1) + result.set(counterBytes, 1 + SESSION_ID_LENGTH) + result.set(content, 1 + SESSION_ID_LENGTH + COUNTER_LENGTH) + + return result +} + +/** + * Parse prefix from decrypted content. + */ +async function parsePrefix(data: Uint8Array): Promise<{ + type: number + sessionId: string + counter: number + content: Uint8Array +}> { + const type = data[0] + const sessionIdBytes = data.slice(1, 1 + SESSION_ID_LENGTH) + const sessionId = await toBase64(sessionIdBytes) + const counterBytes = data.slice(1 + SESSION_ID_LENGTH, 1 + SESSION_ID_LENGTH + COUNTER_LENGTH) + const counter = uint8ArrayToInt(counterBytes) + const content = data.slice(1 + SESSION_ID_LENGTH + COUNTER_LENGTH) + + return { type, sessionId, counter, content } +} + +/** + * Convert integer to 4-byte Uint8Array (big-endian). + */ +function intToUint8Array(num: number): Uint8Array { + const arr = new Uint8Array(4) + arr[0] = (num >> 24) & 0xff + arr[1] = (num >> 16) & 0xff + arr[2] = (num >> 8) & 0xff + arr[3] = num & 0xff + return arr +} + +/** + * Convert 4-byte Uint8Array to integer (big-endian). + */ +function uint8ArrayToInt(arr: Uint8Array): number { + return (arr[0] << 24) | (arr[1] << 16) | (arr[2] << 8) | arr[3] +} diff --git a/app/src/features/realtime/lib/index.ts b/app/src/features/realtime/lib/index.ts new file mode 100644 index 00000000..42b11de6 --- /dev/null +++ b/app/src/features/realtime/lib/index.ts @@ -0,0 +1,70 @@ +/** + * Realtime Sync Module + * + * Provides encrypted Yjs synchronization over WebSocket. + * Replaces y-websocket with encrypted communication. 
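+ *
+ * Import surface sketch:
+ *
+ *   import { createConnection, createUpdate, createEphemeralSession } from '@/features/realtime/lib'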
+ */ + +// Main sync functionality +export { + createConnection, + Sync, + type Connection, + type ConnectionOptions, + type ShareModeOptions, + type SyncState, + type SyncStatus, + type StatusEvent, + type StatusEventHandler, +} from './sync' + +// Message creation and verification +export { + createUpdate, + createSnapshot, + verifyAndDecryptUpdate, + verifyAndDecryptSnapshot, + decryptInitSnapshot, + decryptSyncUpdate, + isServerInitMessage, + isServerSyncUpdate, + isRealtimeMessage, + type ServerInitMessage, + type ServerSyncUpdate, + type ServerMessage, + type DecryptedUpdate, + type DecryptedSnapshot, + type DecryptedInit, + type DecryptedSyncUpdate, + type RealtimeMessage, + type UpdatePublicData, + type SnapshotPublicData, + type EphemeralPublicData, +} from './messages' + +// Ephemeral (awareness) - session management with 4-step handshake +export { + // Session management + createEphemeralSession, + generateSessionId, + // Message creation + createEphemeralMessage, + createInitializeMessage, + // Message verification + verifyAndDecryptEphemeralMessage, + // Session proof + createEphemeralSessionProof, + verifyEphemeralSessionProof, + // Message types + messageTypes, + // Constants + SESSION_ID_LENGTH, + COUNTER_LENGTH, + // Types + type MessageType, + type ValidSessions, + type EphemeralSession, + type EphemeralPublicData as EphemeralPublicDataType, + type EphemeralMessage, + type VerifyResult, +} from './ephemeral' diff --git a/app/src/features/realtime/lib/messages.ts b/app/src/features/realtime/lib/messages.ts new file mode 100644 index 00000000..55ac107f --- /dev/null +++ b/app/src/features/realtime/lib/messages.ts @@ -0,0 +1,393 @@ +/** + * E2EE Realtime Message Handlers + * + * Functions for creating and processing encrypted Yjs updates and snapshots. 
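+ *
+ * Round-trip sketch (the publicData fields and the WebSocket transport are
+ * illustrative):
+ *
+ *   const msg = await createUpdate(updateBytes, dek, keyPair, publicData)
+ *   ws.send(JSON.stringify(msg))
+ *   // on a receiving peer:
+ *   const { update } = await verifyAndDecryptUpdate(msg, dek)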
+ * Compatible with backend (api/crates/infrastructure/src/documents/realtime/hub.rs) + */ + +import { + encrypt, + decrypt, + sign, + verify, + SIGNATURE_DOMAINS, + canonicalizeAndToBase64, + toBase64, + fromBase64, + fromBase64Json, + type SigningMessage, +} from '@/shared/lib/crypto' +import { + createRealtimeMessage, + type RealtimeMessage, + type UpdatePublicData, + type SnapshotPublicData, + type EphemeralPublicData, +} from '@/shared/types/security' + +// Ephemeral messages are now handled by ephemeral.ts +// See: createEphemeralMessage, verifyAndDecryptEphemeralMessage + +// Re-export types for consumers +export type { RealtimeMessage, UpdatePublicData, SnapshotPublicData, EphemeralPublicData } + +// ============================================ +// Types for server messages +// ============================================ + +/** Server init message (snapshot + seq) */ +export interface ServerInitMessage { + type: 'init' + snapshot: { + data: string // Base64 encrypted Yjs state (or unencrypted if nonce is null) + nonce: string | null // Base64 nonce (null for unencrypted/empty docs) + signature: string | null // Base64 Ed25519 signature (null for unencrypted/empty docs) + seq_at_snapshot: number | null // Sequence number at snapshot (null for empty docs) + } +} + +/** Server sync_update message */ +export interface ServerSyncUpdate { + type: 'sync_update' + update: { + data: string // Base64 encrypted Yjs update + nonce: string // Base64 nonce + signature: string // Base64 Ed25519 signature + public_key: string // Base64 Ed25519 public key + seq: number // Sequence number + } +} + +/** Union type for all server messages */ +export type ServerMessage = ServerInitMessage | ServerSyncUpdate | RealtimeMessage + +// ============================================ +// Create functions (client -> server) +// ============================================ + +/** + * Create an encrypted and signed update message. + * + * @param update - Raw Yjs update bytes + * @param dek - Document encryption key (32 bytes) + * @param signingKeyPair - Ed25519 key pair for signing + * @param publicData - Update metadata + * @returns RealtimeMessage ready to send + */ +export async function createUpdate( + update: Uint8Array, + dek: Uint8Array, + signingKeyPair: { publicKey: Uint8Array; privateKey: Uint8Array }, + publicData: UpdatePublicData +): Promise { + // 1. Encrypt the update + const { ciphertext, nonce } = await encrypt(dek, update) + + // 2. Encode to Base64 + const ciphertextBase64 = await toBase64(ciphertext) + const nonceBase64 = await toBase64(nonce) + const publicDataBase64 = await canonicalizeAndToBase64(publicData) + + // 3. Build signing message + const signingMessage: SigningMessage = { + ciphertext: ciphertextBase64, + nonce: nonceBase64, + publicData: publicDataBase64, + } + + // 4. Sign the message + const signature = await sign(signingKeyPair.privateKey, SIGNATURE_DOMAINS.UPDATE, signingMessage) + const signatureBase64 = await toBase64(signature) + + // 5. Create RealtimeMessage + return createRealtimeMessage('update', ciphertextBase64, nonceBase64, signatureBase64, publicDataBase64) +} + +/** + * Create an encrypted and signed snapshot message. 
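+ *
+ * Mirrors createUpdate above, but signs under SIGNATURE_DOMAINS.SNAPSHOT, so
+ * a snapshot signature can never validate as an update signature.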
+ * + * @param snapshot - Raw Yjs snapshot bytes (from Y.encodeStateAsUpdateV2) + * @param dek - Document encryption key (32 bytes) + * @param signingKeyPair - Ed25519 key pair for signing + * @param publicData - Snapshot metadata + * @returns RealtimeMessage ready to send + */ +export async function createSnapshot( + snapshot: Uint8Array, + dek: Uint8Array, + signingKeyPair: { publicKey: Uint8Array; privateKey: Uint8Array }, + publicData: SnapshotPublicData +): Promise { + // 1. Encrypt the snapshot + const { ciphertext, nonce } = await encrypt(dek, snapshot) + + // 2. Encode to Base64 + const ciphertextBase64 = await toBase64(ciphertext) + const nonceBase64 = await toBase64(nonce) + const publicDataBase64 = await canonicalizeAndToBase64(publicData) + + // 3. Build signing message + const signingMessage: SigningMessage = { + ciphertext: ciphertextBase64, + nonce: nonceBase64, + publicData: publicDataBase64, + } + + // 4. Sign the message + const signature = await sign(signingKeyPair.privateKey, SIGNATURE_DOMAINS.SNAPSHOT, signingMessage) + const signatureBase64 = await toBase64(signature) + + // 5. Create RealtimeMessage + return createRealtimeMessage('snapshot', ciphertextBase64, nonceBase64, signatureBase64, publicDataBase64) +} + +// NOTE: createAwareness has been removed. +// Use createEphemeralMessage from ephemeral.ts for awareness messages. + +// ============================================ +// Verify and Decrypt functions (for relayed messages) +// ============================================ + +/** Result of verifying and decrypting an update */ +export interface DecryptedUpdate { + /** Decrypted Yjs update bytes */ + update: Uint8Array + /** Parsed public data */ + publicData: UpdatePublicData +} + +/** Result of verifying and decrypting a snapshot */ +export interface DecryptedSnapshot { + /** Decrypted Yjs snapshot bytes */ + snapshot: Uint8Array + /** Parsed public data */ + publicData: SnapshotPublicData +} + +/** + * Verify and decrypt an update message from another client. + * + * @param message - Received RealtimeMessage + * @param dek - Document encryption key (32 bytes) + * @returns Decrypted update with public data + * @throws Error if signature verification fails or decryption fails + */ +export async function verifyAndDecryptUpdate( + message: RealtimeMessage, + dek: Uint8Array +): Promise { + if (message.type !== 'update') { + throw new Error(`Expected update message, got ${message.type}`) + } + + // 1. Parse public data to get sender's public key + const publicData = await fromBase64Json(message.publicData) + const senderPublicKey = await fromBase64(publicData.pubKey) + + // 2. Build signing message for verification + const signingMessage: SigningMessage = { + ciphertext: message.ciphertext, + nonce: message.nonce, + publicData: message.publicData, + } + + // 3. Verify signature + const signature = await fromBase64(message.signature) + const isValid = await verify(senderPublicKey, signature, SIGNATURE_DOMAINS.UPDATE, signingMessage) + + if (!isValid) { + throw new Error('Update signature verification failed') + } + + // 4. Decrypt the update + const ciphertext = await fromBase64(message.ciphertext) + const nonce = await fromBase64(message.nonce) + const update = await decrypt(dek, ciphertext, nonce) + + return { update, publicData } +} + +/** + * Verify and decrypt a snapshot message from another client. 
+ *
+ * @param message - Received RealtimeMessage
+ * @param dek - Document encryption key (32 bytes)
+ * @returns Decrypted snapshot with public data
+ * @throws Error if signature verification fails or decryption fails
+ */
+export async function verifyAndDecryptSnapshot(
+  message: RealtimeMessage,
+  dek: Uint8Array
+): Promise<DecryptedSnapshot> {
+  if (message.type !== 'snapshot') {
+    throw new Error(`Expected snapshot message, got ${message.type}`)
+  }
+
+  // 1. Parse public data to get sender's public key
+  const publicData = await fromBase64Json<SnapshotPublicData>(message.publicData)
+  const senderPublicKey = await fromBase64(publicData.pubKey)
+
+  // 2. Build signing message for verification
+  const signingMessage: SigningMessage = {
+    ciphertext: message.ciphertext,
+    nonce: message.nonce,
+    publicData: message.publicData,
+  }
+
+  // 3. Verify signature
+  const signature = await fromBase64(message.signature)
+  const isValid = await verify(senderPublicKey, signature, SIGNATURE_DOMAINS.SNAPSHOT, signingMessage)
+
+  if (!isValid) {
+    throw new Error('Snapshot signature verification failed')
+  }
+
+  // 4. Decrypt the snapshot
+  const ciphertext = await fromBase64(message.ciphertext)
+  const nonce = await fromBase64(message.nonce)
+  const snapshot = await decrypt(dek, ciphertext, nonce)
+
+  return { snapshot, publicData }
+}
+
+// NOTE: verifyAndDecryptAwareness has been removed.
+// Use verifyAndDecryptEphemeralMessage from ephemeral.ts for awareness messages.
+
+// ============================================
+// Server message decryption (init, sync_update)
+// ============================================
+
+/** Result of decrypting an init message */
+export interface DecryptedInit {
+  /** Decrypted Yjs snapshot bytes (null for empty docs) */
+  snapshot: Uint8Array | null
+  /** Sequence number at snapshot */
+  seqAtSnapshot: number
+}
+
+/** Result of decrypting a sync_update message */
+export interface DecryptedSyncUpdate {
+  /** Decrypted Yjs update bytes */
+  update: Uint8Array
+  /** Sequence number */
+  seq: number
+  /** Sender's public key */
+  publicKey: Uint8Array
+}
+
+/**
+ * Decrypt an init message from the server.
+ * Server has already verified the signature.
+ *
+ * @param message - Server init message
+ * @param dek - Document encryption key (32 bytes)
+ * @returns Decrypted snapshot
+ */
+export async function decryptInitSnapshot(
+  message: ServerInitMessage,
+  dek: Uint8Array
+): Promise<DecryptedInit> {
+  // Handle unencrypted/empty snapshots (nonce is null)
+  if (message.snapshot.nonce === null) {
+    // Data is unencrypted - just decode from Base64
+    const snapshot = await fromBase64(message.snapshot.data)
+    // Check if it's an empty/invalid snapshot (too short to be valid Yjs data)
+    // Valid Yjs updates are at least a few bytes long
+    const isEmpty = snapshot.length < 4
+    return {
+      snapshot: isEmpty ? null : snapshot,
+      seqAtSnapshot: message.snapshot.seq_at_snapshot ?? 0,
+    }
+  }
+
+  // Encrypted snapshot - decrypt normally
+  const ciphertext = await fromBase64(message.snapshot.data)
+  const nonce = await fromBase64(message.snapshot.nonce)
+  const snapshot = await decrypt(dek, ciphertext, nonce)
+
+  return {
+    snapshot,
+    seqAtSnapshot: message.snapshot.seq_at_snapshot ?? 0,
+  }
+}
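+
+// Example (sketch): a freshly created document typically arrives as
+//   { type: 'init', snapshot: { data: '', nonce: null, signature: null, seq_at_snapshot: null } }
+// which decryptInitSnapshot maps to { snapshot: null, seqAtSnapshot: 0 },
+// while an encrypted document takes the decrypt(dek, ciphertext, nonce) path.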
+
+/**
+ * Decrypt a sync_update message from the server.
+ * Server has already verified the signature.
+ *
+ * @param message - Server sync_update message
+ * @param dek - Document encryption key (32 bytes)
+ * @returns Decrypted update
+ */
+export async function decryptSyncUpdate(
+  message: ServerSyncUpdate,
+  dek: Uint8Array
+): Promise<DecryptedSyncUpdate> {
+  const ciphertext = await fromBase64(message.update.data)
+  const nonce = await fromBase64(message.update.nonce)
+  const update = await decrypt(dek, ciphertext, nonce)
+  const publicKey = await fromBase64(message.update.public_key)
+
+  return {
+    update,
+    seq: message.update.seq,
+    publicKey,
+  }
+}
+
+// ============================================
+// Message type detection
+// ============================================
+
+/**
+ * Check if a message is a server init message.
+ */
+export function isServerInitMessage(msg: unknown): msg is ServerInitMessage {
+  if (
+    typeof msg !== 'object' ||
+    msg === null ||
+    (msg as ServerInitMessage).type !== 'init' ||
+    !('snapshot' in msg)
+  ) {
+    return false
+  }
+
+  const snapshot = (msg as ServerInitMessage).snapshot
+  // Validate snapshot object exists and has data field
+  // nonce, signature, seq_at_snapshot can be null for unencrypted/empty docs
+  return (
+    typeof snapshot === 'object' &&
+    snapshot !== null &&
+    typeof snapshot.data === 'string' &&
+    (snapshot.nonce === null || typeof snapshot.nonce === 'string') &&
+    (snapshot.signature === null || typeof snapshot.signature === 'string') &&
+    (snapshot.seq_at_snapshot === null || typeof snapshot.seq_at_snapshot === 'number')
+  )
+}
+
+/**
+ * Check if a message is a server sync_update message.
+ */
+export function isServerSyncUpdate(msg: unknown): msg is ServerSyncUpdate {
+  return (
+    typeof msg === 'object' &&
+    msg !== null &&
+    (msg as ServerSyncUpdate).type === 'sync_update' &&
+    'update' in msg
+  )
+}
+
+/**
+ * Check if a message is a relayed RealtimeMessage.
+ */
+export function isRealtimeMessage(msg: unknown): msg is RealtimeMessage {
+  if (typeof msg !== 'object' || msg === null) return false
+  const m = msg as RealtimeMessage
+  return (
+    (m.type === 'update' || m.type === 'snapshot' || m.type === 'awareness') &&
+    typeof m.ciphertext === 'string' &&
+    typeof m.nonce === 'string' &&
+    typeof m.signature === 'string' &&
+    typeof m.publicData === 'string'
+  )
+}
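+
+// Dispatch sketch: how a consumer might route raw socket payloads with the
+// guards above (error handling omitted; the handle* names are hypothetical):
+//
+//   const msg: unknown = JSON.parse(event.data)
+//   if (isServerInitMessage(msg)) await handleInit(msg)
+//   else if (isServerSyncUpdate(msg)) await handleSync(msg)
+//   else if (isRealtimeMessage(msg)) await handleRelayed(msg)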
diff --git a/app/src/features/realtime/lib/sync.ts b/app/src/features/realtime/lib/sync.ts
new file mode 100644
index 00000000..b71ad297
--- /dev/null
+++ b/app/src/features/realtime/lib/sync.ts
@@ -0,0 +1,1242 @@
+/**
+ * Realtime Sync
+ *
+ * Synchronization state machine for Yjs documents.
+ * Replaces y-websocket with encrypted communication.
+ */
+
+import type { Awareness } from 'y-protocols/awareness'
+import type * as Y from 'yjs'
+
+import { getDocument, getDocumentKey, getMyWorkspaceKey } from '@/shared/api/client'
+import { getSodium, fromBase64 } from '@/shared/lib/crypto'
+
+import { getPublishStatus, publishDocument } from '@/entities/public'
+import { updateDocumentTagsFromContent } from '@/entities/tag'
+
+import { decryptDocumentTitle } from '@/features/git-sync/lib/sync'
+import { getKeyVaultService, fetchWorkspaceKek } from '@/features/security'
+
+import {
+  createEphemeralSession,
+  createEphemeralMessage,
+  createInitializeMessage,
+  verifyAndDecryptEphemeralMessage,
+  generateSessionId,
+  type EphemeralSession,
+  type EphemeralMessage,
+  type EphemeralPublicData as EphemeralPublicDataFromEphemeral,
+} from './ephemeral'
+import {
+  createUpdate,
+  createSnapshot,
+  verifyAndDecryptUpdate,
+  verifyAndDecryptSnapshot,
+  decryptInitSnapshot,
+  decryptSyncUpdate,
+  isServerInitMessage,
+  isServerSyncUpdate,
+  isRealtimeMessage,
+  type ServerMessage,
+  type ServerInitMessage,
+  type UpdatePublicData,
+  type SnapshotPublicData,
+} from './messages'
+
+// ============================================
+// Types
+// ============================================
+
+/** Sync status */
+export type SyncStatus = 'disconnected' | 'connecting' | 'syncing' | 'ready' | 'error'
+
+/** Sync state */
+export interface SyncState {
+  status: SyncStatus
+  lastSeq: number
+  localClock: number
+  currentSnapshotId: string | null
+  updateClocks: Map<string, number>
+  error: string | null
+}
+
+/** Status event payload (compatible with y-websocket) */
+export interface StatusEvent {
+  status: 'connecting' | 'connected' | 'disconnected'
+}
+
+/** Server error message (sent when persistence fails) */
+export interface ServerErrorMessage {
+  type: 'error'
+  error: string
+  document_id: string
+}
+
+/** Check if a message is a server error message */
+function isServerErrorMessage(msg: unknown): msg is ServerErrorMessage {
+  return (
+    typeof msg === 'object' &&
+    msg !== null &&
+    (msg as ServerErrorMessage).type === 'error' &&
+    typeof (msg as ServerErrorMessage).error === 'string'
+  )
+}
+
+/** Status event handler */
+export type StatusEventHandler = (event: StatusEvent) => void
+
+/** Share mode options for anonymous access via share links */
+export interface ShareModeOptions {
+  /** Pre-decrypted DEK (decrypted using share key) */
+  dek: Uint8Array
+}
+
+/** Options for creating a connection */
+export interface ConnectionOptions {
+  token?: string | null
+  connect?: boolean
+  workspaceId: string
+  /** For share-based access - if provided, skips workspace-based key derivation */
+  shareMode?: ShareModeOptions
+  /** Callback to fetch encrypted KEK from API */
+  fetchKek?: () => Promise<string>
+  /** Callback to fetch encrypted DEK from API */
+  fetchDek?: () => Promise<{ encryptedDek: string; nonce: string }>
+}
+
+/** Connection interface (compatible with WebsocketProvider API) */
+export interface Connection {
+  awareness: Awareness
+  readonly connected: boolean
+  readonly syncState: SyncState
+  /** Whether the connection should automatically connect */
+  shouldConnect: boolean
+  connect(): void
+  disconnect(): void
+  destroy(): void
+  /** Listen to status events */
+  on(event: 'status', handler: StatusEventHandler): void
+  /** Stop listening to status events */
+  off(event: 'status', handler: StatusEventHandler): void
+}
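+
+// Consumer sketch (illustrative): the interface mirrors y-websocket's
+// WebsocketProvider, so existing call sites mostly swap the constructor for
+// createConnection() defined at the bottom of this file:
+//
+//   const conn = await createConnection(wsUrl, doc, documentId, { workspaceId })
+//   conn.on('status', ({ status }) => console.log('[realtime]', status))
+//   // ...later: conn.disconnect() or conn.destroy()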
+// ============================================
+// Constants
+// ============================================
+
+/** Number of updates before creating a new snapshot */
+const SNAPSHOT_THRESHOLD = 100
+
+/** Debounce delay for tag updates in milliseconds (auto-save style) */
+const TAG_UPDATE_DEBOUNCE_MS = 2000
+
+/** Debounce delay for public content updates in milliseconds */
+const PUBLIC_CONTENT_UPDATE_DEBOUNCE_MS = 2000
+
+/** Reconnect delay in milliseconds */
+const RECONNECT_DELAY = 1000
+
+/** Max reconnect delay */
+const MAX_RECONNECT_DELAY = 30000
+
+// ============================================
+// Utility Functions
+// ============================================
+
+/**
+ * Compute snapshot proof chain hash.
+ * Uses BLAKE2b to create a hash of parent snapshot info.
+ */
+async function computeSnapshotProof(
+  parentSnapshotId: string,
+  parentCiphertextHash: string,
+  updateClocks: Record<string, number>
+): Promise<string> {
+  const sodium = await getSodium()
+
+  // Build proof data: parentSnapshotId || parentCiphertextHash || sorted(updateClocks)
+  const clocksJson = JSON.stringify(
+    Object.entries(updateClocks).sort(([a], [b]) => a.localeCompare(b))
+  )
+  const proofInput = `${parentSnapshotId}:${parentCiphertextHash}:${clocksJson}`
+
+  // Hash with BLAKE2b (32 bytes)
+  const proofBytes = sodium.crypto_generichash(32, sodium.from_string(proofInput))
+  return sodium.to_base64(proofBytes, sodium.base64_variants.ORIGINAL)
+}
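+
+// Worked example (illustrative values): with parentSnapshotId 'snap-1',
+// parentCiphertextHash 'abc123', and clocks { B: 7, A: 3 }, the proof input is
+//   'snap-1:abc123:[["A",3],["B",7]]'
+// (entries sorted by key), so every client holding the same parent snapshot
+// and clock map derives the same BLAKE2b hash, and a forged history breaks
+// the chain at the first mismatching proof.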
+
+// ============================================
+// Sync class
+// ============================================
+
+/**
+ * Sync - WebSocket synchronization for Yjs
+ */
+export class Sync {
+  private doc: Y.Doc
+  private documentId: string
+  private workspaceId: string
+  private serverUrl: string
+  private token: string | null
+  private options: ConnectionOptions
+
+  private ws: WebSocket | null = null
+  private awareness: Awareness | null = null
+  private _connected = false
+  private _destroyed = false
+  private _shouldConnect = true
+  private reconnectAttempts = 0
+  private reconnectTimeout: ReturnType<typeof setTimeout> | null = null
+
+  // Event listeners
+  private statusListeners: Set<StatusEventHandler> = new Set()
+
+  private dek: Uint8Array | null = null
+  private signingKeyPair: { publicKey: Uint8Array; privateKey: Uint8Array } | null = null
+  private publicKeyBase64: string | null = null
+
+  // Ephemeral session state for awareness
+  private ephemeralSession: EphemeralSession | null = null
+
+  // Parent snapshot ciphertext hash for proof chain
+  private parentSnapshotCiphertextHash: string = ''
+
+  private state: SyncState = {
+    status: 'disconnected',
+    lastSeq: 0,
+    localClock: 0,
+    currentSnapshotId: null,
+    updateClocks: new Map(),
+    error: null,
+  }
+
+  private pendingUpdates: Uint8Array[] = []
+  private updatesSinceSnapshot = 0
+  private tagUpdateDebounceTimer: ReturnType<typeof setTimeout> | null = null
+  private hasUnsavedTagChanges = false
+
+  // Public content auto-sync state
+  private publicContentDebounceTimer: ReturnType<typeof setTimeout> | null = null
+  private isPublished: boolean | null = null // null = unknown, needs check
+  private hasUnsavedPublicContentChanges = false
+
+  // Event handlers
+  private updateHandler: ((update: Uint8Array, origin: unknown) => void) | null = null
+  private awarenessHandler: ((changes: { added: number[]; updated: number[]; removed: number[] }, origin: unknown) => void) | null = null
+
+  constructor(
+    serverUrl: string,
+    doc: Y.Doc,
+    documentId: string,
+    options: ConnectionOptions
+  ) {
+    this.serverUrl = serverUrl
+    this.doc = doc
+    this.documentId = documentId
+    this.workspaceId = options.workspaceId
+    this.token = options.token ?? null
+    this.options = options
+  }
+
+  /**
+   * Initialize the sync connection.
+   * Must be called before connect().
+   */
+  async initialize(): Promise<void> {
+    // Check for share mode (anonymous access via share links)
+    if (this.options.shareMode) {
+      await this.initializeShareMode()
+      return
+    }
+
+    // Normal mode: workspace-based key derivation
+    const service = getKeyVaultService()
+    await service.ready()
+
+    // Verify session is unlocked
+    service.ensureUnlocked()
+
+    // Get signing key pair (via underlying KeyManager)
+    this.signingKeyPair = service.keyManager.getSigningKeyPair()
+    const publicKeys = await service.keyManager.getPublicKeysBase64()
+    this.publicKeyBase64 = publicKeys.signingPublicKey
+
+    // Get KEK for this workspace
+    const fetchKek = this.options.fetchKek ?? (async () => {
+      const response = await getMyWorkspaceKey({ id: this.workspaceId })
+      return response.encryptedKek
+    })
+
+    const kek = await service.getWorkspaceKek(this.workspaceId, fetchKek)
+
+    // Get DEK for this document
+    const fetchDek = this.options.fetchDek ?? (async () => {
+      const response = await getDocumentKey({ id: this.documentId })
+      return { encryptedDek: response.encryptedDek, nonce: response.nonce }
+    })
+
+    this.dek = await service.getDocumentDek(this.documentId, kek, fetchDek)
+
+    // Create ephemeral session for awareness
+    this.ephemeralSession = await createEphemeralSession()
+
+    // Initialize Awareness
+    const { Awareness } = await import('y-protocols/awareness')
+    this.awareness = new Awareness(this.doc)
+
+    // Attach doc listeners early to capture all updates (including clock 0).
+    // Updates will be queued in pendingUpdates until WebSocket connects.
+    this.attachDocListeners()
+  }
+
+  /**
+   * Initialize in share mode (anonymous access via share links).
+   * Uses pre-decrypted DEK and ephemeral signing keys.
+   */
+  private async initializeShareMode(): Promise<void> {
+    const shareMode = this.options.shareMode!
+
+    // Use provided DEK (already decrypted using share key)
+    this.dek = shareMode.dek
+
+    // Generate ephemeral signing key pair for this session
+    const sodium = await getSodium()
+    const keyPair = sodium.crypto_sign_keypair()
+    this.signingKeyPair = {
+      publicKey: keyPair.publicKey,
+      privateKey: keyPair.privateKey,
+    }
+    this.publicKeyBase64 = sodium.to_base64(keyPair.publicKey, sodium.base64_variants.ORIGINAL)
+
+    // Create ephemeral session for awareness
+    this.ephemeralSession = await createEphemeralSession()
+
+    // Initialize Awareness
+    const { Awareness } = await import('y-protocols/awareness')
+    this.awareness = new Awareness(this.doc)
+
+    // Attach doc listeners early to capture all updates (including clock 0).
+    // Updates will be queued in pendingUpdates until WebSocket connects.
+    this.attachDocListeners()
+  }
+
+  /**
+   * Get the Awareness instance.
+   */
+  getAwareness(): Awareness {
+    if (!this.awareness) {
+      throw new Error('Sync not initialized. Call initialize() first.')
+    }
+    return this.awareness
+  }
+
+  /**
+   * Check if connected.
+   */
+  get connected(): boolean {
+    return this._connected
+  }
+
+  /**
+   * Get/set whether the connection should be active.
+   */
+  get shouldConnect(): boolean {
+    return this._shouldConnect
+  }
+
+  set shouldConnect(value: boolean) {
+    this._shouldConnect = value
+    if (value && !this._connected && !this.ws) {
+      this.connect()
+    } else if (!value && this._connected) {
+      this.disconnect()
+    }
+  }
+
+  /**
+   * Get current sync state.
+   */
+  get syncState(): SyncState {
+    return { ...this.state }
+  }
+
+  /**
+   * Add event listener.
+   */
+  on(event: 'status', handler: StatusEventHandler): void {
+    if (event === 'status') {
+      this.statusListeners.add(handler)
+    }
+  }
+
+  /**
+   * Remove event listener.
+   */
+  off(event: 'status', handler: StatusEventHandler): void {
+    if (event === 'status') {
+      this.statusListeners.delete(handler)
+    }
+  }
+
+  /**
+   * Emit status event.
+   */
+  private emitStatus(status: 'connecting' | 'connected' | 'disconnected'): void {
+    const event: StatusEvent = { status }
+    for (const handler of this.statusListeners) {
+      try {
+        handler(event)
+      } catch (err) {
+        console.error('[Sync] Error in status handler:', err)
+      }
+    }
+  }
+
+  /**
+   * Connect to the WebSocket server.
+   */
+  connect(): void {
+    if (this._destroyed) return
+    if (this.ws) return // Already connected or connecting
+
+    this.setState({ status: 'connecting', error: null })
+    this.emitStatus('connecting')
+
+    // Build WebSocket URL
+    const params = new URLSearchParams()
+    if (this.token) {
+      params.set('token', this.token)
+    }
+    const queryString = params.toString()
+    const wsUrl = `${this.serverUrl}/${this.documentId}${queryString ? '?' + queryString : ''}`
+
+    this.ws = new WebSocket(wsUrl)
+    this.ws.binaryType = 'arraybuffer'
+
+    this.ws.onopen = this.handleOpen.bind(this)
+    this.ws.onmessage = this.handleMessage.bind(this)
+    this.ws.onclose = this.handleClose.bind(this)
+    this.ws.onerror = this.handleError.bind(this)
+  }
+
+  /**
+   * Disconnect from the WebSocket server.
+   */
+  disconnect(): void {
+    this.cancelReconnect()
+
+    if (this.ws) {
+      this.ws.onopen = null
+      this.ws.onmessage = null
+      this.ws.onclose = null
+      this.ws.onerror = null
+      this.ws.close()
+      this.ws = null
+    }
+
+    this._connected = false
+    this.setState({ status: 'disconnected' })
+    this.emitStatus('disconnected')
+    this.detachDocListeners()
+  }
+
+  /**
+   * Destroy the sync instance.
+   */
+  destroy(): void {
+    this._destroyed = true
+
+    // Flush pending tag update before destroying (skip for share mode)
+    if (this.tagUpdateDebounceTimer) {
+      clearTimeout(this.tagUpdateDebounceTimer)
+      this.tagUpdateDebounceTimer = null
+    }
+    // Fire and forget - don't wait for tag update to complete
+    if (this.hasUnsavedTagChanges && !this.options.shareMode) {
+      this.updateDocumentTags().catch(() => {
+        // Ignore errors on destroy
+      })
+    }
+
+    // Clean up public content update timer (skip for share mode)
+    if (this.publicContentDebounceTimer) {
+      clearTimeout(this.publicContentDebounceTimer)
+      this.publicContentDebounceTimer = null
+    }
+    // Fire and forget - don't wait for public content update to complete
+    if (this.hasUnsavedPublicContentChanges && this.isPublished && !this.options.shareMode) {
+      this.updatePublicContentIfPublished().catch(() => {
+        // Ignore errors on destroy
+      })
+    }
+
+    this.disconnect()
+
+    if (this.awareness) {
+      this.awareness.destroy()
+      this.awareness = null
+    }
+
+    // Clear keys from memory
+    if (this.dek) {
+      this.dek.fill(0)
+      this.dek = null
+    }
+    if (this.signingKeyPair) {
+      this.signingKeyPair.privateKey.fill(0)
+      this.signingKeyPair = null
+    }
+
+    // Clear ephemeral session
+    this.ephemeralSession = null
+  }
+
+  // ============================================
+  // WebSocket handlers
+  // ============================================
+
+  private async handleOpen(): Promise<void> {
+    this._connected = true
+    this.reconnectAttempts = 0
+    this.setState({ status: 'syncing' })
+    this.emitStatus('connected')
+
+    // Doc listeners are already attached in initialize().
+    // Flush any updates that were queued before connection was ready.
+    await this.flushPendingUpdates()
+
+    // Send initialize message to announce presence (4-step handshake)
+    await this.sendInitializeMessage()
+  }
+
+  /**
+   * Flush pending updates that were queued before WebSocket connected.
+   */
+  private async flushPendingUpdates(): Promise<void> {
+    if (this.pendingUpdates.length === 0) return
+
+    const updates = [...this.pendingUpdates]
+    this.pendingUpdates = []
+
+    for (const update of updates) {
+      await this.handleLocalUpdate(update)
+    }
+  }
+
+  /**
+   * Send initialize message to announce presence to other clients.
+   */
+  private async sendInitializeMessage(): Promise<void> {
+    // Capture references at the start to avoid race conditions during async operations
+    const ws = this.ws
+    const dek = this.dek
+    const signingKeyPair = this.signingKeyPair
+    const publicKeyBase64 = this.publicKeyBase64
+    const ephemeralSession = this.ephemeralSession
+
+    if (!this._connected || !ws || !dek || !signingKeyPair || !publicKeyBase64 || !ephemeralSession) {
+      return
+    }
+
+    try {
+      const publicData: EphemeralPublicDataFromEphemeral = {
+        docId: this.documentId,
+        pubKey: publicKeyBase64,
+      }
+
+      const { message, updatedSession } = await createInitializeMessage(
+        publicData,
+        dek,
+        signingKeyPair,
+        ephemeralSession
+      )
+      this.ephemeralSession = updatedSession
+
+      // Check if WebSocket is still open before sending
+      if (ws.readyState !== WebSocket.OPEN) {
+        return
+      }
+
+      ws.send(JSON.stringify({ type: 'awareness', ...message }))
+    } catch (err) {
+      console.error('[Sync] Error sending initialize message:', err)
+    }
+  }
+
+  private async handleMessage(event: MessageEvent): Promise<void> {
+    try {
+      // Parse message
+      let message: ServerMessage
+      if (typeof event.data === 'string') {
+        message = JSON.parse(event.data)
+      } else {
+        // Binary message - convert to string then parse
+        const text = new TextDecoder().decode(event.data)
+        message = JSON.parse(text)
+      }
+
+      await this.processMessage(message)
+    } catch (err) {
+      console.error('[Sync] Error processing message:', err)
+    }
+  }
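+
+  // Example payload handled by the error branch in processMessage() below
+  // (shape from ServerErrorMessage; the values are illustrative):
+  //
+  //   { "type": "error", "error": "failed to persist update", "document_id": "doc-123" }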
+
+  private handleClose(event: CloseEvent): void {
+    this._connected = false
+    this.ws = null
+    // Don't detach listeners here - keep capturing updates for when we reconnect.
+    // Listeners will be detached in disconnect() or destroy().
+
+    if (!this._destroyed && event.code !== 1000 && this._shouldConnect) {
+      // Abnormal close - attempt reconnect
+      this.scheduleReconnect()
+      this.emitStatus('disconnected')
+    } else {
+      this.setState({ status: 'disconnected' })
+      this.emitStatus('disconnected')
+    }
+  }
+
+  private handleError(_event: Event): void {
+    this.setState({ status: 'error', error: 'WebSocket error' })
+  }
+
+  // ============================================
+  // Message processing
+  // ============================================
+
+  private async processMessage(message: ServerMessage): Promise<void> {
+    if (!this.dek) {
+      console.error('[Sync] DEK not available')
+      return
+    }
+
+    // Handle error messages from server
+    if (isServerErrorMessage(message)) {
+      await this.handleErrorMessage(message)
+      return
+    }
+
+    if (isServerInitMessage(message)) {
+      await this.handleInitMessage(message)
+    } else if (isServerSyncUpdate(message)) {
+      await this.handleSyncUpdate(message)
+    } else if (isRealtimeMessage(message)) {
+      await this.handleRelayedMessage(message)
+    } else {
+      console.warn('[Sync] Unknown message type:', message)
+    }
+  }
+
+  private async handleErrorMessage(message: ServerErrorMessage): Promise<void> {
+    console.error('[Sync] Server error:', message.error, 'document:', message.document_id)
+    this.setState({
+      status: 'error',
+      error: message.error,
+    })
+  }
+
+  private async handleInitMessage(message: ServerInitMessage): Promise<void> {
+    try {
+      const { snapshot, seqAtSnapshot } = await decryptInitSnapshot(message, this.dek!)
+
+      // Apply snapshot to document (skip if null/empty)
+      if (snapshot !== null) {
+        const Y = await import('yjs')
+        Y.applyUpdateV2(this.doc, snapshot, 'e2ee-remote')
+      }
+
+      this.setState({
+        lastSeq: seqAtSnapshot,
+        status: 'syncing',
+      })
+    } catch (err) {
+      console.error('[Sync] Error processing init message:', err)
+      this.setState({ status: 'error', error: 'Failed to process init message' })
+    }
+  }
+
+  private async handleSyncUpdate(message: { type: 'sync_update'; update: { data: string; nonce: string; signature: string; public_key: string; seq: number } }): Promise<void> {
+    try {
+      const { update, seq } = await decryptSyncUpdate(message, this.dek!)
+
+      // Apply update to document
+      const Y = await import('yjs')
+      Y.applyUpdateV2(this.doc, update, 'e2ee-remote')
+
+      this.setState({ lastSeq: Math.max(this.state.lastSeq, seq) })
+
+      // Check if we've received all sync updates
+      if (this.state.status === 'syncing') {
+        this.setState({ status: 'ready' })
+      }
+    } catch (err) {
+      console.error('[Sync] Error processing sync_update:', err)
+    }
+  }
+
+  private async handleRelayedMessage(message: { type: 'update' | 'snapshot' | 'awareness'; ciphertext: string; nonce: string; signature: string; publicData: string }): Promise<void> {
+    try {
+      if (message.type === 'update') {
+        const { update, publicData } = await verifyAndDecryptUpdate(message, this.dek!)
+
+        // Skip our own updates
+        if (publicData.pubKey === this.publicKeyBase64) {
+          return
+        }
+
+        const Y = await import('yjs')
+        Y.applyUpdateV2(this.doc, update, 'e2ee-remote')
+
+        // Update clocks
+        const currentClock = this.state.updateClocks.get(publicData.pubKey) ?? 0
+        if (publicData.clock > currentClock) {
+          this.state.updateClocks.set(publicData.pubKey, publicData.clock)
+        }
+
+        this.updatesSinceSnapshot++
+      } else if (message.type === 'snapshot') {
+        const { snapshot, publicData } = await verifyAndDecryptSnapshot(message, this.dek!)
+
+        // Skip our own snapshots
+        if (publicData.pubKey === this.publicKeyBase64) {
+          return
+        }
+
+        const Y = await import('yjs')
+        Y.applyUpdateV2(this.doc, snapshot, 'e2ee-remote')
+
+        // Update snapshot info
+        this.setState({ currentSnapshotId: publicData.snapshotId })
+        this.updatesSinceSnapshot = 0
+
+        // Store ciphertext hash for proof chain
+        const sodium = await getSodium()
+        const ciphertextBytes = await fromBase64(message.ciphertext)
+        const hash = sodium.crypto_generichash(32, ciphertextBytes)
+        this.parentSnapshotCiphertextHash = sodium.to_base64(hash, sodium.base64_variants.ORIGINAL)
+      } else if (message.type === 'awareness') {
+        await this.handleAwarenessMessage(message)
+      }
+    } catch (err) {
+      console.error(`[Sync] Error processing ${message.type}:`, err)
+    }
+  }
+
+  /**
+   * Handle incoming awareness message with session handshake.
+   */
+  private async handleAwarenessMessage(message: { type: 'update' | 'snapshot' | 'awareness'; ciphertext: string; nonce: string; signature: string; publicData: string }): Promise<void> {
+    if (!this.dek || !this.signingKeyPair || !this.ephemeralSession) {
+      return
+    }
+
+    // Parse publicData for pubKey check (the original is a Base64 string)
+    const parsedPublicData = JSON.parse(atob(message.publicData)) as EphemeralPublicDataFromEphemeral
+
+    // Skip our own messages
+    if (parsedPublicData.pubKey === this.publicKeyBase64) {
+      return
+    }
+
+    // Convert wire format to EphemeralMessage (publicData stays as Base64 string)
+    const ephemeralMessage: EphemeralMessage = {
+      ciphertext: message.ciphertext,
+      nonce: message.nonce,
+      signature: message.signature,
+      publicData: message.publicData,
+    }
+
+    // Verify, decrypt, and handle session handshake
+    const result = await verifyAndDecryptEphemeralMessage(
+      ephemeralMessage,
+      this.dek,
+      this.documentId,
+      this.ephemeralSession,
+      this.signingKeyPair
+    )
+
+    // Update session state
+    if (result.validSessions) {
+      this.ephemeralSession = {
+        ...this.ephemeralSession,
+        validSessions: result.validSessions,
+      }
+    }
+
+    // Send proof response if requested
+    if (result.proof) {
+      await this.sendProofResponse(
+        parsedPublicData,
+        result.proof,
+        result.requestProof ?? false
+      )
+    }
+
+    // Apply awareness update if content is available
+    if (result.content && this.awareness) {
+      try {
+        const { applyAwarenessUpdate } = await import('y-protocols/awareness')
+        applyAwarenessUpdate(this.awareness, result.content, null)
+      } catch (err) {
+        console.error('[Sync] Error applying awareness update:', err)
+      }
+    }
+  }
+
+  /**
+   * Send proof response message.
+   */
+  private async sendProofResponse(
+    _remotePublicData: EphemeralPublicDataFromEphemeral,
+    proof: Uint8Array,
+    requestProof: boolean
+  ): Promise<void> {
+    // Capture references at the start to avoid race conditions during async operations
+    const ws = this.ws
+    const dek = this.dek
+    const signingKeyPair = this.signingKeyPair
+    const publicKeyBase64 = this.publicKeyBase64
+    const ephemeralSession = this.ephemeralSession
+
+    if (!this._connected || !ws || !dek || !signingKeyPair || !publicKeyBase64 || !ephemeralSession) {
+      return
+    }
+
+    try {
+      const publicData: EphemeralPublicDataFromEphemeral = {
+        docId: this.documentId,
+        pubKey: publicKeyBase64,
+      }
+
+      const messageType = requestProof ? 'proofAndRequestProof' : 'proof'
+      const { message, updatedSession } = await createEphemeralMessage(
+        proof,
+        messageType,
+        publicData,
+        dek,
+        signingKeyPair,
+        ephemeralSession
+      )
+      this.ephemeralSession = updatedSession
+
+      // Check if WebSocket is still open before sending
+      if (ws.readyState !== WebSocket.OPEN) {
+        return
+      }
+
+      ws.send(JSON.stringify({ type: 'awareness', ...message }))
+    } catch (err) {
+      console.error('[Sync] Error sending proof response:', err)
+    }
+  }
+
+  // ============================================
+  // Local change handlers
+  // ============================================
+
+  private attachDocListeners(): void {
+    // Doc update listener
+    this.updateHandler = (update: Uint8Array, origin: unknown) => {
+      if (origin === 'e2ee-remote') {
+        return // Don't re-broadcast remote updates
+      }
+      this.handleLocalUpdate(update).catch(console.error)
+    }
+    this.doc.on('updateV2', this.updateHandler)
+
+    // Awareness listener
+    if (this.awareness) {
+      this.awarenessHandler = ({ added, updated, removed }, origin) => {
+        if (origin === 'e2ee-remote') {
+          return
+        }
+        const changedClients = [...added, ...updated, ...removed]
+        this.handleLocalAwarenessChange(changedClients).catch(console.error)
+      }
+      this.awareness.on('update', this.awarenessHandler)
+    }
+  }
+
+  private detachDocListeners(): void {
+    if (this.updateHandler) {
+      this.doc.off('updateV2', this.updateHandler)
+      this.updateHandler = null
+    }
+
+    if (this.awareness && this.awarenessHandler) {
+      this.awareness.off('update', this.awarenessHandler)
+      this.awarenessHandler = null
+    }
+  }
+
+  private async handleLocalUpdate(update: Uint8Array): Promise<void> {
+    // Capture references at the start to avoid race conditions during async operations
+    const ws = this.ws
+    const dek = this.dek
+    const signingKeyPair = this.signingKeyPair
+    const publicKeyBase64 = this.publicKeyBase64
+
+    if (!this._connected || !ws || !dek || !signingKeyPair || !publicKeyBase64) {
+      // Queue update for later
+      this.pendingUpdates.push(update)
+      return
+    }
+
+    // Increment local clock
+    this.state.localClock++
+
+    const publicData: UpdatePublicData = {
+      docId: this.documentId,
+      pubKey: publicKeyBase64,
+      refSnapshotId: this.state.currentSnapshotId ?? '',
+      clock: this.state.localClock,
+    }
+
+    try {
+      const message = await createUpdate(update, dek, signingKeyPair, publicData)
+
+      // Check if WebSocket is still open before sending
+      if (ws.readyState !== WebSocket.OPEN) {
+        this.pendingUpdates.push(update)
+        return
+      }
+
+      ws.send(JSON.stringify(message))
+
+      this.updatesSinceSnapshot++
+
+      // Check if we should create a snapshot
+      if (this.updatesSinceSnapshot >= SNAPSHOT_THRESHOLD) {
+        await this.createAndSendSnapshot()
+      }
+
+      // Schedule debounced tag update (auto-save style: 2s after last edit)
+      // Skip for share mode - shared documents are read-only for these features
+      if (!this.options.shareMode) {
+        this.scheduleDebouncedTagUpdate()
+
+        // Schedule debounced public content update if document is published
+        this.scheduleDebouncedPublicContentUpdate()
+      }
+    } catch (err) {
+      console.error('[Sync] Error sending update:', err)
+    }
+  }
+
+  /**
+   * Schedule a debounced tag update.
+   * Fires 2 seconds after the last edit (auto-save pattern).
+   */
+  private scheduleDebouncedTagUpdate(): void {
+    this.hasUnsavedTagChanges = true
+
+    if (this.tagUpdateDebounceTimer) {
+      clearTimeout(this.tagUpdateDebounceTimer)
+    }
+
+    this.tagUpdateDebounceTimer = setTimeout(() => {
+      this.tagUpdateDebounceTimer = null
+      this.hasUnsavedTagChanges = false
+      this.updateDocumentTags().catch((err) => {
+        console.warn('[Sync] Debounced tag update failed:', err)
+      })
+    }, TAG_UPDATE_DEBOUNCE_MS)
+  }
+
+  /**
+   * Schedule a debounced public content update.
+   * Fires 2 seconds after the last edit if document is published.
+   */
+  private scheduleDebouncedPublicContentUpdate(): void {
+    this.hasUnsavedPublicContentChanges = true
+
+    if (this.publicContentDebounceTimer) {
+      clearTimeout(this.publicContentDebounceTimer)
+    }
+
+    this.publicContentDebounceTimer = setTimeout(() => {
+      this.publicContentDebounceTimer = null
+      this.hasUnsavedPublicContentChanges = false
+      this.updatePublicContentIfPublished().catch((err) => {
+        console.warn('[Sync] Debounced public content update failed:', err)
+      })
+    }, PUBLIC_CONTENT_UPDATE_DEBOUNCE_MS)
+  }
+
+  /**
+   * Update public content if the document is published.
+   * Caches publish status to avoid unnecessary API calls.
+   */
+  private async updatePublicContentIfPublished(): Promise<void> {
+    try {
+      // Check publish status (use cached value if available)
+      if (this.isPublished === null) {
+        try {
+          const status = await getPublishStatus(this.documentId)
+          this.isPublished = !!status?.public_url
+        } catch {
+          // If we can't get status, assume not published
+          this.isPublished = false
+        }
+      }
+
+      if (!this.isPublished) {
+        return
+      }
+
+      // Get decrypted content
+      const content = this.doc.getText('content').toString()
+      if (!content) return
+
+      // Get document metadata for title
+      const meta = await getDocument({ id: this.documentId })
+      const plaintextTitle = await decryptDocumentTitle(meta, this.workspaceId)
+
+      // Update public content
+      await publishDocument(this.documentId, {
+        plaintextTitle,
+        plaintextContent: content,
+      })
+    } catch (err) {
+      // Don't throw - public content update failure shouldn't break sync
+      console.warn('[Sync] Public content update failed:', err)
+    }
+  }
+
+  /**
+   * Set the published status (called from ShareDialog when publish status changes)
+   */
+  setPublishedStatus(isPublished: boolean): void {
+    this.isPublished = isPublished
+  }
+
+  private async handleLocalAwarenessChange(changedClients: number[]): Promise<void> {
+    // Capture references at the start to avoid race conditions during async operations
+    const ws = this.ws
+    const dek = this.dek
+    const signingKeyPair = this.signingKeyPair
+    const publicKeyBase64 = this.publicKeyBase64
+    const awareness = this.awareness
+    const ephemeralSession = this.ephemeralSession
+
+    if (!this._connected || !ws || !dek || !signingKeyPair || !publicKeyBase64 || !awareness || !ephemeralSession) {
+      return
+    }
+
+    try {
+      const { encodeAwarenessUpdate } = await import('y-protocols/awareness')
+      const awarenessUpdate = encodeAwarenessUpdate(awareness, changedClients)
+
+      const publicData: EphemeralPublicDataFromEphemeral = {
+        docId: this.documentId,
+        pubKey: publicKeyBase64,
+      }
+
+      // Use new ephemeral message API with session handshake
+      const { message, updatedSession } = await createEphemeralMessage(
+        awarenessUpdate,
+        'message',
+        publicData,
+        dek,
+        signingKeyPair,
+        ephemeralSession
+      )
+      this.ephemeralSession = updatedSession
+
+      // Check again if WebSocket is still open before sending
+      if (ws.readyState !== WebSocket.OPEN) {
+        return
+      }
+
+      ws.send(JSON.stringify({ type: 'awareness', ...message }))
+    } catch (err) {
+      console.error('[Sync] Error sending awareness:', err)
+    }
+  }
+
+  private async createAndSendSnapshot(): Promise<void> {
+    // Capture references at the start to avoid race conditions during async operations
+    const ws = this.ws
+    const dek = this.dek
+    const signingKeyPair = this.signingKeyPair
+    const publicKeyBase64 = this.publicKeyBase64
+
+    if (!this._connected || !ws || !dek || !signingKeyPair || !publicKeyBase64) {
+      return
+    }
+
+    try {
+      const Y = await import('yjs')
+      const snapshot = Y.encodeStateAsUpdateV2(this.doc)
+
+      // Generate snapshot ID
+      const snapshotId = await generateSessionId()
+
+      // Build update clocks record
+      const updateClocks: Record<string, number> = {}
+      for (const [key, value] of this.state.updateClocks) {
+        updateClocks[key] = value
+      }
+      // Include our own clock
+      updateClocks[publicKeyBase64] = this.state.localClock
+
+      // Compute proof chain
+      const parentSnapshotProof = this.state.currentSnapshotId
+        ? await computeSnapshotProof(
+            this.state.currentSnapshotId,
+            this.parentSnapshotCiphertextHash,
+            updateClocks
+          )
+        : '' // No proof for first snapshot
+
+      const publicData: SnapshotPublicData = {
+        docId: this.documentId,
+        pubKey: publicKeyBase64,
+        snapshotId,
+        parentSnapshotId: this.state.currentSnapshotId ?? '',
+        parentSnapshotProof,
+        parentSnapshotUpdateClocks: updateClocks,
+      }
+
+      const message = await createSnapshot(snapshot, dek, signingKeyPair, publicData)
+
+      // Check if WebSocket is still open before sending
+      if (ws.readyState !== WebSocket.OPEN) {
+        return
+      }
+
+      ws.send(JSON.stringify(message))
+
+      // Compute and store hash of our new snapshot's ciphertext for future proofs
+      const sodium = await getSodium()
+      const ciphertextBytes = await fromBase64(message.ciphertext)
+      const hash = sodium.crypto_generichash(32, ciphertextBytes)
+      this.parentSnapshotCiphertextHash = sodium.to_base64(hash, sodium.base64_variants.ORIGINAL)
+
+      // Update state
+      this.setState({ currentSnapshotId: snapshotId })
+      this.updatesSinceSnapshot = 0
+
+      // Phase 14: Extract and update tags from document content
+      // Do this in background to not block the sync flow
+      // Skip for share mode - shared documents are read-only for these features
+      if (!this.options.shareMode) {
+        this.updateDocumentTags().catch((err) => {
+          console.warn('[Sync] Error updating document tags:', err)
+        })
+      }
+    } catch (err) {
+      console.error('[Sync] Error sending snapshot:', err)
+    }
+  }
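+
+  // Rotation sketch: the very first snapshot ships with
+  //   { parentSnapshotId: '', parentSnapshotProof: '' }
+  // while every later one references its predecessor via
+  //   { parentSnapshotId: <previous snapshotId>, parentSnapshotProof: computeSnapshotProof(...) }
+  // so verifiers can walk the chain back to the document's origin.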
+
+  /**
+   * Extract tags from document content and update on server.
+   * Called automatically after each snapshot.
+   */
+  private async updateDocumentTags(): Promise<void> {
+    try {
+      // Extract text content from Yjs document
+      const content = this.doc.getText('content').toString()
+      if (!content) return
+
+      // Fetch KEK and update tags
+      const kek = await fetchWorkspaceKek(this.workspaceId)
+      await updateDocumentTagsFromContent(this.documentId, kek, content)
+    } catch (err) {
+      // Don't throw - tag update failure shouldn't break sync
+      console.warn('[Sync] Tag extraction failed:', err)
+    }
+  }
+
+  // ============================================
+  // Reconnection
+  // ============================================
+
+  private scheduleReconnect(): void {
+    if (this._destroyed || !this._shouldConnect) return
+
+    const delay = Math.min(
+      RECONNECT_DELAY * Math.pow(2, this.reconnectAttempts),
+      MAX_RECONNECT_DELAY
+    )
+
+    this.reconnectAttempts++
+    this.setState({ status: 'disconnected' })
+
+    this.reconnectTimeout = setTimeout(() => {
+      this.reconnectTimeout = null
+      if (this._shouldConnect) {
+        this.connect()
+      }
+    }, delay)
+  }
+
+  private cancelReconnect(): void {
+    if (this.reconnectTimeout) {
+      clearTimeout(this.reconnectTimeout)
+      this.reconnectTimeout = null
+    }
+  }
+
+  // ============================================
+  // State management
+  // ============================================
+
+  private setState(updates: Partial<SyncState>): void {
+    this.state = { ...this.state, ...updates }
+  }
+}
+
+// ============================================
+// Factory function
+// ============================================
+
+/**
+ * Create a connection for a Yjs document.
+ *
+ * This replaces y-websocket's WebsocketProvider.
+ *
+ * @param serverUrl - WebSocket server URL
+ * @param doc - Yjs document
+ * @param documentId - Document ID
+ * @param options - Connection options
+ * @returns Connection interface
+ */
+export async function createConnection(
+  serverUrl: string,
+  doc: Y.Doc,
+  documentId: string,
+  options: ConnectionOptions
+): Promise<Connection> {
+  const sync = new Sync(serverUrl, doc, documentId, options)
+  await sync.initialize()
+
+  if (options.connect !== false) {
+    sync.connect()
+  }
+
+  return {
+    awareness: sync.getAwareness(),
+    get connected() {
+      return sync.connected
+    },
+    get syncState() {
+      return sync.syncState
+    },
+    get shouldConnect() {
+      return sync.shouldConnect
+    },
+    set shouldConnect(value: boolean) {
+      sync.shouldConnect = value
+    },
+    connect: () => sync.connect(),
+    disconnect: () => sync.disconnect(),
+    destroy: () => sync.destroy(),
+    on: (event: 'status', handler: StatusEventHandler) => sync.on(event, handler),
+    off: (event: 'status', handler: StatusEventHandler) => sync.off(event, handler),
+  }
+}
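+
+// Wiring sketch (illustrative; bindEditor is a hypothetical stand-in for
+// whatever editor binding the app uses):
+//
+//   const doc = new Y.Doc()
+//   const conn = await createConnection(WS_URL, doc, documentId, { workspaceId, token })
+//   bindEditor(doc.getText('content'), conn.awareness)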
diff --git a/app/src/features/search/hooks/useClientSearch.ts b/app/src/features/search/hooks/useClientSearch.ts
new file mode 100644
index 00000000..46495d73
--- /dev/null
+++ b/app/src/features/search/hooks/useClientSearch.ts
@@ -0,0 +1,280 @@
+/**
+ * Client-side search hook for E2EE documents
+ *
+ * Replaces server-side search with MiniSearch in a Web Worker.
+ * Provides staged indexing: titles first (instant search), then content (background).
+ */
+
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
+
+import { listDocuments, type Document } from '@/entities/document'
+import { encryptTagForApi } from '@/entities/tag'
+
+import { fetchWorkspaceKek } from '@/features/security'
+
+import { fetchDecryptedContent } from '../lib/fetch-decrypted-content'
+import type { TitleDocument, WorkerRequest, WorkerResponse } from '../workers/search.worker'
+
+// Re-export document hit type for SearchDialog compatibility
+export type DocumentHit = Pick<Document, 'id' | 'title' | 'path' | 'type'>
+
+export interface UseClientSearchParams {
+  open: boolean
+  query: string | null
+  tag: string | null
+  workspaceId: string | null
+}
+
+export type SearchState =
+  | { status: 'idle' }
+  | { status: 'initializing' }
+  | { status: 'titles_ready'; indexed: number; total: number }
+  | { status: 'indexing_content'; indexed: number; total: number }
+  | { status: 'ready' }
+  | { status: 'limited'; documentCount: number }
+
+export interface UseClientSearchResult {
+  docs: DocumentHit[]
+  loading: boolean
+  searchState: SearchState
+}
+
+// Max documents to index for content (memory limit)
+const MAX_DOCUMENTS_FOR_CONTENT = 1000
+
+// Debounce delay for search (ms)
+const SEARCH_DEBOUNCE_MS = 160
+
+export function useClientSearch(params: UseClientSearchParams): UseClientSearchResult {
+  const { open, query, tag, workspaceId } = params
+
+  // All documents from API
+  const [allDocs, setAllDocs] = useState<DocumentHit[]>([])
+
+  // Search result IDs (null = no search, show all)
+  const [searchResultIds, setSearchResultIds] = useState<string[] | null>(null)
+
+  // Loading state
+  const [loading, setLoading] = useState(false)
+
+  // Search/index state
+  const [searchState, setSearchState] = useState<SearchState>({ status: 'idle' })
+
+  // Worker reference
+  const workerRef = useRef<Worker | null>(null)
+
+  // Track content indexing progress
+  const contentIndexedCountRef = useRef(0)
+
+  // Debounced query
+  const [debouncedQuery, setDebouncedQuery] = useState<string | null>(null)
+
+  // Debounce query changes
+  useEffect(() => {
+    const timer = setTimeout(() => {
+      setDebouncedQuery(query)
+    }, SEARCH_DEBOUNCE_MS)
+
+    return () => clearTimeout(timer)
+  }, [query])
+
+  // Initialize worker
+  useEffect(() => {
+    workerRef.current = new Worker(new URL('../workers/search.worker.ts', import.meta.url), {
+      type: 'module',
+    })
+
+    workerRef.current.onmessage = (event: MessageEvent<WorkerResponse>) => {
+      const { type } = event.data
+
+      switch (type) {
+        case 'indexed-titles': {
+          const { count } = event.data as { type: 'indexed-titles'; count: number }
+          setSearchState({ status: 'titles_ready', indexed: count, total: count })
+          break
+        }
+
+        case 'indexed-content': {
+          contentIndexedCountRef.current++
+          // Update state periodically (every 10 documents)
+          if (contentIndexedCountRef.current % 10 === 0) {
+            setSearchState((prev) => {
+              if (prev.status === 'indexing_content') {
+                return { ...prev, indexed: contentIndexedCountRef.current }
+              }
+              return prev
+            })
+          }
+          break
+        }
+
+        case 'search-result': {
+          const { ids } = event.data as { type: 'search-result'; ids: string[] }
+          setSearchResultIds(ids)
+          setLoading(false)
+          break
+        }
+
+        case 'ready': {
+          setSearchState({ status: 'ready' })
+          break
+        }
+      }
+    }
+
+    return () => {
+      workerRef.current?.terminate()
+      workerRef.current = null
+    }
+  }, [])
+
+  // Index content in background using requestIdleCallback
+  const indexContentInBackground = useCallback(
+    async (docs: DocumentHit[], wsId: string) => {
+      const docsToIndex = docs.slice(0, MAX_DOCUMENTS_FOR_CONTENT)
+
+      if (docsToIndex.length < docs.length) {
+        setSearchState({ status: 'limited', documentCount: docsToIndex.length })
+      } else {
+        setSearchState({ status: 'indexing_content', indexed: 0, total: docsToIndex.length })
+      }
+
+      contentIndexedCountRef.current = 0
+
+      for (const doc of docsToIndex) {
+        // Use requestIdleCallback for non-blocking indexing
+        await new Promise<void>((resolve) => {
+          if ('requestIdleCallback' in window) {
+            requestIdleCallback(() => resolve(), { timeout: 5000 })
+          } else {
+            setTimeout(resolve, 0)
+          }
+        })
+
+        try {
+          const content = await fetchDecryptedContent(doc.id, wsId)
+
+          workerRef.current?.postMessage({
+            type: 'index-content',
+            payload: { id: doc.id, content },
+          } satisfies WorkerRequest)
+        } catch {
+          // Skip documents that fail to fetch
+        }
+      }
+
+      // Mark as ready when done
+      if (docsToIndex.length === docs.length) {
+        setSearchState({ status: 'ready' })
+      }
+    },
+    []
+  )
+
+  // Phase 1: Load documents and index titles when dialog opens
+  useEffect(() => {
+    if (!open || !workspaceId) {
+      setSearchState({ status: 'idle' })
+      setSearchResultIds(null)
+      setAllDocs([])
+      return
+    }
+
+    let cancelled = false
+    setLoading(true)
+    setSearchState({ status: 'initializing' })
+
+    ;(async () => {
+      try {
+        // Encrypt tag before sending to API (Phase 14 E2EE)
+        let encryptedTag: string | null = null
+        if (tag) {
+          try {
+            const kek = await fetchWorkspaceKek(workspaceId)
+            encryptedTag = await encryptTagForApi(tag, kek)
+          } catch (err) {
+            console.warn('[useClientSearch] Failed to encrypt tag:', err)
+            // If encryption fails (e.g., session locked), proceed without tag filter
+          }
+        }
+
+        const res = await listDocuments({ tag: encryptedTag })
+        if (cancelled) return
+
+        const items = ((res?.items ?? []) as DocumentHit[]).filter((item) => item.type === 'document')
+        setAllDocs(items)
+
+        // Clear previous index and add new titles
+        workerRef.current?.postMessage({ type: 'clear' } satisfies WorkerRequest)
+
+        // Small delay to ensure clear is processed
+        await new Promise<void>((resolve) => setTimeout(resolve, 10))
+
+        // Index titles
+        const titleDocs: TitleDocument[] = items.map((d) => ({
+          id: d.id,
+          title: d.title,
+          path: d.path ?? '',
+        }))
+
+        workerRef.current?.postMessage({
+          type: 'index-titles',
+          payload: { documents: titleDocs },
+        } satisfies WorkerRequest)
+
+        setLoading(false)
+
+        // Phase 2-3: Index content in background
+        indexContentInBackground(items, workspaceId)
+      } catch {
+        if (!cancelled) {
+          setAllDocs([])
+          setLoading(false)
+          setSearchState({ status: 'ready' })
+        }
+      }
+    })()
+
+    return () => {
+      cancelled = true
+    }
+  }, [open, tag, workspaceId, indexContentInBackground])
+
+  // Execute search when debounced query changes
+  useEffect(() => {
+    if (!open) return
+
+    if (!debouncedQuery || debouncedQuery.trim().length === 0) {
+      // No query - show all documents
+      setSearchResultIds(null)
+      return
+    }
+
+    setLoading(true)
+    workerRef.current?.postMessage({
+      type: 'search',
+      payload: { query: debouncedQuery },
+    } satisfies WorkerRequest)
+  }, [open, debouncedQuery])
+
+  // Filter documents based on search results
+  const docs = useMemo(() => {
+    if (searchResultIds === null) {
+      // No search query - return all documents
+      return allDocs
+    }
+
+    // Filter by search results and maintain order
+    const idSet = new Set(searchResultIds)
+    const filtered = allDocs.filter((doc) => idSet.has(doc.id))
+
+    // Sort by search result order
+    return filtered.sort((a, b) => {
+      const aIndex = searchResultIds.indexOf(a.id)
+      const bIndex = searchResultIds.indexOf(b.id)
+      return aIndex - bIndex
+    })
+  }, [allDocs, searchResultIds])
+
+  return { docs, loading, searchState }
+}
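+
+// Component sketch (illustrative): staged indexing surfaces through
+// searchState, so a dialog can render title matches immediately while
+// content is still being indexed. showProgress is a hypothetical UI helper.
+//
+//   const { docs, loading, searchState } = useClientSearch({ open, query, tag: null, workspaceId })
+//   if (searchState.status === 'indexing_content') {
+//     showProgress(searchState.indexed, searchState.total)
+//   }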
diff --git a/app/src/features/search/lib/fetch-decrypted-content.ts b/app/src/features/search/lib/fetch-decrypted-content.ts
new file mode 100644
index 00000000..b4633cfb
--- /dev/null
+++ b/app/src/features/search/lib/fetch-decrypted-content.ts
@@ -0,0 +1,93 @@
+/**
+ * Fetch and decrypt document content for search indexing
+ *
+ * All documents use end-to-end encryption:
+ * 1. Fetches encrypted snapshot and pending updates from API
+ * 2. Decrypts using document DEK (Data Encryption Key)
+ * 3. Reconstructs Yjs document and extracts plain text
+ */
+
+import * as Y from 'yjs'
+
+import {
+  getDocumentContent,
+  type EncryptedUpdateEntry,
+} from '@/shared/api/client'
+
+import {
+  decrypt,
+  fetchDocumentKeys,
+  getSodium,
+  SessionLockedError,
+} from '@/features/security'
+
+/**
+ * Fetch document content and return plain text.
+ *
+ * All documents are encrypted. This function:
+ * 1. Fetches encrypted snapshot and updates from API
+ * 2. Decrypts using document DEK (Data Encryption Key)
+ * 3. Reconstructs Yjs document and extracts text
+ *
+ * @param documentId - Document ID
+ * @param workspaceId - Workspace ID (for KEK lookup)
+ * @returns Plain text content or empty string on failure
+ */
+export async function fetchDecryptedContent(
+  documentId: string,
+  workspaceId: string
+): Promise<string> {
+  try {
+    // 1. Fetch content from API
+    const contentRes = await getDocumentContent({ id: documentId })
+
+    // Check if we have any content to work with
+    const hasSnapshot = contentRes.content && contentRes.content.length > 0
+    const hasUpdates = contentRes.updates && contentRes.updates.length > 0
+
+    if (!hasSnapshot && !hasUpdates) {
+      return ''
+    }
+
+    const sodium = await getSodium()
+    const doc = new Y.Doc()
+
+    // 2. Get decryption keys (throws SessionLockedError if locked)
+    let dek: Uint8Array
+    try {
+      const keys = await fetchDocumentKeys(documentId, workspaceId)
+      dek = keys.dek
+    } catch (err) {
+      if (err instanceof SessionLockedError) {
+        doc.destroy()
+        return ''
+      }
+      throw err
+    }
+
+    // 3. Apply snapshot if present
+    if (hasSnapshot) {
+      const encryptedContent = sodium.from_base64(contentRes.content, sodium.base64_variants.ORIGINAL)
+      const nonce = sodium.from_base64(contentRes.nonce!, sodium.base64_variants.ORIGINAL)
+      const yjsState = await decrypt(dek, encryptedContent, nonce)
+      Y.applyUpdateV2(doc, yjsState)
+    }
+
+    // 4. Apply pending updates
+    if (hasUpdates) {
+      for (const update of contentRes.updates as EncryptedUpdateEntry[]) {
+        const encryptedData = sodium.from_base64(update.data, sodium.base64_variants.ORIGINAL)
+        const nonce = sodium.from_base64(update.nonce!, sodium.base64_variants.ORIGINAL)
+        const yjsUpdate = await decrypt(dek, encryptedData, nonce)
+        Y.applyUpdateV2(doc, yjsUpdate)
+      }
+    }
+
+    const text = doc.getText('content').toString()
+    doc.destroy()
+
+    return text
+  } catch {
+    return ''
+  }
+}
diff --git a/app/src/features/search/lib/tokenizer.ts b/app/src/features/search/lib/tokenizer.ts
new file mode 100644
index 00000000..f56caaa4
--- /dev/null
+++ b/app/src/features/search/lib/tokenizer.ts
@@ -0,0 +1,32 @@
+/**
+ * Japanese + English tokenizer for search indexing
+ */
+
+// TinySegmenter ships without type definitions, so the untyped import error is suppressed
+// @ts-expect-error - tiny-segmenter doesn't have type definitions
+import TinySegmenter from 'tiny-segmenter'
+
+const segmenter = new TinySegmenter()
+
+/**
+ * Tokenize text for both Japanese and English content.
+ * - Japanese: Uses TinySegmenter for compact word segmentation
+ * - English: Simple whitespace split with lowercasing
+ */
+export function tokenize(text: string): string[] {
+  if (!text || typeof text !== 'string') {
+    return []
+  }
+
+  // Japanese tokenization via TinySegmenter
+  const japaneseTokens: string[] = segmenter.segment(text)
+
+  // English tokenization (simple whitespace split)
+  const englishTokens = text
+    .toLowerCase()
+    .split(/\s+/)
+    .filter((token) => token.length > 0)
+
+  // Combine and deduplicate
+  return [...new Set([...japaneseTokens, ...englishTokens])]
+}
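+
+// Example (sketch): mixed Japanese/English input yields both token streams,
+// deduplicated. The exact Japanese segmentation depends on TinySegmenter's
+// heuristics, so the output below is indicative rather than exact:
+//
+//   tokenize('検索エンジン search engine')
+//   // -> ['検索', 'エンジン', ..., 'search', 'engine']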
diff --git a/app/src/features/search/ui/SearchDialog.tsx b/app/src/features/search/ui/SearchDialog.tsx
index 8c8b8d9a..faa9ba22 100644
--- a/app/src/features/search/ui/SearchDialog.tsx
+++ b/app/src/features/search/ui/SearchDialog.tsx
@@ -18,8 +18,13 @@ import {
 } from '@/shared/ui/command'
 import { Dialog, DialogContent } from '@/shared/ui/dialog'
 
-import { fetchDocumentContent, listDocuments, type Document } from '@/entities/document'
-import { listTags } from '@/entities/tag'
+import { listDecryptedTags } from '@/entities/tag'
+
+import { useAuthContext } from '@/features/auth/model/auth-context'
+import { fetchWorkspaceKek } from '@/features/security'
+
+import { useClientSearch, type DocumentHit } from '../hooks/useClientSearch'
+import { fetchDecryptedContent } from '../lib/fetch-decrypted-content'
 
 const trimLeadingOwner = (segments: string[]) => {
   if (segments.length <= 1) return segments
@@ -49,16 +54,14 @@ const normalizePathValue = (path?: string | null) => {
 
 type Props = { open: boolean; onOpenChange: (open: boolean) => void; presetTag?: string | null }
 type TagHit = { name: string; count: number }
-type DocumentHit = Pick<Document, 'id' | 'title' | 'path' | 'type'>
 
 export default function SearchDialog({ open, onOpenChange, presetTag }: Props) {
   const navigate = useNavigate()
   const isMobile = useIsMobile()
+  const { activeWorkspaceId } = useAuthContext()
   const [query, setQuery] = React.useState('')
-  const [docs, setDocs] = React.useState<DocumentHit[]>([])
   const [tags, setTags] = React.useState<TagHit[]>([])
   const [selectedTag, setSelectedTag] = React.useState<string | null>(null)
-  const [loading, setLoading] = React.useState(false)
   const [activeItem, setActiveItem] = React.useState<string | null>(null)
   const [previewContent, setPreviewContent] = React.useState('')
   const [previewError, setPreviewError] = React.useState<string | null>(null)
@@ -75,6 +78,8 @@ export default function SearchDialog({ open, onOpenChange, presetTag }: Props) {
   const docQuery = isTagShortcut ? '' : docQueryInput.trim()
   const serverQuery = !hasPathShortcut && !isTagShortcut ? (docQuery || null) : null
 
+  const { docs, loading } = useClientSearch({ open, query: serverQuery, tag: selectedTag, workspaceId: activeWorkspaceId })
+
   const lastDocValueRef = React.useRef<string | null>(null)
   const previewCache = React.useRef<Map<string, string>>(new Map())
 
@@ -108,7 +113,6 @@ export default function SearchDialog({ open, onOpenChange, presetTag }: Props) {
     if (!open) {
       setQuery('')
       setSelectedTag(null)
-      setDocs([])
      setTags([])
       setActiveItem(null)
       setPreviewContent('')
@@ -119,11 +123,24 @@
       return
     }
 
+    if (!activeWorkspaceId) {
+      setTags([])
+      return
+    }
+
     let cancelled = false
     ;(async () => {
       try {
-        const res = (await listTags(undefined)) as TagHit[]
-        if (!cancelled) setTags(res ?? [])
+        // Fetch KEK and use decrypted tags API for E2EE
+        const kek = await fetchWorkspaceKek(activeWorkspaceId)
+        const decryptedTags = await listDecryptedTags(kek)
+        if (!cancelled) {
+          const mapped = decryptedTags.map((tag) => ({
+            name: tag.name,
+            count: tag.documentCount,
+          }))
+          setTags(mapped)
+        }
       } catch {
         if (!cancelled) setTags([])
       }
@@ -131,7 +148,7 @@
     return () => {
       cancelled = true
     }
-  }, [open])
+  }, [open, activeWorkspaceId])
 
   React.useEffect(() => {
     if (open) {
@@ -139,31 +156,6 @@
     }
   }, [open, presetTag])
 
-  React.useEffect(() => {
-    if (!open) return
-    let active = true
-    const handle = setTimeout(async () => {
-      setLoading(true)
-      try {
-        const res = await listDocuments({
-          query: serverQuery,
-          tag: selectedTag ?? null,
-        })
-        const items = (res?.items ?? []) as DocumentHit[]
-        const onlyDocuments = items.filter((item) => item.type === 'document')
-        if (active) setDocs(onlyDocuments)
-      } catch {
-        if (active) setDocs([])
-      } finally {
-        if (active) setLoading(false)
-      }
-    }, 160)
-    return () => {
-      active = false
-      clearTimeout(handle)
-    }
-  }, [open, selectedTag, serverQuery])
-
   const folderIndex = React.useMemo(() => {
     const index = new Map<string, Set<string>>()
     docs.forEach((doc) => {
@@ -338,6 +330,13 @@
       return
     }
 
+    if (!activeWorkspaceId) {
+      setPreviewContent('')
+      setPreviewError(null)
+      setPreviewLoading(false)
+      return
+    }
+
     const cached = previewCache.current.get(activeDocId)
     if (cached !== undefined) {
       setPreviewContent(cached)
@@ -352,12 +351,8 @@
 
     ;(async () => {
       try {
-        const res = await fetchDocumentContent(activeDocId)
+        const content = await fetchDecryptedContent(activeDocId, activeWorkspaceId)
         if (cancelled) return
-        const content =
-          typeof res === 'object' && res !== null && 'content' in (res as any)
'' - : '' previewCache.current.set(activeDocId, content) setPreviewContent(content) setPreviewError(null) @@ -374,7 +369,7 @@ export default function SearchDialog({ open, onOpenChange, presetTag }: Props) { return () => { cancelled = true } - }, [activeDocId, open]) + }, [activeDocId, activeWorkspaceId, open]) const filteredTags = React.useMemo(() => { if (!tags || tags.length === 0) return [] diff --git a/app/src/features/search/workers/search.worker.ts b/app/src/features/search/workers/search.worker.ts new file mode 100644 index 00000000..80e05372 --- /dev/null +++ b/app/src/features/search/workers/search.worker.ts @@ -0,0 +1,132 @@ +/** + * Search Worker - MiniSearch with staged indexing + */ + +import MiniSearch from 'minisearch' + +import { tokenize } from '../lib/tokenizer' + +// Types +export interface TitleDocument { + id: string + title: string + path: string +} + +export type WorkerRequest = + | { type: 'index-titles'; payload: { documents: TitleDocument[] } } + | { type: 'index-content'; payload: { id: string; content: string } } + | { type: 'search'; payload: { query: string } } + | { type: 'clear' } + +export type WorkerResponse = + | { type: 'indexed-titles'; count: number } + | { type: 'indexed-content'; id: string } + | { type: 'search-result'; ids: string[] } + | { type: 'ready' } + +// Internal document structure +interface IndexedDocument { + id: string + title: string + path: string + content: string +} + +// Store document metadata for later content updates +const documentMeta = new Map() + +// MiniSearch instance +const index = new MiniSearch({ + fields: ['title', 'content'], + storeFields: ['title', 'path'], + tokenize, + searchOptions: { + boost: { title: 2 }, + fuzzy: 0.2, + prefix: true, + }, +}) + +// Max content length per document (50,000 chars) +const MAX_CONTENT_LENGTH = 50000 + +self.onmessage = (event: MessageEvent) => { + const { type } = event.data + + switch (type) { + case 'index-titles': { + const { documents } = event.data.payload as { documents: TitleDocument[] } + + // Phase 1: Index titles only (content is empty string) + for (const doc of documents) { + // Store metadata for later content updates + documentMeta.set(doc.id, { title: doc.title, path: doc.path }) + + // Add to index with empty content + index.add({ + id: doc.id, + title: doc.title, + path: doc.path, + content: '', + }) + } + + self.postMessage({ type: 'indexed-titles', count: documents.length } satisfies WorkerResponse) + break + } + + case 'index-content': { + const { id, content } = event.data.payload as { id: string; content: string } + + const meta = documentMeta.get(id) + if (!meta) { + // Document not found, skip + self.postMessage({ type: 'indexed-content', id } satisfies WorkerResponse) + break + } + + // Remove existing entry and re-add with content + try { + index.discard(id) + } catch { + // Document might not exist, ignore + } + + // Truncate content to max length + const truncatedContent = content.slice(0, MAX_CONTENT_LENGTH) + + index.add({ + id, + title: meta.title, + path: meta.path, + content: truncatedContent, + }) + + self.postMessage({ type: 'indexed-content', id } satisfies WorkerResponse) + break + } + + case 'search': { + const { query } = event.data.payload as { query: string } + + if (!query || query.trim().length === 0) { + self.postMessage({ type: 'search-result', ids: [] } satisfies WorkerResponse) + break + } + + const results = index.search(query) + const ids = results.slice(0, 100).map((r) => r.id) + + self.postMessage({ type: 'search-result', ids } 
satisfies WorkerResponse) + break + } + + case 'clear': { + index.removeAll() + documentMeta.clear() + self.postMessage({ type: 'ready' } satisfies WorkerResponse) + break + } + } +} diff --git a/app/src/features/security/context/key-vault-context.tsx b/app/src/features/security/context/key-vault-context.tsx new file mode 100644 index 00000000..8cbdec4f --- /dev/null +++ b/app/src/features/security/context/key-vault-context.tsx @@ -0,0 +1,296 @@ +import React, { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react' + +import { useAuthContext } from '@/features/auth' + +import { useSecurityStatus } from '../hooks/useSecurityStatus' +import { useServerBackup, type ServerBackup } from '../hooks/useServerBackup' +import { getKeyManager, type E2EESetupResult } from '../lib/keys' + +/** Result type from setup operation (re-exported for convenience) */ +export type SetupResult = E2EESetupResult + +export interface KeyVaultState { + /** Whether KeyManager is initialized */ + isInitialized: boolean + /** Whether security setup has been completed on the server */ + isSetupComplete: boolean + /** Whether the session is unlocked (keys are in memory) */ + isUnlocked: boolean + /** Whether local keys exist in IndexedDB (null = not yet checked) */ + hasLocalKeys: boolean | null + /** Whether data is being loaded */ + loading: boolean + /** Current error message */ + error: string | null + /** Whether the user needs to complete security setup */ + needsSetup: boolean + /** Whether the user needs to migrate existing data */ + needsMigration: boolean + /** Whether keys need to be restored from server (new device) */ + needsRestore: boolean + /** Server backup data (if available) */ + serverBackup: ServerBackup | null + /** Unlock the session with a passphrase */ + unlock: (passphrase: string) => Promise + /** Unlock the session with a recovery key */ + unlockWithRecovery: (mnemonic: string) => Promise + /** Restore keys from server with passphrase */ + restoreFromServer: (passphrase: string) => Promise + /** Restore keys from server with recovery key */ + restoreFromServerWithRecoveryKey: (recoveryKey: string) => Promise + /** Lock the session (keeps stored UMK) */ + lock: () => void + /** Logout - clears keys from memory AND storage */ + logout: () => Promise + /** Set up encryption for a new user */ + setup: (passphrase: string) => Promise + /** Clear error state */ + clearError: () => void +} + +const KeyVaultContext = createContext(null) + +export function KeyVaultProvider({ children }: { children: React.ReactNode }) { + const { user, loading: authLoading, rememberMe } = useAuthContext() + const { data: securityStatus, isLoading: statusLoading } = useSecurityStatus({ enabled: !!user }) + const { data: serverBackup, isLoading: backupLoading } = useServerBackup({ enabled: !!user }) + + // Use ref to always get the latest rememberMe value in callbacks + // This avoids stale closure issues where callbacks capture old values + const rememberMeRef = useRef(rememberMe) + useEffect(() => { + rememberMeRef.current = rememberMe + }, [rememberMe]) + + const [isInitialized, setIsInitialized] = useState(false) + const [isUnlocked, setIsUnlocked] = useState(false) + const [hasLocalKeys, setHasLocalKeys] = useState(null) + const [loading, setLoading] = useState(false) + const [error, setError] = useState(null) + + // Initialize KeyManager when user is authenticated + useEffect(() => { + if (!user) { + setIsInitialized(false) + setIsUnlocked(false) + setHasLocalKeys(null) + return 
+ } + + const km = getKeyManager() + km.initialize().then(async () => { + setIsInitialized(true) + setIsUnlocked(km.isUnlocked) + // Check if local keys exist + const hasKeys = await km.hasKeys() + setHasLocalKeys(hasKeys) + }) + }, [user]) + + // Lock when user logs out (not during auth loading) + useEffect(() => { + // Only lock if auth is done loading AND user is definitely logged out + if (!authLoading && !user && isUnlocked) { + const km = getKeyManager() + km.lock() + setIsUnlocked(false) + } + }, [authLoading, user, isUnlocked]) + + // Subscribe to unlock state changes + useEffect(() => { + if (!isInitialized) return + + const km = getKeyManager() + return km.onUnlockChange(() => { + setIsUnlocked(km.isUnlocked) + }) + }, [isInitialized]) + + const unlock = useCallback(async (passphrase: string) => { + // Use ref to get the latest rememberMe value (avoids stale closure) + const shouldRemember = rememberMeRef.current === true + setLoading(true) + setError(null) + try { + const km = getKeyManager() + await km.unlockWithPassphrase(passphrase, { rememberMe: shouldRemember }) + setIsUnlocked(true) + } catch (err) { + const message = err instanceof Error ? err.message : 'Failed to unlock' + setError(message) + throw err + } finally { + setLoading(false) + } + }, []) + + const unlockWithRecovery = useCallback(async (mnemonic: string) => { + const shouldRemember = rememberMeRef.current === true + setLoading(true) + setError(null) + try { + const km = getKeyManager() + await km.unlockWithRecoveryKey(mnemonic, { rememberMe: shouldRemember }) + setIsUnlocked(true) + } catch (err) { + const message = err instanceof Error ? err.message : 'Recovery failed' + setError(message) + throw err + } finally { + setLoading(false) + } + }, []) + + const restoreFromServer = useCallback(async (passphrase: string) => { + if (!serverBackup?.hasBackup || !serverBackup.encryptedKeysBundle || !serverBackup.salt || !serverBackup.kdfType) { + throw new Error('No server backup available') + } + + const shouldRemember = rememberMeRef.current === true + setLoading(true) + setError(null) + try { + const km = getKeyManager() + await km.restoreFromServer(passphrase, { + encryptedKeysBundle: serverBackup.encryptedKeysBundle, + salt: serverBackup.salt, + kdfType: serverBackup.kdfType, + kdfParams: serverBackup.kdfParams ?? {}, + }, { rememberMe: shouldRemember }) + setIsUnlocked(true) + setHasLocalKeys(true) + } catch (err) { + const message = err instanceof Error ? err.message : 'Restore failed' + setError(message) + throw err + } finally { + setLoading(false) + } + }, [serverBackup]) + + const restoreFromServerWithRecoveryKey = useCallback(async (recoveryKey: string) => { + if (!serverBackup?.hasBackup || !serverBackup.encryptedKeysBundle || !serverBackup.salt || !serverBackup.kdfType) { + throw new Error('No server backup available') + } + + const shouldRemember = rememberMeRef.current === true + setLoading(true) + setError(null) + try { + const km = getKeyManager() + await km.restoreFromServerWithRecoveryKey(recoveryKey, { + encryptedKeysBundle: serverBackup.encryptedKeysBundle, + salt: serverBackup.salt, + kdfType: serverBackup.kdfType, + kdfParams: serverBackup.kdfParams ?? {}, + }, { rememberMe: shouldRemember }) + setIsUnlocked(true) + setHasLocalKeys(true) + } catch (err) { + const message = err instanceof Error ? 
err.message : 'Restore failed' + setError(message) + throw err + } finally { + setLoading(false) + } + }, [serverBackup]) + + const lock = useCallback(() => { + const km = getKeyManager() + km.lock() + setIsUnlocked(false) + }, []) + + const logout = useCallback(async () => { + const km = getKeyManager() + await km.logout() + setIsUnlocked(false) + setHasLocalKeys(null) // Reset local keys state + }, []) + + const setup = useCallback(async (passphrase: string): Promise => { + const shouldRemember = rememberMeRef.current === true + setLoading(true) + setError(null) + try { + const km = getKeyManager() + const result = await km.setupE2EE(passphrase, { rememberMe: shouldRemember }) + setIsUnlocked(true) + setHasLocalKeys(true) // Mark that local keys now exist + return result + } catch (err) { + const message = err instanceof Error ? err.message : 'Setup failed' + setError(message) + throw err + } finally { + setLoading(false) + } + }, []) + + const clearError = useCallback(() => { + setError(null) + }, []) + + // Determine if restore from server is needed + // needsRestore = server has setup complete + server has backup + local has no keys + const needsRestore = !!( + securityStatus?.isSetupComplete && + serverBackup?.hasBackup && + hasLocalKeys === false + ) + + const value = useMemo( + () => ({ + isInitialized, + isSetupComplete: securityStatus?.isSetupComplete ?? false, + isUnlocked, + hasLocalKeys, + loading: loading || statusLoading || backupLoading, + error, + needsSetup: securityStatus ? !securityStatus.isSetupComplete : false, + needsMigration: securityStatus?.needsMigration ?? false, + needsRestore, + serverBackup: serverBackup ?? null, + unlock, + unlockWithRecovery, + restoreFromServer, + restoreFromServerWithRecoveryKey, + lock, + logout, + setup, + clearError, + }), + [ + isInitialized, + securityStatus, + isUnlocked, + hasLocalKeys, + loading, + statusLoading, + backupLoading, + error, + needsRestore, + serverBackup, + unlock, + unlockWithRecovery, + restoreFromServer, + restoreFromServerWithRecoveryKey, + lock, + logout, + setup, + clearError, + ] + ) + + return {children} +} + +export function useKeyVault(): KeyVaultState { + const context = useContext(KeyVaultContext) + if (!context) { + throw new Error('useKeyVault must be used within KeyVaultProvider') + } + return context +} + diff --git a/app/src/features/security/hooks/index.ts b/app/src/features/security/hooks/index.ts new file mode 100644 index 00000000..c09c84ba --- /dev/null +++ b/app/src/features/security/hooks/index.ts @@ -0,0 +1,5 @@ +export { useSecurityStatus, useNeedsSecuritySetup } from './useSecurityStatus' +export { useKeyManager } from './useKeyManager' +export { useServerBackup, type ServerBackup } from './useServerBackup' +export { useNetworkStatus, type NetworkStatus } from './useNetworkStatus' +export { useOfflineQueue, type UseOfflineQueueOptions, type UseOfflineQueueResult } from './useOfflineQueue' diff --git a/app/src/features/security/hooks/useAttachmentContext.ts b/app/src/features/security/hooks/useAttachmentContext.ts new file mode 100644 index 00000000..b2d6f296 --- /dev/null +++ b/app/src/features/security/hooks/useAttachmentContext.ts @@ -0,0 +1,154 @@ +/** + * Hook to set up decryption context for attachments + * + * This hook registers the decryption context (DEK, token) for a document + * so that the attachment Web Component can decrypt files. 
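+ *
+ * A minimal usage sketch (the host component below is illustrative, not
+ * part of this module; only the hook and its options are real):
+ * @example
+ * ```tsx
+ * function DocumentView({ documentId, workspaceId }: { documentId: string; workspaceId: string }) {
+ *   // Register the document's DEK so attachment elements can decrypt files
+ *   useAttachmentContext({ documentId, workspaceId })
+ *   return <article data-document-id={documentId} />
+ * }
+ * ```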
+ */ + +import { useLayoutEffect, useEffect, useRef } from 'react' + +import { + setDecryptionContext, + clearDecryptionContext, + setDefaultDecryptionContext, + initFileMap, + clearFileMap, +} from '@/entities/file' +import { validateShareToken } from '@/entities/share' + +import { useShareContextOptional } from '@/features/sharing' + +import { getSodium } from '../lib/crypto' +import { fetchDocumentKeys } from '../lib/key-helpers' +import { extractShareKeyFromFragment, decryptDekWithShareKey } from '../lib/keys' + +export interface UseAttachmentContextOptions { + /** Document ID */ + documentId?: string + /** Workspace ID for key access */ + workspaceId?: string | null + /** Share token for authentication */ + token?: string + /** Whether to set as default context (for all documents) */ + setAsDefault?: boolean +} + +/** + * Set up decryption context for attachments + * + * When a documentId and workspaceId are provided, this hook fetches the DEK + * and registers the context so that attachments can be decrypted when downloaded + * or previewed. + */ +export function useAttachmentContext(options: UseAttachmentContextOptions): void { + const { documentId, workspaceId, token, setAsDefault } = options + const initStartedRef = useRef(false) + + // Try to get share context (available when navigating from folder share page) + const shareCtx = useShareContextOptional() + + // Initialize context and file map + useEffect(() => { + if (!documentId) return + + let cancelled = false + initStartedRef.current = true + + ;(async () => { + try { + let dek: Uint8Array | null = null + + if (token) { + // Shared documents: try ShareContext first, then fall back to URL fragment + let shareKey = shareCtx?.shareKey ?? null + let encryptedDekBase64: string | null = null + + // Try to get encrypted DEK from ShareContext (folder share navigation) + if (shareCtx?.encryptedDeks && documentId) { + encryptedDekBase64 = shareCtx.encryptedDeks.get(documentId) ?? null + } + + // Fallback: extract share key from URL fragment (direct document share links) + if (!shareKey) { + const fragment = typeof window !== 'undefined' ? window.location.hash : '' + shareKey = fragment ? await extractShareKeyFromFragment(fragment) : null + } + + // Fallback: fetch encrypted DEK from API if not in context + if (!encryptedDekBase64 && shareKey) { + const shareInfo = await validateShareToken(token) + encryptedDekBase64 = shareInfo?.encryptedDek ?? 
null + } + + if (shareKey && encryptedDekBase64) { + // Decrypt DEK using share key + // The encrypted_dek from API has nonce prepended (24 bytes for XChaCha20) + const sodium = await getSodium() + const combined = sodium.from_base64(encryptedDekBase64, sodium.base64_variants.ORIGINAL) + const NONCE_LENGTH = 24 + if (combined.length > NONCE_LENGTH) { + const nonce = combined.slice(0, NONCE_LENGTH) + const ciphertext = combined.slice(NONCE_LENGTH) + const nonceBase64 = sodium.to_base64(nonce, sodium.base64_variants.ORIGINAL) + const ciphertextBase64 = sodium.to_base64(ciphertext, sodium.base64_variants.ORIGINAL) + dek = await decryptDekWithShareKey(ciphertextBase64, nonceBase64, shareKey) + } + } + } else if (workspaceId) { + // Regular documents: fetch DEK via workspace KEK hierarchy + const result = await fetchDocumentKeys(documentId, workspaceId) + dek = result.dek + } + + if (cancelled) return + + if (!dek) { + console.warn('[useAttachmentContext] No DEK available') + return + } + + // Set context with DEK + const context = { dek, token } + if (setAsDefault) { + setDefaultDecryptionContext(context) + } else { + setDecryptionContext(documentId, context) + } + + // Initialize file map with DEK (pass token for share access) + initFileMap(documentId, dek, token).catch(() => { + // Errors handled by waitForFileMap callers + }) + } catch (err) { + console.warn('[useAttachmentContext] Failed to fetch DEK:', err) + } + })() + + return () => { + cancelled = true + } + }, [documentId, workspaceId, token, setAsDefault, shareCtx]) + + // Cleanup on unmount or when dependencies change + useLayoutEffect(() => { + // Skip cleanup setup if neither workspaceId nor token is present + if (!workspaceId && !token) return + + return () => { + if (setAsDefault) { + setDefaultDecryptionContext(null) + } else if (documentId) { + clearDecryptionContext(documentId) + } + } + }, [documentId, workspaceId, token, setAsDefault]) + + // Cleanup file map on unmount + useEffect(() => { + return () => { + if (documentId) { + clearFileMap(documentId) + } + } + }, [documentId, workspaceId]) +} diff --git a/app/src/features/security/hooks/useKeyManager.ts b/app/src/features/security/hooks/useKeyManager.ts new file mode 100644 index 00000000..c8ce9ef9 --- /dev/null +++ b/app/src/features/security/hooks/useKeyManager.ts @@ -0,0 +1,121 @@ +import { useCallback, useEffect, useState, useSyncExternalStore } from 'react' + +import { getKeyManager } from '@/features/security' + +// External store for KeyManager state +let listeners: Set<() => void> = new Set() +let snapshot = { + isInitialized: false, + isUnlocked: false, +} + +function subscribe(listener: () => void): () => void { + listeners.add(listener) + return () => listeners.delete(listener) +} + +function getSnapshot() { + return snapshot +} + +function updateSnapshot() { + const km = getKeyManager() + const newSnapshot = { + isInitialized: km.isInitialized, + isUnlocked: km.isUnlocked, + } + if ( + newSnapshot.isInitialized !== snapshot.isInitialized || + newSnapshot.isUnlocked !== snapshot.isUnlocked + ) { + snapshot = newSnapshot + listeners.forEach((listener) => listener()) + } +} + +/** + * Low-level hook for KeyManager operations that do NOT require rememberMe. + * + * For operations that require rememberMe (unlock, setup, restore), use useKeyVault instead. 
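+ *
+ * A hedged sketch of that split (the form component and its wiring are
+ * hypothetical; the hook API is as defined in this file):
+ * @example
+ * ```tsx
+ * function ChangePassphraseForm() {
+ *   const { changePassphrase, loading, error } = useKeyManager()
+ *   const onSubmit = async (next: string) => {
+ *     // Resolves with the newly generated recovery key
+ *     const recoveryKey = await changePassphrase(next)
+ *     // ...show recoveryKey to the user for safekeeping
+ *   }
+ *   // ...render the form using loading / error
+ * }
+ * ```
+ *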
+ * This hook is only for: + * - changePassphrase: Change the user's passphrase + * - verifyPassphrase: Verify if a passphrase is correct + * - hasKeys: Check if keys exist + * - lock: Lock the session (clear keys from memory only) + */ +export function useKeyManager() { + const { isInitialized, isUnlocked } = useSyncExternalStore(subscribe, getSnapshot, getSnapshot) + const [error, setError] = useState(null) + const [loading, setLoading] = useState(false) + + // Initialize KeyManager on mount + useEffect(() => { + const km = getKeyManager() + if (!km.isInitialized) { + km.initialize().then(() => { + updateSnapshot() + }) + } + }, []) + + /** + * Lock the session (clears keys from memory only, not storage). + */ + const lock = useCallback(() => { + const km = getKeyManager() + km.lock() + updateSnapshot() + }, []) + + /** + * Change the passphrase. + * @returns New recovery key + */ + const changePassphrase = useCallback(async (newPassphrase: string): Promise => { + setLoading(true) + setError(null) + try { + const km = getKeyManager() + const recoveryKey = await km.changePassphrase(newPassphrase) + return recoveryKey + } catch (err) { + const message = err instanceof Error ? err.message : 'Change failed' + setError(message) + throw err + } finally { + setLoading(false) + } + }, []) + + /** + * Verify if a passphrase is correct. + */ + const verifyPassphrase = useCallback(async (passphrase: string): Promise => { + try { + const km = getKeyManager() + return await km.verifyPassphrase(passphrase) + } catch { + return false + } + }, []) + + /** + * Check if the user has stored keys. + */ + const hasKeys = useCallback(async (): Promise => { + const km = getKeyManager() + return km.hasKeys() + }, []) + + return { + isInitialized, + isUnlocked, + loading, + error, + lock, + changePassphrase, + verifyPassphrase, + hasKeys, + clearError: () => setError(null), + } +} diff --git a/app/src/features/security/hooks/useNetworkStatus.ts b/app/src/features/security/hooks/useNetworkStatus.ts new file mode 100644 index 00000000..2800a99c --- /dev/null +++ b/app/src/features/security/hooks/useNetworkStatus.ts @@ -0,0 +1,68 @@ +/** + * Network Status Hook + * + * Provides reactive network status monitoring. + */ + +import { useState, useEffect, useCallback } from 'react' + +export interface NetworkStatus { + /** Whether the browser is online */ + isOnline: boolean + /** Timestamp of last status change */ + lastChanged: number | null +} + +/** + * Hook to monitor network connectivity status + * + * @returns Network status object + * + * @example + * ```tsx + * function MyComponent() { + * const { isOnline } = useNetworkStatus() + * return
<div>{isOnline ? 'Online' : 'Offline'}</div>
+ * } + * ``` + */ +export function useNetworkStatus(): NetworkStatus { + const [status, setStatus] = useState(() => ({ + isOnline: typeof navigator !== 'undefined' ? navigator.onLine : true, + lastChanged: null, + })) + + const handleOnline = useCallback(() => { + setStatus({ + isOnline: true, + lastChanged: Date.now(), + }) + }, []) + + const handleOffline = useCallback(() => { + setStatus({ + isOnline: false, + lastChanged: Date.now(), + }) + }, []) + + useEffect(() => { + // Update initial status (handles SSR case) + if (typeof navigator !== 'undefined') { + setStatus((prev) => ({ + ...prev, + isOnline: navigator.onLine, + })) + } + + window.addEventListener('online', handleOnline) + window.addEventListener('offline', handleOffline) + + return () => { + window.removeEventListener('online', handleOnline) + window.removeEventListener('offline', handleOffline) + } + }, [handleOnline, handleOffline]) + + return status +} diff --git a/app/src/features/security/hooks/useOfflineQueue.ts b/app/src/features/security/hooks/useOfflineQueue.ts new file mode 100644 index 00000000..022ca7c1 --- /dev/null +++ b/app/src/features/security/hooks/useOfflineQueue.ts @@ -0,0 +1,193 @@ +/** + * Offline Queue Hook + * + * Provides access to the offline queue with automatic processing on reconnect. + */ + +import { useState, useEffect, useCallback, useRef } from 'react' + +import { + getOfflineQueue, + type QueuedOperation, + type OperationType, +} from '../lib/network/offline-queue' +import { withRetry } from '../lib/network/retry' + +import { useNetworkStatus } from './useNetworkStatus' + +const MAX_RETRY_COUNT = 5 + +export interface UseOfflineQueueOptions { + /** Handler to process queued operations */ + processOperation?: (operation: QueuedOperation) => Promise + /** Whether to auto-process on reconnect */ + autoProcess?: boolean +} + +export interface UseOfflineQueueResult { + /** Number of pending operations */ + pendingCount: number + /** List of pending operations */ + pendingOperations: QueuedOperation[] + /** Whether queue is being processed */ + processing: boolean + /** Add an operation to the queue */ + addToQueue: ( + type: OperationType, + payload: string, + metadata?: { documentId?: string; workspaceId?: string } + ) => Promise + /** Manually process the queue */ + processQueue: () => Promise + /** Clear all pending operations */ + clearQueue: () => Promise + /** Refresh the queue state */ + refresh: () => Promise +} + +/** + * Hook to manage the offline queue + * + * @param options - Configuration options + * @returns Queue management functions and state + * + * @example + * ```tsx + * function MyComponent() { + * const { pendingCount, processing, addToQueue } = useOfflineQueue({ + * processOperation: async (op) => { + * await sendToServer(op.payload) + * }, + * }) + * + * return
<div>{pendingCount} pending operations</div>
+ * } + * ``` + */ +export function useOfflineQueue( + options: UseOfflineQueueOptions = {} +): UseOfflineQueueResult { + const { processOperation, autoProcess = true } = options + + const { isOnline } = useNetworkStatus() + const [pendingOperations, setPendingOperations] = useState([]) + const [processing, setProcessing] = useState(false) + const processingRef = useRef(false) + const prevOnlineRef = useRef(isOnline) + + // Refresh queue state + const refresh = useCallback(async () => { + try { + const queue = getOfflineQueue() + const operations = await queue.getAll() + setPendingOperations(operations) + } catch (error) { + console.error('[useOfflineQueue] Failed to refresh queue:', error) + } + }, []) + + // Add to queue + const addToQueue = useCallback( + async ( + type: OperationType, + payload: string, + metadata?: { documentId?: string; workspaceId?: string } + ): Promise => { + const queue = getOfflineQueue() + const id = await queue.add({ + type, + payload, + documentId: metadata?.documentId, + workspaceId: metadata?.workspaceId, + }) + await refresh() + return id + }, + [refresh] + ) + + // Process queue + const processQueue = useCallback(async () => { + if (!processOperation || processingRef.current) return + + processingRef.current = true + setProcessing(true) + + try { + const queue = getOfflineQueue() + const operations = await queue.getAll() + + for (const operation of operations) { + // Skip operations that have exceeded retry limit + if (operation.retryCount >= MAX_RETRY_COUNT) { + console.warn( + `[useOfflineQueue] Operation ${operation.id} exceeded max retries, removing` + ) + await queue.remove(operation.id) + continue + } + + try { + await withRetry(() => processOperation(operation), { + maxRetries: 2, + onRetry: (attempt) => { + console.log( + `[useOfflineQueue] Retrying operation ${operation.id}, attempt ${attempt}` + ) + }, + }) + + // Success - remove from queue + await queue.remove(operation.id) + } catch (error) { + console.error( + `[useOfflineQueue] Failed to process operation ${operation.id}:`, + error + ) + // Increment retry count + await queue.incrementRetryCount(operation.id) + } + } + + await refresh() + } finally { + processingRef.current = false + setProcessing(false) + } + }, [processOperation, refresh]) + + // Clear queue + const clearQueue = useCallback(async () => { + const queue = getOfflineQueue() + await queue.clear() + await refresh() + }, [refresh]) + + // Initialize and refresh on mount + useEffect(() => { + const init = async () => { + const queue = getOfflineQueue() + await queue.initialize() + await refresh() + } + init() + }, [refresh]) + + // Auto-process when coming back online + useEffect(() => { + if (autoProcess && !prevOnlineRef.current && isOnline && pendingOperations.length > 0) { + processQueue() + } + prevOnlineRef.current = isOnline + }, [isOnline, autoProcess, pendingOperations.length, processQueue]) + + return { + pendingCount: pendingOperations.length, + pendingOperations, + processing, + addToQueue, + processQueue, + clearQueue, + refresh, + } +} diff --git a/app/src/features/security/hooks/useSecurityStatus.ts b/app/src/features/security/hooks/useSecurityStatus.ts new file mode 100644 index 00000000..116c7c9a --- /dev/null +++ b/app/src/features/security/hooks/useSecurityStatus.ts @@ -0,0 +1,70 @@ +import { useQuery } from '@tanstack/react-query' + +import { securityStatusQuery, needsMigrationQuery } from '@/entities/user' + +export interface SecurityStatus { + /** Whether E2EE setup has been completed */ + isSetupComplete: 
boolean + /** Whether data migration is needed (existing user) */ + needsMigration: boolean +} + +interface UseSecurityStatusOptions { + enabled?: boolean +} + +interface UseSecurityStatusResult { + data: SecurityStatus | undefined + isLoading: boolean + error: Error | null + refetch: () => void +} + +/** + * Hook to fetch and combine security status information. + * Combines E2EE status and migration status into a single interface. + */ +export function useSecurityStatus(options?: UseSecurityStatusOptions): UseSecurityStatusResult { + const enabled = options?.enabled ?? true + const statusQuery = useQuery({ ...securityStatusQuery(), enabled }) + const migrationQuery = useQuery({ ...needsMigrationQuery(), enabled }) + + const isLoading = statusQuery.isLoading || migrationQuery.isLoading + const error = statusQuery.error ?? migrationQuery.error + + const data: SecurityStatus | undefined = + statusQuery.data && migrationQuery.data + ? { + isSetupComplete: statusQuery.data.isSetupCompleted, + needsMigration: migrationQuery.data.needsMigration, + } + : undefined + + const refetch = () => { + statusQuery.refetch() + migrationQuery.refetch() + } + + return { + data, + isLoading, + error: error as Error | null, + refetch, + } +} + +/** + * Hook to check if security setup is required. + * Returns true if setup is not complete. + */ +export function useNeedsSecuritySetup(): { + needsSetup: boolean | undefined + isLoading: boolean +} { + const { data, isLoading } = useSecurityStatus() + + return { + needsSetup: data ? !data.isSetupComplete : undefined, + isLoading, + } +} diff --git a/app/src/features/security/hooks/useServerBackup.ts b/app/src/features/security/hooks/useServerBackup.ts new file mode 100644 index 00000000..3a5183ba --- /dev/null +++ b/app/src/features/security/hooks/useServerBackup.ts @@ -0,0 +1,147 @@ +import { useQuery } from '@tanstack/react-query' + +import { + getMasterKeyBackup, + getEncryptedPrivateKey, + type MasterKeyBackupResponse, + type EncryptedPrivateKeyResponse, +} from '@/entities/user' + +import type { EncryptedKeysBundle } from '../lib/keys' + +export interface ServerBackup { + /** Whether server has backup data */ + hasBackup: boolean + /** Encrypted keys bundle (if available) */ + encryptedKeysBundle: EncryptedKeysBundle | null + /** Salt for KDF (base64) */ + salt: string | null + /** KDF type */ + kdfType: 'argon2id' | 'pbkdf2' | null + /** KDF parameters */ + kdfParams: { + memory?: number | null + iterations?: number | null + parallelism?: number | null + } | null +} + +/** + * Parse encrypted keys bundle from server response. 
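+ *
+ * Expected wire shape, as validated below (field values abbreviated; the
+ * field set mirrors EncryptedKeysBundle):
+ * ```ts
+ * // response.nonce === btoa('bundle-v1')
+ * // atob(response.encryptedPrivateKey) parses to:
+ * {
+ *   encryptedEcdhPrivateKey: '...',
+ *   encryptedEcdhPrivateKeyNonce: '...',
+ *   encryptedSigningPrivateKey: '...',
+ *   encryptedSigningPrivateKeyNonce: '...',
+ *   ecdhPublicKey: '...',
+ *   signingPublicKey: '...',
+ * }
+ * ```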
+ */ +function parseKeysBundle(response: EncryptedPrivateKeyResponse): EncryptedKeysBundle | null { + try { + // Check if it's our bundle format (nonce is base64 encoded 'bundle-v1') + const expectedNonce = btoa('bundle-v1') + if (response.nonce !== expectedNonce) { + console.warn('[useServerBackup] Unknown nonce format:', response.nonce) + return null + } + + // Decode base64 and parse JSON + const jsonStr = atob(response.encryptedPrivateKey) + const bundle = JSON.parse(jsonStr) as EncryptedKeysBundle + + // Validate required fields + if ( + !bundle.encryptedEcdhPrivateKey || + !bundle.encryptedEcdhPrivateKeyNonce || + !bundle.encryptedSigningPrivateKey || + !bundle.encryptedSigningPrivateKeyNonce || + !bundle.ecdhPublicKey || + !bundle.signingPublicKey + ) { + console.warn('[useServerBackup] Invalid bundle format') + return null + } + + return bundle + } catch (err) { + console.error('[useServerBackup] Failed to parse keys bundle:', err) + return null + } +} + +/** + * Hook to fetch server backup data for key restoration. + */ +export function useServerBackup(options?: { enabled?: boolean }) { + const masterKeyQuery = useQuery({ + queryKey: ['security', 'master-key-backup'], + queryFn: async () => { + try { + return await getMasterKeyBackup() + } catch (err) { + // 404 means no backup exists + if ((err as { status?: number }).status === 404) { + return null + } + throw err + } + }, + enabled: options?.enabled ?? true, + staleTime: 30_000, + retry: false, + }) + + const encryptedKeyQuery = useQuery({ + queryKey: ['security', 'encrypted-private-key'], + queryFn: async () => { + try { + return await getEncryptedPrivateKey() + } catch (err) { + // 404 means no backup exists + if ((err as { status?: number }).status === 404) { + return null + } + throw err + } + }, + enabled: options?.enabled ?? true, + staleTime: 30_000, + retry: false, + }) + + const isLoading = masterKeyQuery.isLoading || encryptedKeyQuery.isLoading + const error = masterKeyQuery.error ?? encryptedKeyQuery.error + + let data: ServerBackup | undefined + + if (!isLoading && masterKeyQuery.data !== undefined && encryptedKeyQuery.data !== undefined) { + const masterKey = masterKeyQuery.data as MasterKeyBackupResponse | null + const encryptedKey = encryptedKeyQuery.data as EncryptedPrivateKeyResponse | null + + if (masterKey && encryptedKey) { + const bundle = parseKeysBundle(encryptedKey) + data = { + hasBackup: bundle !== null, + encryptedKeysBundle: bundle, + salt: masterKey.salt, + kdfType: (masterKey.kdfType === 'argon2id' || masterKey.kdfType === 'pbkdf2') + ? masterKey.kdfType + : null, + kdfParams: masterKey.kdfParams, + } + } else { + data = { + hasBackup: false, + encryptedKeysBundle: null, + salt: null, + kdfType: null, + kdfParams: null, + } + } + } + + const refetch = () => { + masterKeyQuery.refetch() + encryptedKeyQuery.refetch() + } + + return { + data, + isLoading, + error: error as Error | null, + refetch, + } +} diff --git a/app/src/features/security/index.ts b/app/src/features/security/index.ts new file mode 100644 index 00000000..c2a577e0 --- /dev/null +++ b/app/src/features/security/index.ts @@ -0,0 +1,241 @@ +/** + * Security Feature Module + * + * End-to-End Encryption (KeyVault) for RefMD. 
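+ *
+ * Typical consumption sketch (ids are placeholders; each symbol below is
+ * re-exported from this module):
+ * ```ts
+ * import { getKeyManager, fetchDocumentKeys, encryptString } from '@/features/security'
+ *
+ * const km = getKeyManager()
+ * await km.initialize()
+ * // after unlock: resolve the document DEK via the KEK hierarchy, then encrypt
+ * const { dek } = await fetchDocumentKeys('doc-id', 'workspace-id')
+ * const { ciphertext, nonce } = await encryptString(dek, 'hello')
+ * ```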
+ */ + +// Crypto primitives (explicit exports to avoid conflicts) +export { + // Sodium + getSodium, + isSodiumReady, + type Sodium, + // XChaCha20 + encrypt, + decrypt, + encryptDek, + decryptDek, + encryptString, + decryptString, + generateKey, + generateNonce, + NONCE_SIZE, + KEY_SIZE, + TAG_SIZE, + type EncryptResult, + // Ed25519 + sign, + verify, + signToBase64, + verifyFromBase64, + generateSigningKeyPair, + buildSigningMessage, + SIGNATURE_DOMAINS, + PUBLIC_KEY_SIZE, + PRIVATE_KEY_SIZE, + SIGNATURE_SIZE, + type Ed25519KeyPair, + type SigningMessage, + type SignatureDomain, + // Canonicalize + canonicalize, + canonicalizeAndToBase64, + fromBase64Json, + toBase64, + fromBase64, + // HKDF + hkdfDeriveKey, + hkdfGenerateMasterKey, + HKDF_CONTEXTS, + SUBKEY_IDS, + MASTER_KEY_SIZE, + MIN_KEY_SIZE, + MAX_KEY_SIZE, + // Argon2 + argon2DeriveKey, + argon2DeriveKeyWithNewSalt, + argon2GenerateSalt, + isArgon2Supported, + DEFAULT_ARGON2_PARAMS, + ARGON2_SALT_SIZE, + // Note: Argon2Params is exported from types + // PBKDF2 + pbkdf2DeriveKey, + pbkdf2DeriveKeyWithNewSalt, + pbkdf2GenerateSalt, + PBKDF2_DEFAULT_ITERATIONS, + PBKDF2_SALT_SIZE, + // ECDH + generateEcdhKeyPair, + getEcdhPublicKey, + computeSharedSecret, + deriveSharedKey, + encryptKeyForRecipient, + decryptKeyFromSender, + isValidEcdhPublicKey, + ECDH_PRIVATE_KEY_SIZE, + ECDH_PUBLIC_KEY_SIZE, + SHARED_SECRET_SIZE, + // Note: EcdhKeyPair is exported from types + // BIP39 + generateRecoveryKey, + validateRecoveryKey, + recoveryKeyToUmk, + umkToRecoveryKey, + getWordsAtIndices, + verifyWords, + generateVerificationIndices, + getWordList, + WORD_COUNT, + ENTROPY_BITS, + ENTROPY_BYTES, +} from './lib/crypto' + +// Type definitions +export * from './lib/types' + +// Key management (explicit exports to avoid conflicts) +export { + // KeyManager + KeyManager, + getKeyManager, + resetKeyManager, + SessionLockedError, + KeyNotFoundError, + type E2EESetupResult, + type EncryptedKeysBundle, + // KeyStore + KeyStore, + getKeyStore, + type StoredKeys, + // KeyCache + KeyCache, + KekCache, + DekCache, + getKekCache, + getDekCache, + clearAllCaches, + DEFAULT_KEK_CACHE_SIZE, + DEFAULT_DEK_CACHE_SIZE, + // UMK + generateUmk, + deriveUmkFromPassphrase, + restoreUmkFromRecoveryKey, + verifyPassphrase, + generateNewRecoveryKey, + zeroUmk, + UMK_SIZE, + type UmkGenerationResult, + // User Keys + generateUserKeys, + encryptUserKeys, + decryptUserKeys, + reEncryptUserKeys, + getPublicKeysBase64, + parsePublicKeysFromBase64, + zeroUserKeys, + type UserKeySet, + type EncryptedUserKeys, + // Workspace KEK + generateWorkspaceKek, + encryptKekForRecipient, + decryptKek, + decryptKekFromApiResponse, + encodeKekForApi, + getOrFetchKek, + invalidateCachedKek, + createKekForMember, + KEK_SIZE, + type EncryptedKekFromApi, + // Document DEK + generateDocumentDek, + encryptDekWithKek, + decryptDekWithKek, + decryptDekFromApiResponse, + encodeDekForApi, + createEncryptedDekForApi, + getOrFetchDek, + invalidateCachedDek, + invalidateWorkspaceDeks, + reEncryptDek, + DEK_SIZE, + type EncryptedDekFromApi, + // Share Keys + generateShareKey, + extractShareKeyFromFragment, + deriveShareKeyFromPassword, + createPasswordProtectedShareKey, + encryptDekWithShareKey, + decryptDekWithShareKey, + buildShareUrl, + parseSaltFromApi, + encodeSaltForApi, + hasShareKeyFragment, + SHARE_KEY_SIZE, + URL_FRAGMENT_PREFIX, + type EncryptedShareKeyForApi, +} from './lib/keys' + +// Hooks +export { useSecurityStatus, useNeedsSecuritySetup } from './hooks/useSecurityStatus' +export { 
useKeyManager } from './hooks/useKeyManager' +export { useServerBackup, type ServerBackup } from './hooks/useServerBackup' +export { useAttachmentContext } from './hooks/useAttachmentContext' + +// Context (KeyVault) +export { + KeyVaultProvider, + useKeyVault, + type KeyVaultState, + type SetupResult, +} from './context/key-vault-context' + +// UI Components +export { + PassphraseInput, + RecoveryKeyDisplay, + RecoveryKeyVerify, + MigrationProgress, + SecuritySetupWizard, + UnlockPrompt, + RestorePrompt, + RequireKeyVault, +} from './ui' + +// KeyVault Service (simplified API for key operations) +export { + KeyVaultService, + getKeyVaultService, + resetKeyVaultService, +} from './lib/key-vault-service' + +// Document key helpers +export { + createDocumentDek, + createDocumentDekIfNeeded, + isE2EEReady, +} from './lib/document-keys' + +// Tag encryption (Phase 14) +export { + // Extraction + extractTags, + extractTagsPreserveCase, + // Deterministic encryption + encryptTagDeterministic, + encryptTags, + buildTagLookupTable, + decryptTag, + decryptTags, + TagLookupManager, + getTagLookupManager, + resetTagLookupManager, + HMAC_KEY_SIZE, +} from './lib/tags' + +// Key helpers (simplified key fetching) +export { + fetchWorkspaceKek, + fetchDocumentDek, + fetchDocumentKeys, +} from './lib/key-helpers' diff --git a/app/src/features/security/lib/crypto/__tests__/bip39.test.ts b/app/src/features/security/lib/crypto/__tests__/bip39.test.ts new file mode 100644 index 00000000..3a52fdb0 --- /dev/null +++ b/app/src/features/security/lib/crypto/__tests__/bip39.test.ts @@ -0,0 +1,187 @@ +import { describe, it, expect } from 'vitest' + +import { + generateRecoveryKey, + validateRecoveryKey, + recoveryKeyToUmk, + umkToRecoveryKey, + getWordsAtIndices, + verifyWords, + generateVerificationIndices, + getWordList, + WORD_COUNT, + ENTROPY_BYTES, +} from '@/shared/lib/crypto' + +describe('BIP39 Recovery Key', () => { + describe('generateRecoveryKey', () => { + it('should generate 24 words', () => { + const mnemonic = generateRecoveryKey() + const words = mnemonic.split(' ') + expect(words.length).toBe(WORD_COUNT) + }) + + it('should generate valid mnemonic', () => { + const mnemonic = generateRecoveryKey() + expect(validateRecoveryKey(mnemonic)).toBe(true) + }) + + it('should generate unique mnemonics', () => { + const mnemonic1 = generateRecoveryKey() + const mnemonic2 = generateRecoveryKey() + expect(mnemonic1).not.toBe(mnemonic2) + }) + + it('should only use words from BIP39 wordlist', () => { + const wordlist = getWordList() + const mnemonic = generateRecoveryKey() + const words = mnemonic.split(' ') + + for (const word of words) { + expect(wordlist).toContain(word) + } + }) + }) + + describe('validateRecoveryKey', () => { + it('should validate correct mnemonic', () => { + const mnemonic = generateRecoveryKey() + expect(validateRecoveryKey(mnemonic)).toBe(true) + }) + + it('should reject invalid mnemonic', () => { + expect(validateRecoveryKey('not a valid mnemonic phrase')).toBe(false) + }) + + it('should reject mnemonic with wrong word count', () => { + const mnemonic = generateRecoveryKey() + const words = mnemonic.split(' ').slice(0, 12).join(' ') + expect(validateRecoveryKey(words)).toBe(false) + }) + + it('should reject mnemonic with invalid checksum', () => { + const mnemonic = generateRecoveryKey() + const words = mnemonic.split(' ') + words[0] = 'abandon' // Replace first word to break checksum + expect(validateRecoveryKey(words.join(' '))).toBe(false) + }) + }) + + describe('recoveryKeyToUmk / 
umkToRecoveryKey', () => { + it('should convert mnemonic to UMK and back', () => { + const mnemonic = generateRecoveryKey() + const umk = recoveryKeyToUmk(mnemonic) + + expect(umk).toBeInstanceOf(Uint8Array) + expect(umk.length).toBe(ENTROPY_BYTES) + + const recoveredMnemonic = umkToRecoveryKey(umk) + expect(recoveredMnemonic).toBe(mnemonic) + }) + + it('should produce consistent UMK for same mnemonic', () => { + const mnemonic = generateRecoveryKey() + const umk1 = recoveryKeyToUmk(mnemonic) + const umk2 = recoveryKeyToUmk(mnemonic) + + expect(umk1).toEqual(umk2) + }) + + it('should throw on invalid mnemonic', () => { + expect(() => recoveryKeyToUmk('invalid mnemonic')).toThrow('Invalid recovery key') + }) + + it('should throw on invalid UMK length', () => { + const shortUmk = new Uint8Array(16) + expect(() => umkToRecoveryKey(shortUmk)).toThrow('Invalid UMK length') + }) + }) + + describe('getWordsAtIndices', () => { + it('should get correct words at specified indices', () => { + const mnemonic = generateRecoveryKey() + const words = mnemonic.split(' ') + + const result = getWordsAtIndices(mnemonic, [0, 5, 23]) + + expect(result[0]).toBe(words[0]) + expect(result[1]).toBe(words[5]) + expect(result[2]).toBe(words[23]) + }) + + it('should throw on out of bounds index', () => { + const mnemonic = generateRecoveryKey() + expect(() => getWordsAtIndices(mnemonic, [24])).toThrow('Invalid word index') + expect(() => getWordsAtIndices(mnemonic, [-1])).toThrow('Invalid word index') + }) + }) + + describe('verifyWords', () => { + it('should verify correct words', () => { + const mnemonic = generateRecoveryKey() + const indices = [2, 7] + const words = getWordsAtIndices(mnemonic, indices) + + expect(verifyWords(mnemonic, indices, words)).toBe(true) + }) + + it('should reject incorrect words', () => { + const mnemonic = generateRecoveryKey() + const indices = [2, 7] + + expect(verifyWords(mnemonic, indices, ['wrong', 'words'])).toBe(false) + }) + + it('should be case insensitive', () => { + const mnemonic = generateRecoveryKey() + const indices = [0] + const words = getWordsAtIndices(mnemonic, indices) + + expect(verifyWords(mnemonic, indices, [words[0].toUpperCase()])).toBe(true) + }) + + it('should trim whitespace', () => { + const mnemonic = generateRecoveryKey() + const indices = [0] + const words = getWordsAtIndices(mnemonic, indices) + + expect(verifyWords(mnemonic, indices, [` ${words[0]} `])).toBe(true) + }) + }) + + describe('generateVerificationIndices', () => { + it('should generate specified number of indices', () => { + const indices = generateVerificationIndices(3) + expect(indices.length).toBe(3) + }) + + it('should generate unique indices', () => { + const indices = generateVerificationIndices(5) + const uniqueIndices = new Set(indices) + expect(uniqueIndices.size).toBe(5) + }) + + it('should generate indices within valid range', () => { + const indices = generateVerificationIndices(5) + for (const i of indices) { + expect(i).toBeGreaterThanOrEqual(0) + expect(i).toBeLessThan(WORD_COUNT) + } + }) + + it('should return sorted indices', () => { + const indices = generateVerificationIndices(5) + const sorted = [...indices].sort((a, b) => a - b) + expect(indices).toEqual(sorted) + }) + }) + + describe('getWordList', () => { + it('should return BIP39 English wordlist', () => { + const wordlist = getWordList() + expect(wordlist.length).toBe(2048) + expect(wordlist).toContain('abandon') + expect(wordlist).toContain('zoo') + }) + }) +}) diff --git 
a/app/src/features/security/lib/crypto/__tests__/ed25519.test.ts b/app/src/features/security/lib/crypto/__tests__/ed25519.test.ts new file mode 100644 index 00000000..8ebf7e96 --- /dev/null +++ b/app/src/features/security/lib/crypto/__tests__/ed25519.test.ts @@ -0,0 +1,188 @@ +import { describe, it, expect, beforeAll } from 'vitest' + +import { + sign, + verify, + signToBase64, + verifyFromBase64, + generateKeyPair, + buildSigningMessage, + SIGNATURE_DOMAINS, + PUBLIC_KEY_SIZE, + PRIVATE_KEY_SIZE, + SIGNATURE_SIZE, + getSodium, +} from '@/shared/lib/crypto' + +describe('Ed25519', () => { + beforeAll(async () => { + await getSodium() + }) + + describe('generateKeyPair', () => { + it('should generate valid key pair sizes', async () => { + const { publicKey, privateKey } = await generateKeyPair() + + expect(publicKey).toBeInstanceOf(Uint8Array) + expect(publicKey.length).toBe(PUBLIC_KEY_SIZE) + + expect(privateKey).toBeInstanceOf(Uint8Array) + expect(privateKey.length).toBe(PRIVATE_KEY_SIZE) + }) + + it('should generate unique key pairs', async () => { + const kp1 = await generateKeyPair() + const kp2 = await generateKeyPair() + + expect(kp1.publicKey).not.toEqual(kp2.publicKey) + expect(kp1.privateKey).not.toEqual(kp2.privateKey) + }) + }) + + describe('buildSigningMessage', () => { + it('should build message with correct format', () => { + const message = buildSigningMessage(SIGNATURE_DOMAINS.UPDATE, { + ciphertext: 'Y2lwaGVy', + nonce: 'bm9uY2U=', + publicData: 'cHVibGljRGF0YQ==', + }) + + const expected = 'refmd_update{"ciphertext":"Y2lwaGVy","nonce":"bm9uY2U=","publicData":"cHVibGljRGF0YQ=="}' + expect(new TextDecoder().decode(message)).toBe(expected) + }) + + it('should produce deterministic output', () => { + const msg1 = buildSigningMessage(SIGNATURE_DOMAINS.SNAPSHOT, { + ciphertext: 'abc', + nonce: 'def', + publicData: 'ghi', + }) + + const msg2 = buildSigningMessage(SIGNATURE_DOMAINS.SNAPSHOT, { + nonce: 'def', // different order + ciphertext: 'abc', + publicData: 'ghi', + }) + + expect(msg1).toEqual(msg2) + }) + }) + + describe('sign/verify', () => { + it('should sign and verify a message', async () => { + const { publicKey, privateKey } = await generateKeyPair() + const message = { + ciphertext: 'encrypted_content', + nonce: 'random_nonce', + publicData: 'public_metadata', + } + + const signature = await sign(privateKey, SIGNATURE_DOMAINS.UPDATE, message) + + expect(signature).toBeInstanceOf(Uint8Array) + expect(signature.length).toBe(SIGNATURE_SIZE) + + const isValid = await verify(publicKey, signature, SIGNATURE_DOMAINS.UPDATE, message) + expect(isValid).toBe(true) + }) + + it('should fail verification with wrong public key', async () => { + const kp1 = await generateKeyPair() + const kp2 = await generateKeyPair() + const message = { + ciphertext: 'encrypted_content', + nonce: 'random_nonce', + publicData: 'public_metadata', + } + + const signature = await sign(kp1.privateKey, SIGNATURE_DOMAINS.UPDATE, message) + const isValid = await verify(kp2.publicKey, signature, SIGNATURE_DOMAINS.UPDATE, message) + + expect(isValid).toBe(false) + }) + + it('should fail verification with modified message', async () => { + const { publicKey, privateKey } = await generateKeyPair() + const message = { + ciphertext: 'encrypted_content', + nonce: 'random_nonce', + publicData: 'public_metadata', + } + + const signature = await sign(privateKey, SIGNATURE_DOMAINS.UPDATE, message) + + const modifiedMessage = { ...message, ciphertext: 'tampered_content' } + const isValid = await verify(publicKey, 
signature, SIGNATURE_DOMAINS.UPDATE, modifiedMessage) + + expect(isValid).toBe(false) + }) + + it('should fail verification with wrong domain', async () => { + const { publicKey, privateKey } = await generateKeyPair() + const message = { + ciphertext: 'encrypted_content', + nonce: 'random_nonce', + publicData: 'public_metadata', + } + + const signature = await sign(privateKey, SIGNATURE_DOMAINS.UPDATE, message) + const isValid = await verify(publicKey, signature, SIGNATURE_DOMAINS.SNAPSHOT, message) + + expect(isValid).toBe(false) + }) + }) + + describe('signToBase64/verifyFromBase64', () => { + it('should work with Base64 encoding', async () => { + const { publicKey, privateKey } = await generateKeyPair() + const message = { + ciphertext: 'encrypted_content', + nonce: 'random_nonce', + publicData: 'public_metadata', + } + + const signatureBase64 = await signToBase64(privateKey, SIGNATURE_DOMAINS.UPDATE, message) + expect(typeof signatureBase64).toBe('string') + + const isValid = await verifyFromBase64(publicKey, signatureBase64, SIGNATURE_DOMAINS.UPDATE, message) + expect(isValid).toBe(true) + }) + }) + + describe('input validation', () => { + it('should throw on invalid private key length', async () => { + const shortKey = new Uint8Array(32) + const message = { + ciphertext: 'test', + nonce: 'test', + publicData: 'test', + } + + await expect(sign(shortKey, SIGNATURE_DOMAINS.UPDATE, message)).rejects.toThrow('Invalid private key length') + }) + + it('should throw on invalid public key length', async () => { + const shortKey = new Uint8Array(16) + const signature = new Uint8Array(SIGNATURE_SIZE) + const message = { + ciphertext: 'test', + nonce: 'test', + publicData: 'test', + } + + await expect(verify(shortKey, signature, SIGNATURE_DOMAINS.UPDATE, message)).rejects.toThrow('Invalid public key length') + }) + + it('should throw on invalid signature length', async () => { + const { publicKey } = await generateKeyPair() + const shortSignature = new Uint8Array(32) + const message = { + ciphertext: 'test', + nonce: 'test', + publicData: 'test', + } + + await expect(verify(publicKey, shortSignature, SIGNATURE_DOMAINS.UPDATE, message)).rejects.toThrow('Invalid signature length') + }) + }) +}) diff --git a/app/src/features/security/lib/crypto/__tests__/xchacha20.test.ts b/app/src/features/security/lib/crypto/__tests__/xchacha20.test.ts new file mode 100644 index 00000000..4da9d2f1 --- /dev/null +++ b/app/src/features/security/lib/crypto/__tests__/xchacha20.test.ts @@ -0,0 +1,144 @@ +import { describe, it, expect, beforeAll } from 'vitest' + +import { + getSodium, + encrypt, + decrypt, + encryptDek, + decryptDek, + encryptString, + decryptString, + generateKey, + generateNonce, + KEY_SIZE, + NONCE_SIZE, +} from '@/shared/lib/crypto' + +describe('XChaCha20-Poly1305', () => { + beforeAll(async () => { + // Ensure sodium is initialized + await getSodium() + }) + + describe('generateKey', () => { + it('should generate a 32-byte key', async () => { + const key = await generateKey() + expect(key).toBeInstanceOf(Uint8Array) + expect(key.length).toBe(KEY_SIZE) + }) + + it('should generate unique keys', async () => { + const key1 = await generateKey() + const key2 = await generateKey() + expect(key1).not.toEqual(key2) + }) + }) + + describe('generateNonce', () => { + it('should generate a 24-byte nonce', async () => { + const nonce = await generateNonce() + expect(nonce).toBeInstanceOf(Uint8Array) + expect(nonce.length).toBe(NONCE_SIZE) + }) + + it('should generate unique nonces', async () => { + const 
nonce1 = await generateNonce() + const nonce2 = await generateNonce() + expect(nonce1).not.toEqual(nonce2) + }) + }) + + describe('encrypt/decrypt', () => { + it('should encrypt and decrypt data correctly', async () => { + const key = await generateKey() + const plaintext = new TextEncoder().encode('Hello, E2EE World!') + + const { ciphertext, nonce } = await encrypt(key, plaintext) + const decrypted = await decrypt(key, ciphertext, nonce) + + expect(decrypted).toEqual(plaintext) + }) + + it('should encrypt empty data', async () => { + const key = await generateKey() + const plaintext = new Uint8Array(0) + + const { ciphertext, nonce } = await encrypt(key, plaintext) + const decrypted = await decrypt(key, ciphertext, nonce) + + expect(decrypted).toEqual(plaintext) + }) + + it('should encrypt large data', async () => { + const key = await generateKey() + const plaintext = new Uint8Array(1024 * 1024) // 1MB + plaintext.fill(0xAB) + + const { ciphertext, nonce } = await encrypt(key, plaintext) + const decrypted = await decrypt(key, ciphertext, nonce) + + expect(decrypted).toEqual(plaintext) + }) + + it('should fail with wrong key', async () => { + const key1 = await generateKey() + const key2 = await generateKey() + const plaintext = new TextEncoder().encode('Secret message') + + const { ciphertext, nonce } = await encrypt(key1, plaintext) + + await expect(decrypt(key2, ciphertext, nonce)).rejects.toThrow('Decryption failed') + }) + + it('should fail with corrupted ciphertext', async () => { + const key = await generateKey() + const plaintext = new TextEncoder().encode('Secret message') + + const { ciphertext, nonce } = await encrypt(key, plaintext) + + // Corrupt the ciphertext + ciphertext[0] ^= 0xFF + + await expect(decrypt(key, ciphertext, nonce)).rejects.toThrow('Decryption failed') + }) + + it('should throw on invalid key length', async () => { + const shortKey = new Uint8Array(16) + const plaintext = new Uint8Array([1, 2, 3]) + + await expect(encrypt(shortKey, plaintext)).rejects.toThrow('Invalid key length') + }) + + it('should throw on invalid nonce length', async () => { + const key = await generateKey() + const ciphertext = new Uint8Array(32) + const shortNonce = new Uint8Array(12) + + await expect(decrypt(key, ciphertext, shortNonce)).rejects.toThrow('Invalid nonce length') + }) + }) + + describe('encryptDek/decryptDek', () => { + it('should encrypt and decrypt a DEK', async () => { + const kek = await generateKey() + const dek = await generateKey() + + const { ciphertext, nonce } = await encryptDek(kek, dek) + const decryptedDek = await decryptDek(kek, ciphertext, nonce) + + expect(decryptedDek).toEqual(dek) + }) + }) + + describe('encryptString/decryptString', () => { + it('should encrypt and decrypt strings', async () => { + const key = await generateKey() + const plaintext = 'Hello, World! 日本語テスト 🎉' + + const { ciphertext, nonce } = await encryptString(key, plaintext) + const decrypted = await decryptString(key, ciphertext, nonce) + + expect(decrypted).toBe(plaintext) + }) + }) +}) diff --git a/app/src/features/security/lib/crypto/index.ts b/app/src/features/security/lib/crypto/index.ts new file mode 100644 index 00000000..aab0c207 --- /dev/null +++ b/app/src/features/security/lib/crypto/index.ts @@ -0,0 +1,7 @@ +/** + * E2EE Crypto Module + * + * Re-exported from shared/lib/crypto for backward compatibility. 
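+ *
+ * Both import paths resolve to the same symbols, e.g.:
+ * ```ts
+ * import { encrypt } from '@/features/security/lib/crypto' // legacy path
+ * import { decrypt } from '@/shared/lib/crypto'            // canonical path
+ * ```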
+ */ + +export * from '@/shared/lib/crypto' diff --git a/app/src/features/security/lib/document-keys.ts b/app/src/features/security/lib/document-keys.ts new file mode 100644 index 00000000..94b10025 --- /dev/null +++ b/app/src/features/security/lib/document-keys.ts @@ -0,0 +1,144 @@ +/** + * E2EE Document Key Management + * + * Helpers for creating documents with E2EE encryption keys. + */ + +import { + storeDocumentKey, + getMyWorkspaceKey, + getDocumentKey, +} from '@/shared/api' + +import { + getKeyManager, + generateDocumentDek, + createEncryptedDekForApi, +} from './keys' + +/** + * Generate and store a DEK for a newly created document. + * + * This should be called immediately after creating a document. + * + * @param documentId - The ID of the newly created document + * @param workspaceId - The workspace ID (for fetching KEK) + * @throws Error if E2EE is not unlocked or key operations fail + */ +export async function createDocumentDek( + documentId: string, + workspaceId: string +): Promise { + const km = getKeyManager() + + // Ensure E2EE is unlocked + if (!km.isUnlocked) { + throw new Error('E2EE session is locked. Please unlock first.') + } + + // Get workspace KEK + const kek = await km.getWorkspaceKek(workspaceId, async () => { + const response = await getMyWorkspaceKey({ id: workspaceId }) + return response.encryptedKek + }) + + // Generate DEK for the new document + const dek = await generateDocumentDek() + + // Encrypt DEK with workspace KEK + const { encryptedDek, nonce } = await createEncryptedDekForApi(dek, kek) + + // Store the encrypted DEK + await storeDocumentKey({ + id: documentId, + requestBody: { + encryptedDek, + nonce, + keyVersion: 1, // Initial version for new document + }, + }) + + // Clear DEK from memory + dek.fill(0) +} + +/** + * Check if E2EE is enabled and unlocked. + */ +export function isE2EEReady(): boolean { + const km = getKeyManager() + return km.isInitialized && km.isUnlocked +} + +/** + * Create DEK for a new document if E2EE is enabled. + * + * This is a safe wrapper that does nothing if E2EE is not set up or unlocked. + * Use this in document creation flows. + * + * @param documentId - The ID of the newly created document + * @param workspaceId - The workspace ID (for fetching KEK) + * @returns true if DEK was created, false if E2EE is not enabled + */ +export async function createDocumentDekIfNeeded( + documentId: string, + workspaceId: string | null +): Promise { + if (!workspaceId) { + console.warn('[e2ee] Cannot create DEK: no workspace ID') + return false + } + + if (!isE2EEReady()) { + // E2EE not enabled or not unlocked, skip DEK creation + return false + } + + try { + await createDocumentDek(documentId, workspaceId) + return true + } catch (err) { + console.error('[e2ee] Failed to create document DEK:', err) + throw err + } +} + +/** + * Get the DEK for a document (for plugin use). + * + * This is a convenience function for effect handlers that need to + * encrypt data for a newly created document. 
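+ *
+ * Hedged usage sketch (the surrounding effect-handler wiring is assumed;
+ * getDocumentDekForPlugin and encrypt are this feature's own exports):
+ * ```ts
+ * const dek = await getDocumentDekForPlugin(documentId, workspaceId)
+ * if (dek) {
+ *   const { ciphertext, nonce } = await encrypt(dek, payloadBytes)
+ *   // send ciphertext/nonce to the API alongside the document update
+ * }
+ * ```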
+ *
+ * @param documentId - The document ID
+ * @param workspaceId - The workspace ID (for fetching KEK)
+ * @returns The DEK as Uint8Array, or null if E2EE is not enabled
+ */
+export async function getDocumentDekForPlugin(
+  documentId: string,
+  workspaceId: string
+): Promise<Uint8Array | null> {
+  if (!isE2EEReady()) {
+    return null
+  }
+
+  const km = getKeyManager()
+
+  try {
+    // Get workspace KEK
+    const kek = await km.getWorkspaceKek(workspaceId, async () => {
+      const response = await getMyWorkspaceKey({ id: workspaceId })
+      return response.encryptedKek
+    })
+
+    // Get and decrypt document DEK
+    const dek = await km.getDocumentDek(documentId, kek, async () => {
+      const response = await getDocumentKey({ id: documentId })
+      return { encryptedDek: response.encryptedDek, nonce: response.nonce }
+    })
+
+    return dek
+  } catch (err) {
+    console.error('[e2ee] Failed to get document DEK:', err)
+    return null
+  }
+}
diff --git a/app/src/features/security/lib/error-log/error-log-store.ts b/app/src/features/security/lib/error-log/error-log-store.ts
new file mode 100644
index 00000000..f79f517f
--- /dev/null
+++ b/app/src/features/security/lib/error-log/error-log-store.ts
@@ -0,0 +1,270 @@
+/**
+ * Error Log Store
+ *
+ * Stores error logs in IndexedDB for debugging and troubleshooting.
+ * Sensitive information is never stored.
+ */
+
+import type { ErrorCode, ErrorCategory, ErrorContext } from '@/shared/types/security'
+
+const DB_NAME = 'refmd-error-logs'
+const DB_VERSION = 1
+const STORE_NAME = 'logs'
+const MAX_LOGS = 1000
+
+/** Error log entry */
+export interface ErrorLog {
+  /** Unique ID */
+  id: string
+  /** Timestamp */
+  timestamp: number
+  /** Error code */
+  code: ErrorCode
+  /** Error category */
+  category: ErrorCategory
+  /** User-facing message (no sensitive data) */
+  message: string
+  /** Context (document/workspace IDs, operation name) */
+  context?: ErrorContext
+}
+
+/**
+ * Generate a unique ID
+ */
+function generateId(): string {
+  return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`
+}
+
+/**
+ * Open the IndexedDB database
+ */
+function openDatabase(): Promise<IDBDatabase> {
+  return new Promise((resolve, reject) => {
+    const request = indexedDB.open(DB_NAME, DB_VERSION)
+
+    request.onerror = () => {
+      reject(new Error(`Failed to open error log database: ${request.error?.message}`))
+    }
+
+    request.onsuccess = () => {
+      resolve(request.result)
+    }
+
+    request.onupgradeneeded = (event) => {
+      const db = (event.target as IDBOpenDBRequest).result
+
+      if (!db.objectStoreNames.contains(STORE_NAME)) {
+        const store = db.createObjectStore(STORE_NAME, { keyPath: 'id' })
+        // Index for querying by timestamp (for pruning old logs)
+        store.createIndex('timestamp', 'timestamp', { unique: false })
+        // Index for querying by code
+        store.createIndex('code', 'code', { unique: false })
+      }
+    }
+  })
+}
+
+/**
+ * ErrorLogStore - manages error log storage in IndexedDB
+ */
+export class ErrorLogStore {
+  private db: IDBDatabase | null = null
+
+  /**
+   * Initialize the store
+   */
+  async initialize(): Promise<void> {
+    if (this.db) return
+    this.db = await openDatabase()
+  }
+
+  /**
+   * Ensure database is initialized
+   */
+  private async ensureDb(): Promise<IDBDatabase> {
+    if (!this.db) {
+      await this.initialize()
+    }
+    return this.db!
+  }
+
+  /**
+   * Add an error log entry
+   */
+  async add(log: Omit<ErrorLog, 'id'>): Promise<void> {
+    const db = await this.ensureDb()
+
+    const entry: ErrorLog = {
+      id: generateId(),
+      ...log,
+    }
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.add(entry)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to add error log: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Get all error logs
+   */
+  async getAll(): Promise<ErrorLog[]> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readonly')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.getAll()
+
+      request.onerror = () => {
+        reject(new Error(`Failed to get error logs: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        // Sort by timestamp descending (newest first)
+        const logs = (request.result as ErrorLog[]).sort(
+          (a, b) => b.timestamp - a.timestamp
+        )
+        resolve(logs)
+      }
+    })
+  }
+
+  /**
+   * Get recent error logs
+   */
+  async getRecent(limit: number = 100): Promise<ErrorLog[]> {
+    const all = await this.getAll()
+    return all.slice(0, limit)
+  }
+
+  /**
+   * Get log count
+   */
+  async count(): Promise<number> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readonly')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.count()
+
+      request.onerror = () => {
+        reject(new Error(`Failed to count error logs: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve(request.result)
+      }
+    })
+  }
+
+  /**
+   * Prune old logs to keep under MAX_LOGS
+   */
+  async prune(): Promise<number> {
+    const db = await this.ensureDb()
+    const currentCount = await this.count()
+
+    if (currentCount <= MAX_LOGS) {
+      return 0
+    }
+
+    const toDelete = currentCount - MAX_LOGS
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const index = store.index('timestamp')
+
+      // Get oldest logs
+      const request = index.openCursor()
+      let deleted = 0
+
+      request.onerror = () => {
+        reject(new Error(`Failed to prune error logs: ${request.error?.message}`))
+      }
+
+      request.onsuccess = (event) => {
+        const cursor = (event.target as IDBRequest).result
+        if (cursor && deleted < toDelete) {
+          cursor.delete()
+          deleted++
+          cursor.continue()
+        } else {
+          resolve(deleted)
+        }
+      }
+    })
+  }
+
+  /**
+   * Clear all logs
+   */
+  async clear(): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.clear()
+
+      request.onerror = () => {
+        reject(new Error(`Failed to clear error logs: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Close the database connection
+   */
+  close(): void {
+    if (this.db) {
+      this.db.close()
+      this.db = null
+    }
+  }
+
+  /**
+   * Delete the entire database
+   */
+  static async deleteDatabase(): Promise<void> {
+    return new Promise((resolve, reject) => {
+      const request = indexedDB.deleteDatabase(DB_NAME)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to delete error log database: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+}
+
+// Singleton instance
+let errorLogStoreInstance: ErrorLogStore | null = null
+
+/**
+ * Get the singleton ErrorLogStore instance
+ */
+export function getErrorLogStore(): ErrorLogStore {
+  if (!errorLogStoreInstance) {
+    errorLogStoreInstance = new ErrorLogStore()
+  }
+  return errorLogStoreInstance
+}
diff --git a/app/src/features/security/lib/error-log/index.ts b/app/src/features/security/lib/error-log/index.ts
new file mode 100644
index 00000000..eb8528f8
--- /dev/null
+++ b/app/src/features/security/lib/error-log/index.ts
@@ -0,0 +1,18 @@
+/**
+ * Error Log Module
+ *
+ * Provides error logging to IndexedDB for debugging.
+ */
+
+export {
+  ErrorLogStore,
+  getErrorLogStore,
+  type ErrorLog,
+} from './error-log-store'
+
+export {
+  logError,
+  logAndThrow,
+  getRecentErrors,
+  clearErrorLogs,
+} from './log-error'
diff --git a/app/src/features/security/lib/error-log/log-error.ts b/app/src/features/security/lib/error-log/log-error.ts
new file mode 100644
index 00000000..cf4e35f4
--- /dev/null
+++ b/app/src/features/security/lib/error-log/log-error.ts
@@ -0,0 +1,101 @@
+/**
+ * Error Logging
+ *
+ * Logs errors to IndexedDB with sensitive information filtered out.
+ * Never sends error data to the server.
+ */
+
+import { type CryptoError, type ErrorContext } from '@/shared/types/security'
+
+import { getErrorLogStore, type ErrorLog } from './error-log-store'
+
+/**
+ * Sanitize error context to remove any potentially sensitive data
+ */
+function sanitizeContext(context?: ErrorContext): ErrorContext | undefined {
+  if (!context) return undefined
+
+  // Only keep safe fields (IDs and operation names)
+  const safe: ErrorContext = {}
+
+  if (context.documentId) {
+    safe.documentId = context.documentId
+  }
+  if (context.workspaceId) {
+    safe.workspaceId = context.workspaceId
+  }
+  if (context.operation) {
+    safe.operation = context.operation
+  }
+
+  return Object.keys(safe).length > 0 ? safe : undefined
+}
+
+/**
+ * Log a CryptoError to IndexedDB
+ *
+ * Sensitive information (keys, passphrases, content) is never logged.
+ * Only error codes, user-facing messages, and safe context are stored.
+ */
+export async function logError(error: CryptoError): Promise<void> {
+  try {
+    const store = getErrorLogStore()
+
+    const logEntry: Omit<ErrorLog, 'id'> = {
+      timestamp: Date.now(),
+      code: error.code,
+      category: error.category,
+      message: error.getUserMessage(),
+      context: sanitizeContext(error.context),
+    }
+
+    await store.add(logEntry)
+
+    // Prune old logs to prevent unbounded growth
+    await store.prune()
+
+    // In development, also log to console
+    if (process.env.NODE_ENV === 'development') {
+      console.error('[CryptoError]', {
+        code: error.code,
+        category: error.category,
+        message: error.message,
+        context: error.context,
+      })
+    }
+  } catch (logStoreError) {
+    // Don't throw if logging fails - just log to console in dev
+    if (process.env.NODE_ENV === 'development') {
+      console.error('[CryptoError] Failed to log error:', logStoreError)
+      console.error('[CryptoError] Original error:', error)
+    }
+  }
+}
+
+/**
+ * Log an error and re-throw it
+ *
+ * Useful for logging errors in catch blocks without swallowing them.
+ */
+export async function logAndThrow(error: CryptoError): Promise<never> {
+  await logError(error)
+  throw error
+}
+
+/**
+ * Get recent error logs for debugging
+ *
+ * Only available in the browser.
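+ *
+ * Example (dev-console style usage; the limit value is arbitrary):
+ *
+ * ```typescript
+ * const logs = await getRecentErrors(20)
+ * logs.forEach(l => console.log(l.timestamp, l.code, l.message))
+ * ```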
+ */
+export async function getRecentErrors(limit: number = 100): Promise<ErrorLog[]> {
+  const store = getErrorLogStore()
+  return store.getRecent(limit)
+}
+
+/**
+ * Clear all error logs
+ */
+export async function clearErrorLogs(): Promise<void> {
+  const store = getErrorLogStore()
+  return store.clear()
+}
diff --git a/app/src/features/security/lib/files/index.ts b/app/src/features/security/lib/files/index.ts
new file mode 100644
index 00000000..3912c22a
--- /dev/null
+++ b/app/src/features/security/lib/files/index.ts
@@ -0,0 +1,7 @@
+/**
+ * E2EE File Encryption Module
+ *
+ * Re-exported from shared/lib/files for backward compatibility.
+ */
+
+export * from '@/shared/lib/files'
diff --git a/app/src/features/security/lib/key-helpers.ts b/app/src/features/security/lib/key-helpers.ts
new file mode 100644
index 00000000..29979a77
--- /dev/null
+++ b/app/src/features/security/lib/key-helpers.ts
@@ -0,0 +1,95 @@
+/**
+ * Key Helpers - Simplified API for fetching encryption keys
+ *
+ * These helper functions eliminate boilerplate code for common key operations.
+ * Instead of manually calling getWorkspaceKek with inline fetch functions,
+ * just call fetchWorkspaceKek(workspaceId).
+ *
+ * Before:
+ * ```typescript
+ * const service = getKeyVaultService()
+ * await service.ready()
+ * service.ensureUnlocked()
+ * const kek = await service.getWorkspaceKek(workspaceId, async () => {
+ *   const response = await getMyWorkspaceKey({ id: workspaceId })
+ *   return response.encryptedKek
+ * })
+ * const dek = await service.getDocumentDek(documentId, kek, async () => {
+ *   const response = await getDocumentKey({ id: documentId })
+ *   return { encryptedDek: response.encryptedDek, nonce: response.nonce }
+ * })
+ * ```
+ *
+ * After:
+ * ```typescript
+ * const { kek, dek } = await fetchDocumentKeys(documentId, workspaceId)
+ * ```
+ */
+
+import { getMyWorkspaceKey, getDocumentKey } from '@/shared/api/client'
+
+import { getKeyVaultService } from './key-vault-service'
+
+/**
+ * Fetch workspace KEK (Key Encryption Key).
+ *
+ * Auto-initializes KeyVaultService and ensures unlocked state.
+ *
+ * @param workspaceId - Workspace ID
+ * @returns KEK as Uint8Array
+ * @throws SessionLockedError if session is locked
+ */
+export async function fetchWorkspaceKek(workspaceId: string): Promise<Uint8Array> {
+  const service = getKeyVaultService()
+  return service.getWorkspaceKek(workspaceId, async () => {
+    const response = await getMyWorkspaceKey({ id: workspaceId })
+    return response.encryptedKek
+  })
+}
+
+/**
+ * Fetch document DEK (Document Encryption Key).
+ *
+ * Automatically fetches the workspace KEK first.
+ * Auto-initializes KeyVaultService and ensures unlocked state.
+ *
+ * @param documentId - Document ID
+ * @param workspaceId - Workspace ID
+ * @returns DEK as Uint8Array
+ * @throws SessionLockedError if session is locked
+ */
+export async function fetchDocumentDek(
+  documentId: string,
+  workspaceId: string
+): Promise<Uint8Array> {
+  const kek = await fetchWorkspaceKek(workspaceId)
+  const service = getKeyVaultService()
+  return service.getDocumentDek(documentId, kek, async () => {
+    const response = await getDocumentKey({ id: documentId })
+    return { encryptedDek: response.encryptedDek, nonce: response.nonce }
+  })
+}
+
+/**
+ * Fetch both KEK and DEK for a document.
+ *
+ * Use this when you need both keys (e.g., for encryption operations).
+ * Auto-initializes KeyVaultService and ensures unlocked state.
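+ *
+ * For example (IDs are placeholders):
+ *
+ * ```typescript
+ * const { kek, dek } = await fetchDocumentKeys('doc-1', 'ws-1')
+ * ```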
+ *
+ * @param documentId - Document ID
+ * @param workspaceId - Workspace ID
+ * @returns Object with kek and dek as Uint8Array
+ * @throws SessionLockedError if session is locked
+ */
+export async function fetchDocumentKeys(
+  documentId: string,
+  workspaceId: string
+): Promise<{ kek: Uint8Array; dek: Uint8Array }> {
+  const kek = await fetchWorkspaceKek(workspaceId)
+  const service = getKeyVaultService()
+  const dek = await service.getDocumentDek(documentId, kek, async () => {
+    const response = await getDocumentKey({ id: documentId })
+    return { encryptedDek: response.encryptedDek, nonce: response.nonce }
+  })
+  return { kek, dek }
+}
diff --git a/app/src/features/security/lib/key-vault-service.ts b/app/src/features/security/lib/key-vault-service.ts
new file mode 100644
index 00000000..140a6be4
--- /dev/null
+++ b/app/src/features/security/lib/key-vault-service.ts
@@ -0,0 +1,172 @@
+/**
+ * KeyVaultService - Simplified API for encryption key operations
+ *
+ * This service provides a cleaner interface than KeyManager for common operations:
+ * - Auto-initialization: No need to call initialize() manually
+ * - Clear error handling: Throws SessionLockedError when locked
+ * - Facade pattern: Delegates to KeyManager internally
+ *
+ * Usage:
+ * ```typescript
+ * const service = getKeyVaultService()
+ * await service.ready() // Wait for initialization
+ * service.ensureUnlocked() // Throws if locked
+ * const kek = await service.getWorkspaceKek(workspaceId, fetchFn)
+ * ```
+ */
+
+import { getKeyManager, KeyManager, SessionLockedError } from './keys'
+
+let serviceInstance: KeyVaultService | null = null
+let initPromise: Promise<void> | null = null
+
+export class KeyVaultService {
+  private readonly km: KeyManager
+
+  constructor() {
+    this.km = getKeyManager()
+  }
+
+  // ==========================================================================
+  // Initialization
+  // ==========================================================================
+
+  /**
+   * Wait for the service to be ready (initialized).
+   * Call this before any operations if you need to ensure initialization.
+   */
+  async ready(): Promise<void> {
+    if (!initPromise) {
+      initPromise = this.km.initialize()
+    }
+    await initPromise
+  }
+
+  /**
+   * Check if the service is initialized.
+   */
+  get isInitialized(): boolean {
+    return this.km.isInitialized
+  }
+
+  // ==========================================================================
+  // Session State
+  // ==========================================================================

+  /**
+   * Check if the session is unlocked (keys are in memory).
+   */
+  get isUnlocked(): boolean {
+    return this.km.isUnlocked
+  }
+
+  /**
+   * Ensure the session is unlocked.
+   * @throws SessionLockedError if the session is locked
+   */
+  ensureUnlocked(): void {
+    if (!this.km.isUnlocked) {
+      throw new SessionLockedError()
+    }
+  }
+
+  /**
+   * Check if local keys exist in storage.
+   */
+  async hasKeys(): Promise<boolean> {
+    await this.ready()
+    return this.km.hasKeys()
+  }
+
+  // ==========================================================================
+  // Workspace KEK Operations
+  // ==========================================================================
+
+  /**
+   * Get the workspace KEK (Key Encryption Key).
+   * Auto-initializes if needed.
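+   *
+   * The fetchFn is expected to return the caller's encrypted KEK as Base64,
+   * e.g. (sketch mirroring key-helpers.ts):
+   *
+   * ```typescript
+   * const kek = await service.getWorkspaceKek(workspaceId, async () => {
+   *   const response = await getMyWorkspaceKey({ id: workspaceId })
+   *   return response.encryptedKek
+   * })
+   * ```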
+   *
+   * @param workspaceId - Workspace ID
+   * @param fetchFn - Function to fetch encrypted KEK from server
+   * @returns KEK as Uint8Array
+   * @throws SessionLockedError if session is locked
+   */
+  async getWorkspaceKek(
+    workspaceId: string,
+    fetchFn: () => Promise<string>
+  ): Promise<Uint8Array> {
+    await this.ready()
+    this.ensureUnlocked()
+    return this.km.getWorkspaceKek(workspaceId, fetchFn)
+  }
+
+  // ==========================================================================
+  // Document DEK Operations
+  // ==========================================================================
+
+  /**
+   * Get the document DEK (Document Encryption Key).
+   * Auto-initializes if needed.
+   *
+   * @param documentId - Document ID
+   * @param kek - Workspace KEK
+   * @param fetchFn - Function to fetch encrypted DEK from server
+   * @returns DEK as Uint8Array
+   * @throws SessionLockedError if session is locked
+   */
+  async getDocumentDek(
+    documentId: string,
+    kek: Uint8Array,
+    fetchFn: () => Promise<{ encryptedDek: string; nonce: string }>
+  ): Promise<Uint8Array> {
+    await this.ready()
+    this.ensureUnlocked()
+    return this.km.getDocumentDek(documentId, kek, fetchFn)
+  }
+
+  // ==========================================================================
+  // Low-level Access (for advanced use cases)
+  // ==========================================================================
+
+  /**
+   * Get the underlying KeyManager instance.
+   * Use with caution - prefer the service methods when possible.
+   */
+  get keyManager(): KeyManager {
+    return this.km
+  }
+
+  /**
+   * Subscribe to unlock state changes.
+   * @returns Unsubscribe function
+   */
+  onUnlockChange(callback: () => void): () => void {
+    return this.km.onUnlockChange(callback)
+  }
+}
+
+/**
+ * Get the KeyVaultService singleton instance.
+ * Automatically initializes on first access.
+ */
+export function getKeyVaultService(): KeyVaultService {
+  if (!serviceInstance) {
+    serviceInstance = new KeyVaultService()
+    // Start initialization immediately but don't block
+    serviceInstance.ready().catch((err) => {
+      console.error('[KeyVaultService] Auto-initialization failed:', err)
+    })
+  }
+  return serviceInstance
+}
+
+/**
+ * Reset the service instance (for testing).
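+ *
+ * A typical test-harness sketch (vitest hooks, as in the tests below):
+ *
+ * ```typescript
+ * beforeEach(() => {
+ *   resetKeyVaultService()
+ * })
+ * ```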
+ */
+export function resetKeyVaultService(): void {
+  serviceInstance = null
+  initPromise = null
+}
+
+// Re-export error type for convenience
+export { SessionLockedError }
diff --git a/app/src/features/security/lib/keys/__tests__/key-cache.test.ts b/app/src/features/security/lib/keys/__tests__/key-cache.test.ts
new file mode 100644
index 00000000..11efaa85
--- /dev/null
+++ b/app/src/features/security/lib/keys/__tests__/key-cache.test.ts
@@ -0,0 +1,217 @@
+import { describe, it, expect, beforeEach } from 'vitest'
+
+import { KeyCache, KekCache, DekCache, clearAllCaches } from '../key-cache'
+
+describe('KeyCache', () => {
+  describe('basic operations', () => {
+    let cache: KeyCache<string>
+
+    beforeEach(() => {
+      cache = new KeyCache<string>(3)
+    })
+
+    it('should set and get values', () => {
+      cache.set('a', 'value-a')
+      expect(cache.get('a')).toBe('value-a')
+    })
+
+    it('should return undefined for missing keys', () => {
+      expect(cache.get('missing')).toBeUndefined()
+    })
+
+    it('should update existing values', () => {
+      cache.set('a', 'value-1')
+      cache.set('a', 'value-2')
+      expect(cache.get('a')).toBe('value-2')
+    })
+
+    it('should delete values', () => {
+      cache.set('a', 'value-a')
+      expect(cache.delete('a')).toBe(true)
+      expect(cache.get('a')).toBeUndefined()
+    })
+
+    it('should return false when deleting non-existent key', () => {
+      expect(cache.delete('missing')).toBe(false)
+    })
+
+    it('should check if key exists', () => {
+      cache.set('a', 'value-a')
+      expect(cache.has('a')).toBe(true)
+      expect(cache.has('missing')).toBe(false)
+    })
+
+    it('should clear all values', () => {
+      cache.set('a', 'value-a')
+      cache.set('b', 'value-b')
+      cache.clear()
+      expect(cache.size).toBe(0)
+      expect(cache.get('a')).toBeUndefined()
+    })
+
+    it('should track size', () => {
+      expect(cache.size).toBe(0)
+      cache.set('a', 'value-a')
+      expect(cache.size).toBe(1)
+      cache.set('b', 'value-b')
+      expect(cache.size).toBe(2)
+      cache.delete('a')
+      expect(cache.size).toBe(1)
+    })
+
+    it('should return all keys', () => {
+      cache.set('a', 'value-a')
+      cache.set('b', 'value-b')
+      expect(cache.keys()).toEqual(expect.arrayContaining(['a', 'b']))
+    })
+  })
+
+  describe('LRU eviction', () => {
+    let cache: KeyCache<string>
+
+    beforeEach(() => {
+      cache = new KeyCache<string>(3)
+    })
+
+    it('should evict oldest entry when at capacity', () => {
+      cache.set('a', 'value-a')
+      cache.set('b', 'value-b')
+      cache.set('c', 'value-c')
+      cache.set('d', 'value-d') // should evict 'a'
+
+      expect(cache.get('a')).toBeUndefined()
+      expect(cache.get('b')).toBe('value-b')
+      expect(cache.get('c')).toBe('value-c')
+      expect(cache.get('d')).toBe('value-d')
+      expect(cache.size).toBe(3)
+    })
+
+    it('should update access time on get', async () => {
+      // Set 'a' first (oldest by insertion)
+      cache.set('a', 'value-a')
+
+      // Wait to ensure different timestamps
+      await new Promise(resolve => setTimeout(resolve, 50))
+
+      // Set 'b' and 'c'
+      cache.set('b', 'value-b')
+      await new Promise(resolve => setTimeout(resolve, 50))
+      cache.set('c', 'value-c')
+
+      // Wait and access 'a' to update its timestamp (makes it newest)
+      await new Promise(resolve => setTimeout(resolve, 50))
+      const accessedA = cache.get('a')
+      expect(accessedA).toBe('value-a')
+
+      // Now timestamps should be: b (oldest), c (middle), a (newest)
+      // Wait a bit before adding 'd'
+      await new Promise(resolve => setTimeout(resolve, 50))
+
+      // Add 'd', should evict 'b' (oldest by access time)
+      cache.set('d', 'value-d')
+
+      expect(cache.get('a')).toBe('value-a') // still here (was accessed recently)
+      expect(cache.get('b')).toBeUndefined() // evicted (oldest)
+      expect(cache.get('c')).toBe('value-c')
+      expect(cache.get('d')).toBe('value-d')
+    })
+  })
+
+  describe('Uint8Array handling', () => {
+    it('should zero out Uint8Array values on clear', () => {
+      const cache = new KeyCache<Uint8Array>(3)
+      const value = new Uint8Array([1, 2, 3, 4])
+      cache.set('key', value)
+      cache.clear()
+
+      // Original array should be zeroed
+      expect(Array.from(value)).toEqual([0, 0, 0, 0])
+    })
+
+    it('should zero out Uint8Array values on eviction', async () => {
+      const cache = new KeyCache<Uint8Array>(2)
+      const value1 = new Uint8Array([1, 2, 3])
+
+      cache.set('a', value1)
+
+      await new Promise(resolve => setTimeout(resolve, 10))
+
+      cache.set('b', new Uint8Array([4, 5, 6]))
+
+      await new Promise(resolve => setTimeout(resolve, 10))
+
+      // This should evict 'a'
+      cache.set('c', new Uint8Array([7, 8, 9]))
+
+      // Original value should be zeroed
+      expect(Array.from(value1)).toEqual([0, 0, 0])
+    })
+  })
+})
+
+describe('KekCache', () => {
+  let cache: KekCache
+
+  beforeEach(() => {
+    cache = new KekCache(3)
+  })
+
+  it('should set and get KEK by workspace ID', () => {
+    const kek = new Uint8Array(32).fill(1)
+    cache.setKek('ws-1', kek)
+    expect(cache.getKek('ws-1')).toBe(kek)
+  })
+
+  it('should delete KEK', () => {
+    const kek = new Uint8Array(32).fill(1)
+    cache.setKek('ws-1', kek)
+    expect(cache.deleteKek('ws-1')).toBe(true)
+    expect(cache.getKek('ws-1')).toBeUndefined()
+  })
+})
+
+describe('DekCache', () => {
+  let cache: DekCache
+
+  beforeEach(() => {
+    cache = new DekCache(3)
+  })
+
+  it('should set and get DEK by document ID', () => {
+    const dek = new Uint8Array(32).fill(2)
+    cache.setDek('doc-1', dek)
+    expect(cache.getDek('doc-1')).toBe(dek)
+  })
+
+  it('should delete DEK', () => {
+    const dek = new Uint8Array(32).fill(2)
+    cache.setDek('doc-1', dek)
+    expect(cache.deleteDek('doc-1')).toBe(true)
+    expect(cache.getDek('doc-1')).toBeUndefined()
+  })
+
+  it('should delete multiple DEKs by workspace', () => {
+    cache.setDek('doc-1', new Uint8Array(32).fill(1))
+    cache.setDek('doc-2', new Uint8Array(32).fill(2))
+    cache.setDek('doc-3', new Uint8Array(32).fill(3))
+
+    cache.deleteByWorkspace(['doc-1', 'doc-2'])
+
+    expect(cache.getDek('doc-1')).toBeUndefined()
+    expect(cache.getDek('doc-2')).toBeUndefined()
+    expect(cache.getDek('doc-3')).toBeDefined()
+  })
+})
+
+describe('clearAllCaches', () => {
+  it('should clear both KEK and DEK caches', () => {
+    const kekCache = new KekCache()
+    const dekCache = new DekCache()
+
+    kekCache.setKek('ws-1', new Uint8Array(32).fill(1))
+    dekCache.setDek('doc-1', new Uint8Array(32).fill(2))
+
+    // Note: clearAllCaches uses singletons, so this test is more of a smoke test
+    clearAllCaches()
+  })
+})
diff --git a/app/src/features/security/lib/keys/__tests__/share-key.test.ts b/app/src/features/security/lib/keys/__tests__/share-key.test.ts
new file mode 100644
index 00000000..231facb5
--- /dev/null
+++ b/app/src/features/security/lib/keys/__tests__/share-key.test.ts
@@ -0,0 +1,105 @@
+import { describe, it, expect, beforeAll } from 'vitest'
+
+import { getSodium } from '../../crypto'
+import {
+  generateShareKey,
+  extractShareKeyFromFragment,
+  hasShareKeyFragment,
+  buildShareUrl,
+  URL_FRAGMENT_PREFIX,
+  SHARE_KEY_SIZE,
+} from '../share-key'
+
+describe('Share Key', () => {
+  beforeAll(async () => {
+    // Initialize sodium
+    await getSodium()
+  })
+
+  describe('generateShareKey', () => {
+    it('should generate a key and fragment', async () => {
+      const result = await generateShareKey()
+
+      expect(result.key).toBeInstanceOf(Uint8Array)
+      expect(result.key.length).toBe(SHARE_KEY_SIZE)
+      expect(result.fragment).toMatch(new RegExp(`^${URL_FRAGMENT_PREFIX}`))
+    })
+
+    it('should generate unique keys', async () => {
+      const result1 = await generateShareKey()
+      const result2 = await generateShareKey()
+
+      expect(result1.key).not.toEqual(result2.key)
+      expect(result1.fragment).not.toBe(result2.fragment)
+    })
+  })
+
+  describe('extractShareKeyFromFragment', () => {
+    it('should extract key from valid fragment', async () => {
+      const original = await generateShareKey()
+      const extracted = await extractShareKeyFromFragment(original.fragment)
+
+      expect(extracted).toBeInstanceOf(Uint8Array)
+      expect(Array.from(extracted!)).toEqual(Array.from(original.key))
+    })
+
+    it('should handle fragment with leading #', async () => {
+      const original = await generateShareKey()
+      const extracted = await extractShareKeyFromFragment('#' + original.fragment)
+
+      expect(extracted).toBeInstanceOf(Uint8Array)
+      expect(Array.from(extracted!)).toEqual(Array.from(original.key))
+    })
+
+    it('should return null for invalid fragment', async () => {
+      const result = await extractShareKeyFromFragment('invalid')
+      expect(result).toBeNull()
+    })
+
+    it('should return null for fragment without prefix', async () => {
+      const result = await extractShareKeyFromFragment('other=value')
+      expect(result).toBeNull()
+    })
+
+    it('should return null for empty fragment', async () => {
+      const result = await extractShareKeyFromFragment('')
+      expect(result).toBeNull()
+    })
+  })
+
+  describe('hasShareKeyFragment', () => {
+    it('should return true for valid fragment', async () => {
+      const { fragment } = await generateShareKey()
+      expect(hasShareKeyFragment(fragment)).toBe(true)
+    })
+
+    it('should return true for fragment with leading #', async () => {
+      const { fragment } = await generateShareKey()
+      expect(hasShareKeyFragment('#' + fragment)).toBe(true)
+    })
+
+    it('should return false for invalid fragment', () => {
+      expect(hasShareKeyFragment('invalid')).toBe(false)
+      expect(hasShareKeyFragment('')).toBe(false)
+      expect(hasShareKeyFragment('#other=value')).toBe(false)
+    })
+  })
+
+  describe('buildShareUrl', () => {
+    it('should build URL with fragment', () => {
+      const url = buildShareUrl('https://refmd.io/share/abc123', 'key=xyz')
+      expect(url).toBe('https://refmd.io/share/abc123#key=xyz')
+    })
+
+    it('should replace existing fragment', () => {
+      const url = buildShareUrl('https://refmd.io/share/abc123#old', 'key=xyz')
+      expect(url).toBe('https://refmd.io/share/abc123#key=xyz')
+    })
+
+    it('should work with generated fragment', async () => {
+      const { fragment } = await generateShareKey()
+      const url = buildShareUrl('https://refmd.io/share/abc123', fragment)
+      expect(url).toBe(`https://refmd.io/share/abc123#${fragment}`)
+    })
+  })
+})
diff --git a/app/src/features/security/lib/keys/document-dek.ts b/app/src/features/security/lib/keys/document-dek.ts
new file mode 100644
index 00000000..e9f3ffde
--- /dev/null
+++ b/app/src/features/security/lib/keys/document-dek.ts
@@ -0,0 +1,218 @@
+/**
+ * Document DEK (Data Encryption Key) Management
+ *
+ * DEKs are used to encrypt document content.
+ * Each document has its own DEK, encrypted with the workspace KEK.
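+ *
+ * Typical envelope flow with this module's exports (sketch; the kek comes
+ * from workspace-kek):
+ *
+ * ```typescript
+ * const dek = await generateDocumentDek()
+ * const { encryptedDek, nonce } = await createEncryptedDekForApi(dek, kek)
+ * // store encryptedDek/nonce via the API; keep the raw dek only in memory
+ * ```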
+ */
+
+import {
+  generateKey,
+  encrypt,
+  decrypt,
+  getSodium,
+} from '../crypto'
+
+import { getDekCache } from './key-cache'
+
+/** DEK size (32 bytes) */
+export const DEK_SIZE = 32
+
+/** Document DEK with metadata */
+export interface DocumentDek {
+  /** Document ID */
+  documentId: string
+  /** 32-byte DEK */
+  key: Uint8Array
+  /** Key version for rotation */
+  version: number
+}
+
+/** Encrypted DEK from API response */
+export interface EncryptedDekFromApi {
+  /** Encrypted DEK (Base64) */
+  encryptedDek: string
+  /** Nonce (Base64) */
+  nonce: string
+  /** Key version */
+  keyVersion: number
+  /** Document ID */
+  documentId: string
+}
+
+/**
+ * Generate a new Document DEK.
+ *
+ * @returns 32-byte random DEK
+ */
+export async function generateDocumentDek(): Promise<Uint8Array> {
+  return generateKey()
+}
+
+/**
+ * Encrypt a DEK with a workspace KEK.
+ *
+ * @param dek - The DEK to encrypt
+ * @param kek - Workspace KEK
+ * @returns Encrypted DEK with nonce
+ */
+export async function encryptDekWithKek(
+  dek: Uint8Array,
+  kek: Uint8Array
+): Promise<{
+  encryptedDek: Uint8Array
+  nonce: Uint8Array
+}> {
+  const result = await encrypt(kek, dek)
+  return {
+    encryptedDek: result.ciphertext,
+    nonce: result.nonce,
+  }
+}
+
+/**
+ * Decrypt a DEK with a workspace KEK.
+ *
+ * @param encryptedDek - Encrypted DEK
+ * @param nonce - Nonce used for encryption
+ * @param kek - Workspace KEK
+ * @returns Decrypted DEK
+ */
+export async function decryptDekWithKek(
+  encryptedDek: Uint8Array,
+  nonce: Uint8Array,
+  kek: Uint8Array
+): Promise<Uint8Array> {
+  return decrypt(kek, encryptedDek, nonce)
+}
+
+/**
+ * Decrypt a DEK from API response format.
+ *
+ * @param encryptedDekBase64 - Base64-encoded encrypted DEK
+ * @param nonceBase64 - Base64-encoded nonce
+ * @param kek - Workspace KEK
+ * @returns Decrypted DEK
+ */
+export async function decryptDekFromApiResponse(
+  encryptedDekBase64: string,
+  nonceBase64: string,
+  kek: Uint8Array
+): Promise<Uint8Array> {
+  const sodium = await getSodium()
+
+  const encryptedDek = sodium.from_base64(encryptedDekBase64, sodium.base64_variants.ORIGINAL)
+  const nonce = sodium.from_base64(nonceBase64, sodium.base64_variants.ORIGINAL)
+
+  return decryptDekWithKek(encryptedDek, nonce, kek)
+}
+
+/**
+ * Encode DEK for API request.
+ *
+ * @param encryptedDek - Encrypted DEK
+ * @param nonce - Nonce
+ * @returns Base64-encoded values
+ */
+export async function encodeDekForApi(
+  encryptedDek: Uint8Array,
+  nonce: Uint8Array
+): Promise<{
+  encryptedDek: string
+  nonce: string
+}> {
+  const sodium = await getSodium()
+
+  return {
+    encryptedDek: sodium.to_base64(encryptedDek, sodium.base64_variants.ORIGINAL),
+    nonce: sodium.to_base64(nonce, sodium.base64_variants.ORIGINAL),
+  }
+}
+
+/**
+ * Create encrypted DEK ready for API request.
+ *
+ * @param dek - The DEK to encrypt
+ * @param kek - Workspace KEK
+ * @returns Base64-encoded encrypted DEK and nonce
+ */
+export async function createEncryptedDekForApi(
+  dek: Uint8Array,
+  kek: Uint8Array
+): Promise<{
+  encryptedDek: string
+  nonce: string
+}> {
+  const { encryptedDek, nonce } = await encryptDekWithKek(dek, kek)
+  return encodeDekForApi(encryptedDek, nonce)
+}
+
+/**
+ * Get DEK from cache or fetch from API.
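+ *
+ * Example fetcher shape (the endpoint call mirrors key-helpers.ts):
+ *
+ * ```typescript
+ * const dek = await getOrFetchDek(documentId, kek, async () => {
+ *   const response = await getDocumentKey({ id: documentId })
+ *   return { encryptedDek: response.encryptedDek, nonce: response.nonce }
+ * })
+ * ```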
+ *
+ * @param documentId - Document ID
+ * @param kek - Workspace KEK for decryption
+ * @param fetchDekFn - Function to fetch encrypted DEK from API
+ * @returns Decrypted DEK
+ */
+export async function getOrFetchDek(
+  documentId: string,
+  kek: Uint8Array,
+  fetchDekFn: () => Promise<{ encryptedDek: string; nonce: string }>
+): Promise<Uint8Array> {
+  const cache = getDekCache()
+
+  // Check cache first
+  const cached = cache.getDek(documentId)
+  if (cached) {
+    return cached
+  }
+
+  // Fetch from API
+  const { encryptedDek, nonce } = await fetchDekFn()
+
+  // Decrypt
+  const dek = await decryptDekFromApiResponse(encryptedDek, nonce, kek)
+
+  // Cache the result
+  cache.setDek(documentId, dek)
+
+  return dek
+}
+
+/**
+ * Invalidate cached DEK (e.g., after rotation).
+ *
+ * @param documentId - Document ID
+ */
+export function invalidateCachedDek(documentId: string): void {
+  const cache = getDekCache()
+  cache.deleteDek(documentId)
+}
+
+/**
+ * Invalidate all DEKs for documents in a workspace.
+ * Call this when workspace KEK is rotated.
+ *
+ * @param documentIds - Document IDs to invalidate
+ */
+export function invalidateWorkspaceDeks(documentIds: string[]): void {
+  const cache = getDekCache()
+  cache.deleteByWorkspace(documentIds)
+}
+
+/**
+ * Re-encrypt a DEK with a new KEK (for KEK rotation).
+ *
+ * @param dek - The DEK to re-encrypt
+ * @param newKek - New workspace KEK
+ * @returns Encrypted DEK with new KEK
+ */
+export async function reEncryptDek(
+  dek: Uint8Array,
+  newKek: Uint8Array
+): Promise<{
+  encryptedDek: string
+  nonce: string
+}> {
+  return createEncryptedDekForApi(dek, newKek)
+}
diff --git a/app/src/features/security/lib/keys/index.ts b/app/src/features/security/lib/keys/index.ts
new file mode 100644
index 00000000..9cb5851d
--- /dev/null
+++ b/app/src/features/security/lib/keys/index.ts
@@ -0,0 +1,124 @@
+/**
+ * E2EE Key Management Module
+ *
+ * This module provides all key management functionality for E2EE:
+ * - KeyManager: Main entry point for all key operations
+ * - KeyStore: IndexedDB storage for encrypted keys
+ * - KeyCache: LRU cache for KEK/DEK
+ * - Individual key modules for UMK, User Keys, KEK, DEK, Share Keys
+ */
+
+// Main KeyManager class
+export {
+  KeyManager,
+  getKeyManager,
+  resetKeyManager,
+  SessionLockedError,
+  KeyNotFoundError,
+  type E2EESetupResult,
+  type EncryptedKeysBundle,
+} from './key-manager'
+
+// Key Store (IndexedDB)
+export {
+  KeyStore,
+  getKeyStore,
+  type StoredKeys,
+} from './key-store'
+
+// Key Cache (LRU)
+export {
+  KeyCache,
+  KekCache,
+  DekCache,
+  getKekCache,
+  getDekCache,
+  clearAllCaches,
+  DEFAULT_KEK_CACHE_SIZE,
+  DEFAULT_DEK_CACHE_SIZE,
+} from './key-cache'
+
+// UMK (User Master Key)
+export {
+  generateUmk,
+  deriveUmkFromPassphrase,
+  restoreUmkFromRecoveryKey,
+  verifyPassphrase,
+  generateNewRecoveryKey,
+  validateRecoveryKey,
+  zeroUmk,
+  UMK_SIZE,
+  type UmkGenerationResult,
+} from './umk'
+
+// User Keys (ECDH + Ed25519)
+export {
+  generateUserKeys,
+  encryptUserKeys,
+  decryptUserKeys,
+  reEncryptUserKeys,
+  getPublicKeysBase64,
+  parsePublicKeysFromBase64,
+  zeroUserKeys,
+  type UserKeySet,
+  type EncryptedUserKeys,
+} from './user-keys'
+
+// Workspace KEK
+export {
+  generateWorkspaceKek,
+  encryptKekForRecipient,
+  decryptKek,
+  decryptKekFromApiResponse,
+  encodeKekForApi,
+  getOrFetchKek,
+  invalidateCachedKek,
+  createKekForMember,
+  KEK_SIZE,
+  type WorkspaceKek,
+  type EncryptedKekFromApi,
+} from './workspace-kek'
+
+// Document DEK
+export {
+  generateDocumentDek,
+  encryptDekWithKek,
+  decryptDekWithKek,
+  decryptDekFromApiResponse,
+  encodeDekForApi,
+  createEncryptedDekForApi,
+  getOrFetchDek,
+  invalidateCachedDek,
+  invalidateWorkspaceDeks,
+  reEncryptDek,
+  DEK_SIZE,
+  type DocumentDek,
+  type EncryptedDekFromApi,
+} from './document-dek'
+
+// Share Keys
+export {
+  generateShareKey,
+  extractShareKeyFromFragment,
+  deriveShareKeyFromPassword,
+  createPasswordProtectedShareKey,
+  encryptDekWithShareKey,
+  decryptDekWithShareKey,
+  buildShareUrl,
+  parseSaltFromApi,
+  encodeSaltForApi,
+  hasShareKeyFragment,
+  SHARE_KEY_SIZE,
+  URL_FRAGMENT_PREFIX,
+  type ShareKey,
+  type EncryptedShareKeyForApi,
+} from './share-key'
+
+// Invitation KEK
+export {
+  deriveKeyFromInvitationToken,
+  encryptKekForInvitation,
+  decryptKekFromInvitation,
+  encodeInvitationKekForApi,
+  decodeInvitationKekFromApi,
+} from './invitation-kek'
diff --git a/app/src/features/security/lib/keys/invitation-kek.ts b/app/src/features/security/lib/keys/invitation-kek.ts
new file mode 100644
index 00000000..973ee3d2
--- /dev/null
+++ b/app/src/features/security/lib/keys/invitation-kek.ts
@@ -0,0 +1,147 @@
+/**
+ * Invitation KEK Encryption
+ *
+ * Encrypts workspace KEK using a key derived from the invitation token.
+ * This allows invited users to decrypt the KEK without requiring
+ * the inviter to be online at acceptance time.
+ *
+ * Security model:
+ * - Token is a UUID (122 bits entropy) generated server-side
+ * - Token is transmitted to invitee via any secure channel
+ * - Token is single-use and expires
+ * - Key derivation uses BLAKE2b (via crypto_generichash)
+ */
+
+import { getSodium } from '../crypto'
+
+/** Domain separation context for invitation KEK derivation */
+const INVITATION_KEK_CONTEXT = 'refmd_invitation_kek_v1'
+
+/** Key size for crypto_secretbox (XSalsa20-Poly1305), 32 bytes */
+const KEY_SIZE = 32
+
+/** Nonce size for crypto_secretbox (XSalsa20-Poly1305), 24 bytes */
+const NONCE_SIZE = 24
+
+/**
+ * Derive a symmetric key from an invitation token.
+ *
+ * Uses BLAKE2b hash with domain separation to derive a 32-byte key
+ * from the invitation token.
+ *
+ * @param token - Invitation token (UUID string)
+ * @returns 32-byte derived key
+ */
+export async function deriveKeyFromInvitationToken(token: string): Promise<Uint8Array> {
+  const sodium = await getSodium()
+
+  // Combine token with context for domain separation
+  const input = `${INVITATION_KEK_CONTEXT}:${token}`
+  const inputBytes = sodium.from_string(input)
+
+  // Use BLAKE2b to derive a 32-byte key
+  return sodium.crypto_generichash(KEY_SIZE, inputBytes)
+}
+
+/**
+ * Encrypt a KEK for an invitation.
+ *
+ * @param kek - The workspace KEK to encrypt
+ * @param invitationToken - The invitation token
+ * @returns Encrypted KEK with nonce
+ */
+export async function encryptKekForInvitation(
+  kek: Uint8Array,
+  invitationToken: string
+): Promise<{ ciphertext: Uint8Array; nonce: Uint8Array }> {
+  if (kek.length !== KEY_SIZE) {
+    throw new Error(`Invalid KEK length: expected ${KEY_SIZE}, got ${kek.length}`)
+  }
+
+  const sodium = await getSodium()
+  const derivedKey = await deriveKeyFromInvitationToken(invitationToken)
+
+  const nonce = sodium.randombytes_buf(NONCE_SIZE)
+  const ciphertext = sodium.crypto_secretbox_easy(kek, nonce, derivedKey)
+
+  return { ciphertext, nonce }
+}
+
+/**
+ * Decrypt a KEK from an invitation.
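+ *
+ * Round-trip sketch (the token is a server-issued UUID; see module header):
+ *
+ * ```typescript
+ * const { ciphertext, nonce } = await encryptKekForInvitation(kek, token)
+ * const recovered = await decryptKekFromInvitation(ciphertext, nonce, token)
+ * ```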
+ *
+ * @param ciphertext - Encrypted KEK
+ * @param nonce - Nonce used during encryption
+ * @param invitationToken - The invitation token
+ * @returns Decrypted KEK
+ * @throws Error if decryption fails (wrong token or corrupted data)
+ */
+export async function decryptKekFromInvitation(
+  ciphertext: Uint8Array,
+  nonce: Uint8Array,
+  invitationToken: string
+): Promise<Uint8Array> {
+  if (nonce.length !== NONCE_SIZE) {
+    throw new Error(`Invalid nonce length: expected ${NONCE_SIZE}, got ${nonce.length}`)
+  }
+
+  const sodium = await getSodium()
+  const derivedKey = await deriveKeyFromInvitationToken(invitationToken)
+
+  try {
+    const decrypted = sodium.crypto_secretbox_open_easy(ciphertext, nonce, derivedKey)
+
+    if (decrypted.length !== KEY_SIZE) {
+      throw new Error(`Invalid decrypted KEK length: expected ${KEY_SIZE}, got ${decrypted.length}`)
+    }
+
+    return decrypted
+  } catch {
+    throw new Error('Failed to decrypt invitation KEK: invalid token or corrupted data')
+  }
+}
+
+/**
+ * Encode invitation-encrypted KEK for API storage.
+ *
+ * Format: nonce (24 bytes) || ciphertext
+ *
+ * @param ciphertext - Encrypted KEK
+ * @param nonce - Nonce used during encryption
+ * @returns Base64-encoded combined data
+ */
+export async function encodeInvitationKekForApi(
+  ciphertext: Uint8Array,
+  nonce: Uint8Array
+): Promise<string> {
+  const sodium = await getSodium()
+
+  const combined = new Uint8Array(nonce.length + ciphertext.length)
+  combined.set(nonce, 0)
+  combined.set(ciphertext, nonce.length)
+
+  return sodium.to_base64(combined, sodium.base64_variants.ORIGINAL)
+}
+
+/**
+ * Decode invitation-encrypted KEK from API response.
+ *
+ * @param encryptedKekBase64 - Base64-encoded encrypted KEK from API
+ * @returns Nonce and ciphertext
+ */
+export async function decodeInvitationKekFromApi(encryptedKekBase64: string): Promise<{
+  nonce: Uint8Array
+  ciphertext: Uint8Array
+}> {
+  const sodium = await getSodium()
+  const combined = sodium.from_base64(encryptedKekBase64, sodium.base64_variants.ORIGINAL)
+
+  if (combined.length < NONCE_SIZE + 1) {
+    throw new Error('Invalid invitation KEK format: too short')
+  }
+
+  const nonce = combined.slice(0, NONCE_SIZE)
+  const ciphertext = combined.slice(NONCE_SIZE)
+
+  return { nonce, ciphertext }
+}
diff --git a/app/src/features/security/lib/keys/key-cache.ts b/app/src/features/security/lib/keys/key-cache.ts
new file mode 100644
index 00000000..a73f916c
--- /dev/null
+++ b/app/src/features/security/lib/keys/key-cache.ts
@@ -0,0 +1,235 @@
+/**
+ * E2EE Key Cache
+ *
+ * LRU cache for KEK and DEK keys to avoid repeated API calls and decryption.
+ */
+
+/** Default cache sizes */
+export const DEFAULT_KEK_CACHE_SIZE = 50
+export const DEFAULT_DEK_CACHE_SIZE = 200
+
+/** Cache entry with metadata */
+interface CacheEntry<T> {
+  value: T
+  accessedAt: number
+}
+
+/**
+ * LRU Cache implementation for encryption keys
+ *
+ * Keys are stored in memory only and cleared on page unload.
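+ *
+ * LRU behavior in brief (mirrors the unit tests; timestamps permitting -
+ * the tests insert small delays because accessedAt has ms resolution):
+ *
+ * ```typescript
+ * const cache = new KeyCache<string>(2)
+ * cache.set('a', '1')
+ * cache.set('b', '2')
+ * cache.get('a')      // refreshes 'a'
+ * cache.set('c', '3') // evicts 'b', the least recently accessed
+ * ```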
+ */
+export class KeyCache<T> {
+  private cache: Map<string, CacheEntry<T>>
+  private readonly maxSize: number
+
+  constructor(maxSize: number) {
+    this.cache = new Map()
+    this.maxSize = maxSize
+  }
+
+  /**
+   * Get a value from the cache
+   */
+  get(key: string): T | undefined {
+    const entry = this.cache.get(key)
+    if (!entry) {
+      return undefined
+    }
+
+    // Update access time for LRU
+    entry.accessedAt = Date.now()
+    return entry.value
+  }
+
+  /**
+   * Set a value in the cache
+   */
+  set(key: string, value: T): void {
+    // If key already exists, update it
+    if (this.cache.has(key)) {
+      this.cache.set(key, {
+        value,
+        accessedAt: Date.now(),
+      })
+      return
+    }
+
+    // Evict oldest entries if at capacity
+    if (this.cache.size >= this.maxSize) {
+      this.evictOldest()
+    }
+
+    this.cache.set(key, {
+      value,
+      accessedAt: Date.now(),
+    })
+  }
+
+  /**
+   * Delete a value from the cache
+   */
+  delete(key: string): boolean {
+    return this.cache.delete(key)
+  }
+
+  /**
+   * Check if a key exists in the cache
+   */
+  has(key: string): boolean {
+    return this.cache.has(key)
+  }
+
+  /**
+   * Clear all entries from the cache
+   */
+  clear(): void {
+    // Zero out key material before clearing
+    for (const entry of this.cache.values()) {
+      if (entry.value instanceof Uint8Array) {
+        entry.value.fill(0)
+      }
+    }
+    this.cache.clear()
+  }
+
+  /**
+   * Get the current size of the cache
+   */
+  get size(): number {
+    return this.cache.size
+  }
+
+  /**
+   * Get all keys in the cache
+   */
+  keys(): string[] {
+    return Array.from(this.cache.keys())
+  }
+
+  /**
+   * Evict the oldest (least recently accessed) entry
+   */
+  private evictOldest(): void {
+    let oldestKey: string | null = null
+    let oldestTime = Infinity
+
+    for (const [key, entry] of this.cache.entries()) {
+      if (entry.accessedAt < oldestTime) {
+        oldestTime = entry.accessedAt
+        oldestKey = key
+      }
+    }
+
+    if (oldestKey !== null) {
+      // Zero out key material before eviction
+      const entry = this.cache.get(oldestKey)
+      if (entry?.value instanceof Uint8Array) {
+        entry.value.fill(0)
+      }
+      this.cache.delete(oldestKey)
+    }
+  }
+}
+
+/**
+ * Specialized cache for Workspace KEKs
+ */
+export class KekCache extends KeyCache<Uint8Array> {
+  constructor(maxSize: number = DEFAULT_KEK_CACHE_SIZE) {
+    super(maxSize)
+  }
+
+  /**
+   * Get KEK by workspace ID
+   */
+  getKek(workspaceId: string): Uint8Array | undefined {
+    return this.get(workspaceId)
+  }
+
+  /**
+   * Set KEK for workspace
+   */
+  setKek(workspaceId: string, kek: Uint8Array): void {
+    this.set(workspaceId, kek)
+  }
+
+  /**
+   * Delete KEK for workspace
+   */
+  deleteKek(workspaceId: string): boolean {
+    return this.delete(workspaceId)
+  }
+}
+
+/**
+ * Specialized cache for Document DEKs
+ */
+export class DekCache extends KeyCache<Uint8Array> {
+  constructor(maxSize: number = DEFAULT_DEK_CACHE_SIZE) {
+    super(maxSize)
+  }
+
+  /**
+   * Get DEK by document ID
+   */
+  getDek(documentId: string): Uint8Array | undefined {
+    return this.get(documentId)
+  }
+
+  /**
+   * Set DEK for document
+   */
+  setDek(documentId: string, dek: Uint8Array): void {
+    this.set(documentId, dek)
+  }
+
+  /**
+   * Delete DEK for document
+   */
+  deleteDek(documentId: string): boolean {
+    return this.delete(documentId)
+  }
+
+  /**
+   * Delete all DEKs for documents in a workspace
+   * (useful when workspace KEK is rotated)
+   */
+  deleteByWorkspace(documentIds: string[]): void {
+    for (const docId of documentIds) {
+      this.delete(docId)
+    }
+  }
+}
+
+// Singleton instances
+let kekCacheInstance: KekCache | null = null
+let dekCacheInstance: DekCache | null = null
+
+/**
+ * Get the singleton KEK cache instance
+ */
+export function getKekCache(): KekCache {
+  if (!kekCacheInstance) {
+    kekCacheInstance = new KekCache()
+  }
+  return kekCacheInstance
+}
+
+/**
+ * Get the singleton DEK cache instance
+ */
+export function getDekCache(): DekCache {
+  if (!dekCacheInstance) {
+    dekCacheInstance = new DekCache()
+  }
+  return dekCacheInstance
+}
+
+/**
+ * Clear all key caches (for logout/lock)
+ */
+export function clearAllCaches(): void {
+  kekCacheInstance?.clear()
+  dekCacheInstance?.clear()
+}
diff --git a/app/src/features/security/lib/keys/key-manager.ts b/app/src/features/security/lib/keys/key-manager.ts
new file mode 100644
index 00000000..4515ca66
--- /dev/null
+++ b/app/src/features/security/lib/keys/key-manager.ts
@@ -0,0 +1,1075 @@
+/**
+ * KeyManager - Central key management for E2EE
+ *
+ * This is the main entry point for all key operations.
+ * It coordinates between the KeyStore, KeyCache, and individual key modules.
+ */
+
+import {
+  storeWorkspaceKey,
+  getMyWorkspaceKey,
+  listMembers,
+  getUserPublicKey,
+  rotateWorkspaceKey,
+  rotateDocumentKey,
+} from '@/shared/api/client'
+
+import { toBase64, fromBase64 } from '../crypto'
+
+import {
+  generateDocumentDek,
+  getOrFetchDek,
+  invalidateCachedDek,
+  invalidateWorkspaceDeks,
+  createEncryptedDekForApi,
+  decryptDekFromApiResponse,
+} from './document-dek'
+import {
+  encryptKekForInvitation,
+  decryptKekFromInvitation,
+  encodeInvitationKekForApi,
+  decodeInvitationKekFromApi,
+} from './invitation-kek'
+import { clearAllCaches, getKekCache, getDekCache } from './key-cache'
+import { KeyStore, getKeyStore, type StoredKeys } from './key-store'
+import {
+  generateShareKey,
+  extractShareKeyFromFragment,
+  deriveShareKeyFromPassword,
+  createPasswordProtectedShareKey,
+  encryptDekWithShareKey,
+  decryptDekWithShareKey,
+} from './share-key'
+import {
+  generateUmk,
+  deriveUmkFromPassphrase,
+  restoreUmkFromRecoveryKey,
+  verifyPassphrase,
+  zeroUmk,
+} from './umk'
+import {
+  generateUserKeys,
+  encryptUserKeys,
+  decryptUserKeys,
+  getPublicKeysBase64,
+  zeroUserKeys,
+  type UserKeySet,
+} from './user-keys'
+import {
+  generateWorkspaceKek,
+  getOrFetchKek,
+  invalidateCachedKek,
+  createKekForMember,
+  decryptKekFromApiResponse,
+  encryptKekForRecipient,
+  encodeKekForApi,
+} from './workspace-kek'
+
+/** Encrypted keys data for server storage */
+export interface EncryptedKeysBundle {
+  /** Encrypted ECDH private key (base64) */
+  encryptedEcdhPrivateKey: string
+  /** Nonce for ECDH key encryption (base64) */
+  encryptedEcdhPrivateKeyNonce: string
+  /** Encrypted signing private key (base64) */
+  encryptedSigningPrivateKey: string
+  /** Nonce for signing key encryption (base64) */
+  encryptedSigningPrivateKeyNonce: string
+  /** ECDH public key (base64) */
+  ecdhPublicKey: string
+  /** Signing public key (base64) */
+  signingPublicKey: string
+}
+
+/** E2EE Setup result */
+export interface E2EESetupResult {
+  /** BIP39 recovery key (24 words) - must be shown to user */
+  recoveryKey: string
+  /** Public keys to register with server */
+  publicKeys: {
+    ecdhPublicKey: string
+    signingPublicKey: string
+  }
+  /** Salt used for passphrase derivation */
+  salt: Uint8Array
+  /** KDF type used */
+  kdf: 'argon2id' | 'pbkdf2'
+  /** KDF parameters */
+  kdfParams: { memory?: number; iterations: number; parallelism?: number }
+  /** Encrypted keys bundle for server storage */
+  encryptedKeysBundle: EncryptedKeysBundle
+}
+
+/** Session lock error */
+export class SessionLockedError extends Error {
+  constructor() {
+    super('Session is locked. Please unlock with passphrase.')
+    this.name = 'SessionLockedError'
+  }
+}
+
+/** KDF parameters with type discriminator */
+type KdfParams =
+  | { type: 'argon2id'; memory: number; iterations: number; parallelism: number }
+  | { type: 'pbkdf2'; iterations: number }
+
+/**
+ * Build KDF parameters from server backup data.
+ * Handles null/undefined values with sensible defaults.
+ */
+function buildKdfParams(
+  kdfType: 'argon2id' | 'pbkdf2',
+  rawParams: { memory?: number | null; iterations?: number | null; parallelism?: number | null }
+): KdfParams {
+  if (kdfType === 'argon2id') {
+    return {
+      type: 'argon2id',
+      memory: rawParams.memory ?? 65536,
+      iterations: rawParams.iterations ?? 3,
+      parallelism: rawParams.parallelism ?? 4,
+    }
+  }
+  return {
+    type: 'pbkdf2',
+    iterations: rawParams.iterations ?? 600000,
+  }
+}
+
+/** Key not found error */
+export class KeyNotFoundError extends Error {
+  constructor(keyType: string, id: string) {
+    super(`${keyType} not found for ${id}`)
+    this.name = 'KeyNotFoundError'
+  }
+}
+
+/**
+ * KeyManager - Singleton class for managing E2EE keys
+ */
+export class KeyManager {
+  private keyStore: KeyStore
+  private umk: Uint8Array | null = null
+  private userKeys: UserKeySet | null = null
+  private _isInitialized = false
+  private unlockListeners = new Set<() => void>()
+
+  constructor(keyStore?: KeyStore) {
+    this.keyStore = keyStore ?? getKeyStore()
+  }
+
+  /**
+   * Subscribe to unlock state changes.
+   * @returns Unsubscribe function
+   */
+  onUnlockChange(listener: () => void): () => void {
+    this.unlockListeners.add(listener)
+    return () => this.unlockListeners.delete(listener)
+  }
+
+  private notifyUnlockChange(): void {
+    this.unlockListeners.forEach(l => l())
+  }
+
+  /**
+   * Initialize the KeyManager.
+   * Must be called before any other operations.
+   * Automatically restores UMK from IndexedDB if available.
+   */
+  async initialize(): Promise<void> {
+    if (this._isInitialized) return
+    await this.keyStore.initialize()
+    this._isInitialized = true
+
+    // Try to auto-restore UMK from IndexedDB
+    await this.tryAutoUnlock()
+  }
+
+  /**
+   * Try to automatically unlock using stored UMK.
+   * This is called during initialization.
+   */
+  private async tryAutoUnlock(): Promise<void> {
+    try {
+      const storedUmk = await this.keyStore.loadSessionUmk()
+      if (!storedUmk) return
+
+      const storedKeys = await this.keyStore.loadKeys()
+      if (!storedKeys) {
+        // No keys stored, clear the orphaned UMK
+        await this.keyStore.clearSessionUmk()
+        return
+      }
+
+      // Try to decrypt user keys with the stored UMK
+      const userKeys = await decryptUserKeys(storedKeys, storedUmk)
+      this.umk = storedUmk
+      this.userKeys = userKeys
+      this.notifyUnlockChange()
+    } catch {
+      // Failed to auto-unlock, clear invalid UMK
+      await this.keyStore.clearSessionUmk()
+    }
+  }
+
+  /**
+   * Check if KeyManager is initialized.
+   */
+  get isInitialized(): boolean {
+    return this._isInitialized
+  }
+
+  /**
+   * Check if session is unlocked.
+   */
+  get isUnlocked(): boolean {
+    return this.umk !== null && this.userKeys !== null
+  }
+
+  /**
+   * Check if user has E2EE keys set up.
+   */
+  async hasKeys(): Promise<boolean> {
+    await this.ensureInitialized()
+    return this.keyStore.hasKeys()
+  }
+
+  // ============================================
+  // E2EE Setup
+  // ============================================
+
+  /**
+   * Set up E2EE for a new user or an existing user migrating.
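+   *
+   * Sketch of a setup flow (caller side; the server registration endpoint is
+   * not shown, and showRecoveryKeyToUser is a hypothetical UI helper):
+   *
+   * ```typescript
+   * const result = await getKeyManager().setupE2EE(passphrase)
+   * showRecoveryKeyToUser(result.recoveryKey) // hypothetical UI helper
+   * ```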
+   *
+   * @param passphrase - User's passphrase (min 8 characters)
+   * @param options - Setup options
+   * @param options.rememberMe - If true, persist UMK in IndexedDB for session continuity
+   * @returns Setup result with recovery key and public keys
+   */
+  async setupE2EE(passphrase: string, options?: { rememberMe?: boolean }): Promise<E2EESetupResult> {
+    await this.ensureInitialized()
+
+    // Generate UMK from passphrase
+    const umkResult = await generateUmk(passphrase)
+
+    // Generate user key pairs
+    const userKeys = await generateUserKeys()
+
+    // Encrypt user keys with UMK
+    const encryptedKeys = await encryptUserKeys(userKeys, umkResult.umk)
+
+    // Store in IndexedDB
+    const kdfParams = buildKdfParams(umkResult.kdf, umkResult.kdfParams)
+
+    const storedKeys: StoredKeys = {
+      ...encryptedKeys,
+      salt: umkResult.salt,
+      kdf: umkResult.kdf,
+      kdfParams,
+      createdAt: Date.now(),
+    }
+
+    await this.keyStore.saveKeys(storedKeys)
+
+    // Persist UMK based on rememberMe preference
+    await this.keyStore.saveSessionUmk(umkResult.umk, { rememberMe: options?.rememberMe })
+
+    // Keep UMK and keys in memory
+    this.umk = umkResult.umk
+    this.userKeys = userKeys
+
+    // Get public keys for server registration
+    const publicKeys = await getPublicKeysBase64(userKeys)
+
+    // Create encrypted keys bundle for server storage
+    const encryptedKeysBundle: EncryptedKeysBundle = {
+      encryptedEcdhPrivateKey: await toBase64(encryptedKeys.encryptedEcdhPrivateKey),
+      encryptedEcdhPrivateKeyNonce: await toBase64(encryptedKeys.encryptedEcdhPrivateKeyNonce),
+      encryptedSigningPrivateKey: await toBase64(encryptedKeys.encryptedSigningPrivateKey),
+      encryptedSigningPrivateKeyNonce: await toBase64(encryptedKeys.encryptedSigningPrivateKeyNonce),
+      ecdhPublicKey: publicKeys.ecdhPublicKey,
+      signingPublicKey: publicKeys.signingPublicKey,
+    }
+
+    return {
+      recoveryKey: umkResult.recoveryKey,
+      publicKeys,
+      salt: umkResult.salt,
+      kdf: umkResult.kdf,
+      kdfParams: umkResult.kdfParams,
+      encryptedKeysBundle,
+    }
+  }
+
+  // ============================================
+  // Unlock / Lock
+  // ============================================
+
+  /**
+   * Unlock the session with a passphrase.
+   *
+   * @param passphrase - User's passphrase
+   * @param options - Unlock options
+   * @param options.rememberMe - If true, persist UMK in IndexedDB for session continuity
+   * @throws Error if passphrase is incorrect
+   */
+  async unlockWithPassphrase(passphrase: string, options?: { rememberMe?: boolean }): Promise<void> {
+    await this.ensureInitialized()
+
+    const storedKeys = await this.keyStore.loadKeys()
+    if (!storedKeys) {
+      throw new Error('No E2EE keys found. Please set up E2EE first.')
+    }
+
+    const umk = await deriveUmkFromPassphrase(
+      passphrase,
+      storedKeys.salt,
+      storedKeys.kdf,
+      storedKeys.kdfParams
+    )
+
+    await this.performUnlock(umk, storedKeys, 'Incorrect passphrase', options)
+  }
+
+  /**
+   * Unlock the session with a recovery key.
+   *
+   * @param recoveryKey - BIP39 mnemonic (24 words)
+   * @param options - Unlock options
+   * @param options.rememberMe - If true, persist UMK in IndexedDB for session continuity
+   * @throws Error if recovery key is invalid
+   */
+  async unlockWithRecoveryKey(recoveryKey: string, options?: { rememberMe?: boolean }): Promise<void> {
+    await this.ensureInitialized()
+
+    const storedKeys = await this.keyStore.loadKeys()
+    if (!storedKeys) {
+      throw new Error('No E2EE keys found. Please set up E2EE first.')
+    }
+
+    const umk = restoreUmkFromRecoveryKey(recoveryKey)
+    await this.performUnlock(umk, storedKeys, 'Incorrect recovery key', options)
+  }
+
+  /**
+   * Lock the session - clears all keys from memory.
+   * Use this for temporary locking (e.g., screen lock).
+   * For logout, use logout() instead, which also clears the stored UMK.
+   */
+  lock(): void {
+    const wasUnlocked = this.isUnlocked
+    if (this.umk) {
+      zeroUmk(this.umk)
+      this.umk = null
+    }
+
+    if (this.userKeys) {
+      zeroUserKeys(this.userKeys)
+      this.userKeys = null
+    }
+
+    // Clear all cached KEKs and DEKs
+    clearAllCaches()
+
+    if (wasUnlocked) {
+      this.notifyUnlockChange()
+    }
+  }
+
+  /**
+   * Logout - clears all keys from memory AND storage.
+   * Use this when the user explicitly logs out.
+   */
+  async logout(): Promise<void> {
+    // First lock (clear from memory)
+    this.lock()
+
+    // Then clear from storage
+    await this.keyStore.clearSessionUmk()
+  }
+
+  /**
+   * Verify if a passphrase is correct without unlocking.
+   */
+  async verifyPassphrase(passphrase: string): Promise<boolean> {
+    await this.ensureInitialized()
+
+    const storedKeys = await this.keyStore.loadKeys()
+    if (!storedKeys) {
+      return false
+    }
+
+    return verifyPassphrase(passphrase, storedKeys)
+  }
+
+  // ============================================
+  // Server Restore
+  // ============================================
+
+  /**
+   * Restore keys from server backup.
+   * Used when logging in on a new device.
+   *
+   * @param passphrase - User's passphrase
+   * @param serverBackup - Backup data from server
+   * @param options - Restore options
+   * @param options.rememberMe - If true, persist UMK in IndexedDB for session continuity
+   */
+  async restoreFromServer(
+    passphrase: string,
+    serverBackup: {
+      encryptedKeysBundle: EncryptedKeysBundle
+      salt: string
+      kdfType: 'argon2id' | 'pbkdf2'
+      kdfParams: { memory?: number | null; iterations?: number | null; parallelism?: number | null }
+    },
+    options?: { rememberMe?: boolean }
+  ): Promise<void> {
+    await this.ensureInitialized()
+
+    const salt = await fromBase64(serverBackup.salt)
+    const kdfParams = buildKdfParams(serverBackup.kdfType, serverBackup.kdfParams)
+
+    const umk = await deriveUmkFromPassphrase(
+      passphrase,
+      salt,
+      serverBackup.kdfType,
+      kdfParams
+    )
+
+    await this.performRestore(umk, serverBackup, 'Incorrect passphrase', options)
+  }
+
+  /**
+   * Restore keys from server using recovery key.
+   *
+   * @param recoveryKey - BIP39 mnemonic (24 words)
+   * @param serverBackup - Backup data from server
+   * @param options - Restore options
+   * @param options.rememberMe - If true, persist UMK in IndexedDB for session continuity
+   */
+  async restoreFromServerWithRecoveryKey(
+    recoveryKey: string,
+    serverBackup: {
+      encryptedKeysBundle: EncryptedKeysBundle
+      salt: string
+      kdfType: 'argon2id' | 'pbkdf2'
+      kdfParams: { memory?: number | null; iterations?: number | null; parallelism?: number | null }
+    },
+    options?: { rememberMe?: boolean }
+  ): Promise<void> {
+    await this.ensureInitialized()
+
+    const umk = restoreUmkFromRecoveryKey(recoveryKey)
+    await this.performRestore(umk, serverBackup, 'Incorrect recovery key', options)
+  }
+
+  // ============================================
+  // User Keys
+  // ============================================
+
+  /**
+   * Get the current user's ECDH key pair.
+   */
+  getEcdhKeyPair(): { publicKey: Uint8Array; privateKey: Uint8Array } {
+    this.ensureUnlocked()
+    return {
+      publicKey: this.userKeys!.ecdh.publicKey,
+      privateKey: this.userKeys!.ecdh.privateKey,
+    }
+  }
+
+  /**
+   * Get the current user's signing key pair.
+  /**
+   * Get the current user's signing key pair.
+   */
+  getSigningKeyPair(): { publicKey: Uint8Array; privateKey: Uint8Array } {
+    this.ensureUnlocked()
+    return {
+      publicKey: this.userKeys!.signing.publicKey,
+      privateKey: this.userKeys!.signing.privateKey,
+    }
+  }
+
+  /**
+   * Get public keys as Base64 for API requests.
+   */
+  async getPublicKeysBase64(): Promise<{ ecdhPublicKey: string; signingPublicKey: string }> {
+    this.ensureUnlocked()
+    return getPublicKeysBase64(this.userKeys!)
+  }
+
+  // ============================================
+  // Workspace KEK
+  // ============================================
+
+  /**
+   * Generate a new workspace KEK.
+   */
+  async generateWorkspaceKek(): Promise<Uint8Array> {
+    return generateWorkspaceKek()
+  }
+
+  /**
+   * Encrypt a KEK for a recipient.
+   */
+  async encryptKekForRecipient(
+    kek: Uint8Array,
+    recipientPublicKey: Uint8Array
+  ): Promise<string> {
+    return createKekForMember(kek, recipientPublicKey)
+  }
+
+  /**
+   * Get a workspace KEK (from cache or API).
+   *
+   * @param workspaceId - Workspace ID
+   * @param fetchFn - Function to fetch encrypted KEK from API
+   */
+  async getWorkspaceKek(
+    workspaceId: string,
+    fetchFn: () => Promise<string>
+  ): Promise<Uint8Array> {
+    this.ensureUnlocked()
+    return getOrFetchKek(workspaceId, this.userKeys!.ecdh.privateKey, fetchFn)
+  }
+
+  /**
+   * Decrypt a KEK directly from API response.
+   */
+  async decryptKek(encryptedKekBase64: string): Promise<Uint8Array> {
+    this.ensureUnlocked()
+    return decryptKekFromApiResponse(this.userKeys!.ecdh.privateKey, encryptedKekBase64)
+  }
+
+  /**
+   * Invalidate cached KEK.
+   */
+  invalidateKekCache(workspaceId: string): void {
+    invalidateCachedKek(workspaceId)
+  }
+
+  /**
+   * Create and store a new workspace KEK.
+   * Used when creating a new workspace.
+   *
+   * @param workspaceId - Workspace ID
+   * @returns The raw KEK (for immediate use)
+   */
+  async createAndStoreWorkspaceKek(workspaceId: string): Promise<Uint8Array> {
+    this.ensureUnlocked()
+
+    // 1. Generate new KEK
+    const kek = await generateWorkspaceKek()
+
+    // 2. Encrypt for self
+    const userPublicKey = this.userKeys!.ecdh.publicKey
+    const { encryptedKek, ephemeralPublicKey, nonce } = await encryptKekForRecipient(
+      kek,
+      userPublicKey
+    )
+
+    // 3. Encode and store via API
+    const encryptedKekBase64 = await encodeKekForApi(encryptedKek, ephemeralPublicKey, nonce)
+    await storeWorkspaceKey({
+      id: workspaceId,
+      requestBody: { encryptedKek: encryptedKekBase64, keyVersion: 1 },
+    })
+
+    // 4. Cache
+    getKekCache().setKek(workspaceId, kek)
+
+    return kek
+  }
+
+  /**
+   * Get or create workspace KEK.
+   * If no KEK exists for the workspace, creates one automatically.
+   *
+   * @param workspaceId - Workspace ID
+   * @returns The raw KEK
+   */
+  async getOrCreateWorkspaceKek(workspaceId: string): Promise<Uint8Array> {
+    this.ensureUnlocked()
+
+    // Check cache first
+    const cachedKek = getKekCache().getKek(workspaceId)
+    if (cachedKek) {
+      return cachedKek
+    }
+
+    // Try to fetch from server
+    try {
+      const response = await getMyWorkspaceKey({ id: workspaceId })
+      return await this.decryptKek(response.encryptedKek)
+    } catch (error: unknown) {
+      // If 404, create new KEK
+      if (error && typeof error === 'object' && 'status' in error && error.status === 404) {
+        console.log(`[KeyManager] No KEK found for workspace ${workspaceId}, creating new one`)
+        return this.createAndStoreWorkspaceKek(workspaceId)
+      }
+      throw error
+    }
+  }
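For context, `getWorkspaceKek()` leaves the transport to the caller; a sketch wiring it to the same `getMyWorkspaceKey` call used by `getOrCreateWorkspaceKek()` above (import paths are assumptions):

```ts
import { getKeyManager } from '@/features/security/lib/keys/key-manager' // path assumed
import { getMyWorkspaceKey } from '@/shared/api' // generated client, as used above

async function loadWorkspaceKek(workspaceId: string): Promise<Uint8Array> {
  // The fetcher returns the Base64 blob (ephemeralPublicKey || nonce || ciphertext)
  // that decryptKekFromApiResponse() expects.
  return getKeyManager().getWorkspaceKek(workspaceId, async () => {
    const response = await getMyWorkspaceKey({ id: workspaceId })
    return response.encryptedKek
  })
}
```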
+  /**
+   * Encrypt workspace KEK for an invitation.
+   * The KEK is encrypted using a key derived from the invitation token.
+   *
+   * @param workspaceId - Workspace ID
+   * @param invitationToken - The invitation token
+   * @returns Base64-encoded encrypted KEK for API storage
+   */
+  async encryptKekForInvitationToken(
+    workspaceId: string,
+    invitationToken: string
+  ): Promise<string> {
+    this.ensureUnlocked()
+
+    // Get or create workspace KEK
+    const kek = await this.getOrCreateWorkspaceKek(workspaceId)
+
+    // Encrypt KEK with token-derived key
+    const { ciphertext, nonce } = await encryptKekForInvitation(kek, invitationToken)
+
+    // Encode for API
+    return encodeInvitationKekForApi(ciphertext, nonce)
+  }
+
+  /**
+   * Decrypt KEK from invitation and store for self.
+   * Called when accepting a workspace invitation.
+   *
+   * @param workspaceId - Workspace ID
+   * @param invitationToken - The invitation token
+   * @param encryptedKekBase64 - Base64-encoded encrypted KEK from invitation
+   * @param keyVersion - Key version
+   */
+  async acceptInvitationAndStoreKek(
+    workspaceId: string,
+    invitationToken: string,
+    encryptedKekBase64: string,
+    keyVersion: number
+  ): Promise<void> {
+    this.ensureUnlocked()
+
+    // 1. Decode invitation-encrypted KEK
+    const { nonce, ciphertext } = await decodeInvitationKekFromApi(encryptedKekBase64)
+
+    // 2. Decrypt KEK using invitation token
+    const kek = await decryptKekFromInvitation(ciphertext, nonce, invitationToken)
+
+    // 3. Re-encrypt for self
+    const userPublicKey = this.userKeys!.ecdh.publicKey
+    const encrypted = await encryptKekForRecipient(kek, userPublicKey)
+    const encryptedKekForSelf = await encodeKekForApi(
+      encrypted.encryptedKek,
+      encrypted.ephemeralPublicKey,
+      encrypted.nonce
+    )
+
+    // 4. Store via API
+    await storeWorkspaceKey({
+      id: workspaceId,
+      requestBody: { encryptedKek: encryptedKekForSelf, keyVersion },
+    })
+
+    // 5. Cache
+    getKekCache().setKek(workspaceId, kek)
+  }
+
+  // ============================================
+  // Document DEK
+  // ============================================
+
+  /**
+   * Generate a new document DEK.
+   */
+  async generateDocumentDek(): Promise<Uint8Array> {
+    return generateDocumentDek()
+  }
+
+  /**
+   * Create an encrypted DEK for API storage.
+   */
+  async createEncryptedDek(
+    dek: Uint8Array,
+    kek: Uint8Array
+  ): Promise<{ encryptedDek: string; nonce: string }> {
+    return createEncryptedDekForApi(dek, kek)
+  }
+
+  /**
+   * Get a document DEK (from cache or API).
+   *
+   * @param documentId - Document ID
+   * @param kek - Workspace KEK
+   * @param fetchFn - Function to fetch encrypted DEK from API
+   */
+  async getDocumentDek(
+    documentId: string,
+    kek: Uint8Array,
+    fetchFn: () => Promise<{ encryptedDek: string; nonce: string }>
+  ): Promise<Uint8Array> {
+    return getOrFetchDek(documentId, kek, fetchFn)
+  }
+
+  /**
+   * Decrypt a DEK directly from API response.
+   */
+  async decryptDek(
+    encryptedDekBase64: string,
+    nonceBase64: string,
+    kek: Uint8Array
+  ): Promise<Uint8Array> {
+    return decryptDekFromApiResponse(encryptedDekBase64, nonceBase64, kek)
+  }
+
+  /**
+   * Invalidate cached DEK.
+   */
+  invalidateDekCache(documentId: string): void {
+    invalidateCachedDek(documentId)
+  }
+
+  /**
+   * Invalidate all DEKs for a workspace.
+   */
+  invalidateWorkspaceDeksCache(documentIds: string[]): void {
+    invalidateWorkspaceDeks(documentIds)
+  }
+
+  // ============================================
+  // Share Keys
+  // ============================================
+
+  /**
+   * Generate a share key for URL fragment mode.
+   */
+  async generateShareKey(): Promise<{ key: Uint8Array; fragment: string }> {
+    return generateShareKey()
+  }
+  /**
+   * Extract share key from URL fragment.
+   */
+  async extractShareKeyFromFragment(fragment: string): Promise<Uint8Array | null> {
+    return extractShareKeyFromFragment(fragment)
+  }
+
+  /**
+   * Derive share key from password.
+   */
+  async deriveShareKeyFromPassword(
+    password: string,
+    salt: Uint8Array
+  ): Promise<Uint8Array> {
+    return deriveShareKeyFromPassword(password, salt)
+  }
+
+  /**
+   * Create a password-protected share key.
+   */
+  async createPasswordProtectedShareKey(
+    password: string
+  ): Promise<{ key: Uint8Array; salt: Uint8Array }> {
+    return createPasswordProtectedShareKey(password)
+  }
+
+  /**
+   * Encrypt a DEK with a share key.
+   */
+  async encryptDekWithShareKey(
+    dek: Uint8Array,
+    shareKey: Uint8Array
+  ): Promise<{ encryptedDek: string; nonce: string }> {
+    return encryptDekWithShareKey(dek, shareKey)
+  }
+
+  /**
+   * Decrypt a DEK with a share key.
+   */
+  async decryptDekWithShareKey(
+    encryptedDekBase64: string,
+    nonceBase64: string,
+    shareKey: Uint8Array
+  ): Promise<Uint8Array> {
+    return decryptDekWithShareKey(encryptedDekBase64, nonceBase64, shareKey)
+  }
+
+  // ============================================
+  // Key Rotation
+  // ============================================
+
+  /**
+   * Rotate the workspace KEK.
+   * Generates a new KEK and re-encrypts it for all workspace members.
+   *
+   * Requires workspace:manage permission.
+   *
+   * @param workspaceId - Workspace ID
+   * @returns The new key version
+   */
+  async rotateWorkspaceKek(workspaceId: string): Promise<number> {
+    this.ensureUnlocked()
+
+    // 1. Generate new KEK
+    const newKek = await generateWorkspaceKek()
+
+    // 2. Get all workspace members
+    const members = await listMembers({ id: workspaceId })
+
+    // 3. For each member, get their public key and encrypt the new KEK
+    const memberKeys: Array<{ userId: string; encryptedKek: string }> = []
+
+    for (const member of members) {
+      try {
+        const publicKeyResponse = await getUserPublicKey({ userId: member.user_id })
+        const publicKeyBase64 = publicKeyResponse.publicKey
+        const publicKeyBytes = await fromBase64(publicKeyBase64)
+
+        // Encrypt KEK for this member
+        const encryptedKekBase64 = await createKekForMember(newKek, publicKeyBytes)
+
+        memberKeys.push({
+          userId: member.user_id,
+          encryptedKek: encryptedKekBase64,
+        })
+      } catch (error) {
+        console.warn(`[KeyManager] Failed to get public key for member ${member.user_id}:`, error)
+        // Skip members without public keys (they may not have set up E2EE yet)
+      }
+    }
+
+    if (memberKeys.length === 0) {
+      throw new Error('No members have E2EE public keys. Cannot rotate KEK.')
+    }
+
+    // 4. Call the rotation API
+    const response = await rotateWorkspaceKey({
+      id: workspaceId,
+      requestBody: {
+        memberKeys,
+      },
+    })
+
+    // 5. Update cache with new KEK
+    getKekCache().setKek(workspaceId, newKek)
+
+    // 6. Invalidate all DEK caches for this workspace
+    // (DEKs will need to be re-fetched with the new KEK)
+    // Note: The caller should handle re-encrypting documents if needed
+
+    return response.newKeyVersion
+  }
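Since `rotateWorkspaceKek()` deliberately leaves DEK handling to the caller, here is a hedged sketch of the follow-up a caller might perform. The document-ID list and the content re-encryption step are placeholders, not part of this PR:

```ts
async function rotateWorkspaceKeys(
  workspaceId: string,
  documentIds: string[] // caller-supplied; listing documents is out of scope here
): Promise<void> {
  const km = getKeyManager()

  await km.rotateWorkspaceKek(workspaceId)
  km.invalidateWorkspaceDeksCache(documentIds)

  for (const documentId of documentIds) {
    // rotateDocumentDek() (below) returns the fresh DEK; re-encrypting the
    // document content with it remains the caller's responsibility.
    const { dek } = await km.rotateDocumentDek(documentId, workspaceId)
    void dek // hand off to a content re-encryption step (not part of this PR)
  }
}
```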
+  /**
+   * Rotate the document DEK.
+   * Generates a new DEK and encrypts it with the workspace KEK.
+   *
+   * Note: This only rotates the key. The caller is responsible for
+   * re-encrypting the document content with the new DEK.
+   *
+   * @param documentId - Document ID
+   * @param workspaceId - Workspace ID (needed to get the KEK)
+   * @returns Object containing the new DEK and new key version
+   */
+  async rotateDocumentDek(
+    documentId: string,
+    workspaceId: string
+  ): Promise<{ dek: Uint8Array; newKeyVersion: number }> {
+    this.ensureUnlocked()
+
+    // 1. Get the workspace KEK
+    const kek = await this.getOrCreateWorkspaceKek(workspaceId)
+
+    // 2. Generate new DEK
+    const newDek = await generateDocumentDek()
+
+    // 3. Encrypt new DEK with KEK
+    const { encryptedDek, nonce } = await createEncryptedDekForApi(newDek, kek)
+
+    // 4. Call the rotation API
+    const response = await rotateDocumentKey({
+      id: documentId,
+      requestBody: {
+        encryptedDek,
+        nonce,
+      },
+    })
+
+    // 5. Update DEK cache
+    getDekCache().setDek(documentId, newDek)
+
+    return {
+      dek: newDek,
+      newKeyVersion: response.newKeyVersion,
+    }
+  }
+
+  // ============================================
+  // Password Change
+  // ============================================
+
+  /**
+   * Change the passphrase.
+   * Session must be unlocked.
+   *
+   * @param newPassphrase - New passphrase
+   * @returns New recovery key
+   */
+  async changePassphrase(newPassphrase: string): Promise<string> {
+    this.ensureUnlocked()
+
+    // Generate new UMK from new passphrase
+    const umkResult = await generateUmk(newPassphrase)
+
+    // Re-encrypt user keys with new UMK
+    const encryptedKeys = await encryptUserKeys(this.userKeys!, umkResult.umk)
+    const kdfParams = buildKdfParams(umkResult.kdf, umkResult.kdfParams)
+
+    // Store updated keys
+    const storedKeys: StoredKeys = {
+      ...encryptedKeys,
+      salt: umkResult.salt,
+      kdf: umkResult.kdf,
+      kdfParams,
+      createdAt: Date.now(),
+    }
+
+    await this.keyStore.saveKeys(storedKeys)
+
+    // Zero out old UMK
+    if (this.umk) {
+      zeroUmk(this.umk)
+    }
+
+    // Update session with new UMK
+    this.umk = umkResult.umk
+
+    return umkResult.recoveryKey
+  }
+
+  // ============================================
+  // Utility
+  // ============================================
+
+  /**
+   * Clear all stored keys (for account deletion or reset).
+   */
+  async clearAllKeys(): Promise<void> {
+    this.lock()
+    await this.keyStore.clear()
+    this._isInitialized = false
+  }
+
+  /**
+   * Get UMK (for migration or backup operations).
+   * Use with caution - UMK should not be exposed.
+   */
+  getUmk(): Uint8Array {
+    this.ensureUnlocked()
+    return this.umk!
+  }
+
+  // ============================================
+  // Private Helpers
+  // ============================================
+
+  private async ensureInitialized(): Promise<void> {
+    if (!this._isInitialized) {
+      await this.initialize()
+    }
+  }
+
+  private ensureUnlocked(): void {
+    if (!this.isUnlocked) {
+      throw new SessionLockedError()
+    }
+  }
+
+  /**
+   * Common unlock logic shared by unlockWithPassphrase and unlockWithRecoveryKey.
+   */
+  private async performUnlock(
+    umk: Uint8Array,
+    storedKeys: StoredKeys,
+    errorMessage: string,
+    options?: { rememberMe?: boolean }
+  ): Promise<void> {
+    try {
+      const userKeys = await decryptUserKeys(storedKeys, umk)
+      this.umk = umk
+      this.userKeys = userKeys
+      await this.keyStore.saveSessionUmk(umk, { rememberMe: options?.rememberMe })
+      this.notifyUnlockChange()
+    } catch {
+      umk.fill(0)
+      throw new Error(errorMessage)
+    }
+  }
+  /**
+   * Common restore logic shared by restoreFromServer and restoreFromServerWithRecoveryKey.
+   */
+  private async performRestore(
+    umk: Uint8Array,
+    serverBackup: {
+      encryptedKeysBundle: EncryptedKeysBundle
+      salt: string
+      kdfType: 'argon2id' | 'pbkdf2'
+      kdfParams: { memory?: number | null; iterations?: number | null; parallelism?: number | null }
+    },
+    errorMessage: string,
+    options?: { rememberMe?: boolean }
+  ): Promise<void> {
+    const salt = await fromBase64(serverBackup.salt)
+    const kdfParams = buildKdfParams(serverBackup.kdfType, serverBackup.kdfParams)
+
+    const bundle = serverBackup.encryptedKeysBundle
+    const storedKeys: StoredKeys = {
+      encryptedEcdhPrivateKey: await fromBase64(bundle.encryptedEcdhPrivateKey),
+      encryptedEcdhPrivateKeyNonce: await fromBase64(bundle.encryptedEcdhPrivateKeyNonce),
+      encryptedSigningPrivateKey: await fromBase64(bundle.encryptedSigningPrivateKey),
+      encryptedSigningPrivateKeyNonce: await fromBase64(bundle.encryptedSigningPrivateKeyNonce),
+      ecdhPublicKey: await fromBase64(bundle.ecdhPublicKey),
+      signingPublicKey: await fromBase64(bundle.signingPublicKey),
+      salt,
+      kdf: serverBackup.kdfType,
+      kdfParams,
+      createdAt: Date.now(),
+    }
+
+    try {
+      const userKeys = await decryptUserKeys(storedKeys, umk)
+      await this.keyStore.saveKeys(storedKeys)
+      await this.keyStore.saveSessionUmk(umk, { rememberMe: options?.rememberMe })
+      this.umk = umk
+      this.userKeys = userKeys
+      this.notifyUnlockChange()
+    } catch {
+      umk.fill(0)
+      throw new Error(errorMessage)
+    }
+  }
+}
+
+// Singleton instance
+let keyManagerInstance: KeyManager | null = null
+
+/**
+ * Get the singleton KeyManager instance.
+ */
+export function getKeyManager(): KeyManager {
+  if (!keyManagerInstance) {
+    keyManagerInstance = new KeyManager()
+  }
+  return keyManagerInstance
+}
+
+/**
+ * Reset the singleton instance (for testing).
+ */
+export function resetKeyManager(): void {
+  if (keyManagerInstance) {
+    keyManagerInstance.lock()
+  }
+  keyManagerInstance = null
+}
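`resetKeyManager()` exists for tests; a sketch of the intended isolation pattern (the vitest runner and relative import path are assumptions):

```ts
import { afterEach } from 'vitest'
import { resetKeyManager } from './key-manager'

afterEach(() => {
  // lock() zeroes the UMK and user keys before the instance is dropped,
  // so suites never share unlocked state.
  resetKeyManager()
})
```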
diff --git a/app/src/features/security/lib/keys/key-store.ts b/app/src/features/security/lib/keys/key-store.ts
new file mode 100644
index 00000000..5b72888a
--- /dev/null
+++ b/app/src/features/security/lib/keys/key-store.ts
@@ -0,0 +1,421 @@
+/**
+ * E2EE Key Store
+ *
+ * Stores encrypted keys in IndexedDB.
+ * UMK storage depends on the user's "Remember Me" preference:
+ * - rememberMe=true: stored in IndexedDB (persists across sessions)
+ * - rememberMe=false: stored in sessionStorage (cleared on tab close)
+ */
+
+import type { Argon2Params, Pbkdf2Params } from '../types'
+
+const DB_NAME = 'refmd-e2ee'
+const DB_VERSION = 2
+const STORE_NAME = 'keys'
+const KEYS_ID = 'user-keys'
+const SESSION_ID = 'session-umk'
+const SESSION_STORAGE_UMK_KEY = 'refmd-e2ee-session-umk'
+
+/** Stored key data structure */
+export interface StoredKeys {
+  /** ECDH private key encrypted with UMK */
+  encryptedEcdhPrivateKey: Uint8Array
+  /** Nonce for ECDH private key encryption */
+  encryptedEcdhPrivateKeyNonce: Uint8Array
+  /** Ed25519 signing private key encrypted with UMK */
+  encryptedSigningPrivateKey: Uint8Array
+  /** Nonce for signing private key encryption */
+  encryptedSigningPrivateKeyNonce: Uint8Array
+  /** ECDH public key (unencrypted) */
+  ecdhPublicKey: Uint8Array
+  /** Ed25519 signing public key (unencrypted) */
+  signingPublicKey: Uint8Array
+  /** Salt used for passphrase derivation */
+  salt: Uint8Array
+  /** KDF type used */
+  kdf: 'argon2id' | 'pbkdf2'
+  /** KDF parameters */
+  kdfParams: Argon2Params | Pbkdf2Params
+  /** When the keys were created */
+  createdAt: number
+}
+
+/** Serializable format for IndexedDB */
+interface SerializedStoredKeys {
+  encryptedEcdhPrivateKey: number[]
+  encryptedEcdhPrivateKeyNonce: number[]
+  encryptedSigningPrivateKey: number[]
+  encryptedSigningPrivateKeyNonce: number[]
+  ecdhPublicKey: number[]
+  signingPublicKey: number[]
+  salt: number[]
+  kdf: 'argon2id' | 'pbkdf2'
+  kdfParams: Argon2Params | Pbkdf2Params
+  createdAt: number
+}
+
+/**
+ * Open the IndexedDB database
+ */
+function openDatabase(): Promise<IDBDatabase> {
+  return new Promise<IDBDatabase>((resolve, reject) => {
+    const request = indexedDB.open(DB_NAME, DB_VERSION)
+
+    request.onerror = () => {
+      reject(new Error(`Failed to open database: ${request.error?.message}`))
+    }
+
+    request.onsuccess = () => {
+      resolve(request.result)
+    }
+
+    request.onupgradeneeded = (event) => {
+      const db = (event.target as IDBOpenDBRequest).result
+
+      // Create the keys store if it doesn't exist
+      if (!db.objectStoreNames.contains(STORE_NAME)) {
+        db.createObjectStore(STORE_NAME, { keyPath: 'id' })
+      }
+    }
+  })
+}
+
+/**
+ * Serialize StoredKeys to a format safe for IndexedDB
+ */
+function serializeKeys(keys: StoredKeys): SerializedStoredKeys {
+  return {
+    encryptedEcdhPrivateKey: Array.from(keys.encryptedEcdhPrivateKey),
+    encryptedEcdhPrivateKeyNonce: Array.from(keys.encryptedEcdhPrivateKeyNonce),
+    encryptedSigningPrivateKey: Array.from(keys.encryptedSigningPrivateKey),
+    encryptedSigningPrivateKeyNonce: Array.from(keys.encryptedSigningPrivateKeyNonce),
+    ecdhPublicKey: Array.from(keys.ecdhPublicKey),
+    signingPublicKey: Array.from(keys.signingPublicKey),
+    salt: Array.from(keys.salt),
+    kdf: keys.kdf,
+    kdfParams: keys.kdfParams,
+    createdAt: keys.createdAt,
+  }
+}
+
+/**
+ * Deserialize keys from IndexedDB format
+ */
+function deserializeKeys(data: SerializedStoredKeys): StoredKeys {
+  return {
+    encryptedEcdhPrivateKey: new Uint8Array(data.encryptedEcdhPrivateKey),
+    encryptedEcdhPrivateKeyNonce: new Uint8Array(data.encryptedEcdhPrivateKeyNonce),
+    encryptedSigningPrivateKey: new Uint8Array(data.encryptedSigningPrivateKey),
+    encryptedSigningPrivateKeyNonce: new Uint8Array(data.encryptedSigningPrivateKeyNonce),
+    ecdhPublicKey: new Uint8Array(data.ecdhPublicKey),
+    signingPublicKey: new Uint8Array(data.signingPublicKey),
+    salt: new Uint8Array(data.salt),
+    kdf: data.kdf,
+    kdfParams: data.kdfParams,
+    createdAt: data.createdAt,
+  }
+}
+
+/**
+ * KeyStore - manages encrypted key storage in IndexedDB
+ */
+export class KeyStore {
+  private db: IDBDatabase | null = null
+
+  /**
+   * Initialize the key store
+   */
+  async initialize(): Promise<void> {
+    if (this.db) return
+    this.db = await openDatabase()
+  }
+
+  /**
+   * Ensure database is initialized
+   */
+  private async ensureDb(): Promise<IDBDatabase> {
+    if (!this.db) {
+      await this.initialize()
+    }
+    return this.db!
+  }
+
+  /**
+   * Save encrypted keys to IndexedDB
+   */
+  async saveKeys(keys: StoredKeys): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise<void>((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+
+      const data = {
+        id: KEYS_ID,
+        ...serializeKeys(keys),
+      }
+
+      const request = store.put(data)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to save keys: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Load encrypted keys from IndexedDB
+   */
+  async loadKeys(): Promise<StoredKeys | null> {
+    const db = await this.ensureDb()
+
+    return new Promise<StoredKeys | null>((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readonly')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.get(KEYS_ID)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to load keys: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        if (!request.result) {
+          resolve(null)
+          return
+        }
+
+        // Remove the id field before deserializing
+        const { id, ...data } = request.result
+        resolve(deserializeKeys(data as SerializedStoredKeys))
+      }
+    })
+  }
+
+  /**
+   * Check if keys exist in IndexedDB
+   */
+  async hasKeys(): Promise<boolean> {
+    const keys = await this.loadKeys()
+    return keys !== null
+  }
+
+  /**
+   * Clear all keys from IndexedDB
+   */
+  async clear(): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise<void>((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.delete(KEYS_ID)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to clear keys: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Save session UMK for session continuity.
+   *
+   * @param umk - The User Master Key
+   * @param options - Storage options
+   * @param options.rememberMe - If true, store in IndexedDB (persists across sessions).
+   *                             If false, store in sessionStorage (cleared on tab close).
+   */
+  async saveSessionUmk(umk: Uint8Array, options?: { rememberMe?: boolean }): Promise<void> {
+    const rememberMe = options?.rememberMe ?? false
+
+    if (rememberMe) {
+      // Store in IndexedDB for persistent storage
+      const db = await this.ensureDb()
+
+      // Clear sessionStorage to avoid stale data being loaded first
+      try {
+        sessionStorage.removeItem(SESSION_STORAGE_UMK_KEY)
+      } catch {
+        // sessionStorage not available
+      }
+
+      return new Promise<void>((resolve, reject) => {
+        const transaction = db.transaction(STORE_NAME, 'readwrite')
+        const store = transaction.objectStore(STORE_NAME)
+
+        const data = {
+          id: SESSION_ID,
+          umk: Array.from(umk),
+          savedAt: Date.now(),
+        }
+
+        const request = store.put(data)
+
+        request.onerror = () => {
+          reject(new Error(`Failed to save session UMK: ${request.error?.message}`))
+        }
+
+        request.onsuccess = () => {
+          resolve()
+        }
+      })
+    } else {
+      // Store in sessionStorage for session-only storage
+      try {
+        const encoded = btoa(String.fromCharCode(...umk))
+        sessionStorage.setItem(SESSION_STORAGE_UMK_KEY, encoded)
+      } catch {
+        throw new Error('Failed to save session UMK to sessionStorage')
+      }
+
+      // Clear any existing UMK from IndexedDB to ensure clean state
+      // This prevents old remembered sessions from being used
+      await this.clearIndexedDbUmk()
+    }
+  }
+
+  /**
+   * Clear UMK from IndexedDB only.
+   * Used internally when rememberMe is false to ensure clean state.
+   */
+  private async clearIndexedDbUmk(): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise<void>((resolve) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.delete(SESSION_ID)
+
+      request.onerror = () => {
+        // Ignore errors - this is a best-effort cleanup
+        resolve()
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Load session UMK from storage.
+   * Checks both IndexedDB (for rememberMe=true) and sessionStorage (for rememberMe=false).
+   * Returns null if no session UMK is stored.
+   */
+  async loadSessionUmk(): Promise<Uint8Array | null> {
+    // First, try sessionStorage (for current session)
+    try {
+      const encoded = sessionStorage.getItem(SESSION_STORAGE_UMK_KEY)
+      if (encoded) {
+        const decoded = atob(encoded)
+        const umk = new Uint8Array(decoded.length)
+        for (let i = 0; i < decoded.length; i++) {
+          umk[i] = decoded.charCodeAt(i)
+        }
+        return umk
+      }
+    } catch {
+      // sessionStorage not available or invalid data
+    }
+
+    // Then, try IndexedDB (for remembered sessions)
+    const db = await this.ensureDb()
+
+    return new Promise<Uint8Array | null>((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readonly')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.get(SESSION_ID)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to load session UMK: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        if (!request.result || !request.result.umk) {
+          resolve(null)
+          return
+        }
+
+        resolve(new Uint8Array(request.result.umk))
+      }
+    })
+  }
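The `rememberMe` branching above is the whole persistence contract; a short sketch of the two paths, using a random UMK for illustration:

```ts
async function persistUmkDemo(): Promise<Uint8Array | null> {
  const store = getKeyStore()
  const umk = crypto.getRandomValues(new Uint8Array(32))

  // rememberMe=true: IndexedDB record under SESSION_ID; survives a restart.
  await store.saveSessionUmk(umk, { rememberMe: true })

  // rememberMe=false (the default): Base64 in sessionStorage, gone on tab close;
  // any remembered IndexedDB copy is wiped so stale sessions cannot resurface.
  await store.saveSessionUmk(umk, { rememberMe: false })

  return store.loadSessionUmk() // Uint8Array(32) while this tab lives
}
```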
+  /**
+   * Clear session UMK from all storage locations.
+   * Called on logout or manual lock.
+   */
+  async clearSessionUmk(): Promise<void> {
+    // Clear from sessionStorage
+    try {
+      sessionStorage.removeItem(SESSION_STORAGE_UMK_KEY)
+    } catch {
+      // sessionStorage not available
+    }
+
+    // Clear from IndexedDB
+    const db = await this.ensureDb()
+
+    return new Promise<void>((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.delete(SESSION_ID)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to clear session UMK: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Close the database connection
+   */
+  close(): void {
+    if (this.db) {
+      this.db.close()
+      this.db = null
+    }
+  }
+
+  /**
+   * Delete the entire database (for testing/reset)
+   */
+  static async deleteDatabase(): Promise<void> {
+    return new Promise<void>((resolve, reject) => {
+      const request = indexedDB.deleteDatabase(DB_NAME)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to delete database: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+}
+
+// Singleton instance
+let keyStoreInstance: KeyStore | null = null
+
+/**
+ * Get the singleton KeyStore instance
+ */
+export function getKeyStore(): KeyStore {
+  if (!keyStoreInstance) {
+    keyStoreInstance = new KeyStore()
+  }
+  return keyStoreInstance
+}
diff --git a/app/src/features/security/lib/keys/share-key.ts b/app/src/features/security/lib/keys/share-key.ts
new file mode 100644
index 00000000..8ff9b7fd
--- /dev/null
+++ b/app/src/features/security/lib/keys/share-key.ts
@@ -0,0 +1,211 @@
+/**
+ * Share Key Management
+ *
+ * Handles key generation and management for shared document links.
+ * Supports two modes:
+ * 1. URL fragment mode: key is in the URL hash (never sent to server)
+ * 2. Password mode: key is derived from a password using PBKDF2
+ */
+
+import {
+  generateKey,
+  encrypt,
+  decrypt,
+  pbkdf2DeriveKey,
+  pbkdf2GenerateSalt,
+  getSodium,
+  PBKDF2_DEFAULT_ITERATIONS,
+} from '../crypto'
+
+/** Share key size (32 bytes) */
+export const SHARE_KEY_SIZE = 32
+
+/** URL fragment prefix for share keys */
+export const URL_FRAGMENT_PREFIX = 'key='
+
+/** Share key with metadata */
+export interface ShareKey {
+  /** 32-byte share key */
+  key: Uint8Array
+  /** Whether this is password-derived */
+  isPasswordProtected: boolean
+}
+
+/** Encrypted share key for API */
+export interface EncryptedShareKeyForApi {
+  /** Encrypted DEK (Base64) */
+  encryptedDek: string
+  /** Nonce (Base64) */
+  nonce: string
+  /** Salt for password-protected shares (Base64), if applicable */
+  salt?: string
+}
+
+/**
+ * Generate a new share key for URL fragment mode.
+ *
+ * @returns Share key and URL fragment
+ */
+export async function generateShareKey(): Promise<{
+  key: Uint8Array
+  fragment: string
+}> {
+  const sodium = await getSodium()
+  const key = await generateKey()
+  const keyBase64 = sodium.to_base64(key, sodium.base64_variants.URLSAFE_NO_PADDING)
+
+  return {
+    key,
+    fragment: `${URL_FRAGMENT_PREFIX}${keyBase64}`,
+  }
+}
+
+/**
+ * Extract share key from URL fragment.
+ *
+ * @param fragment - URL fragment (with or without leading #)
+ * @returns Decoded share key, or null if not found/invalid
+ */
+export async function extractShareKeyFromFragment(
+  fragment: string
+): Promise<Uint8Array | null> {
+  const sodium = await getSodium()
+
+  // Remove leading # if present
+  const cleanFragment = fragment.startsWith('#') ? fragment.slice(1) : fragment
+
+  // Check for key prefix
+  if (!cleanFragment.startsWith(URL_FRAGMENT_PREFIX)) {
+    return null
+  }
+
+  const keyBase64 = cleanFragment.slice(URL_FRAGMENT_PREFIX.length)
+
+  try {
+    return sodium.from_base64(keyBase64, sodium.base64_variants.URLSAFE_NO_PADDING)
+  } catch {
+    return null
+  }
+}
+
+/**
+ * Derive a share key from a password.
+ *
+ * @param password - User-provided password
+ * @param salt - Salt (generate new for creation, use existing for access)
+ * @returns Derived share key
+ */
+export async function deriveShareKeyFromPassword(
+  password: string,
+  salt: Uint8Array
+): Promise<Uint8Array> {
+  return pbkdf2DeriveKey(password, salt, PBKDF2_DEFAULT_ITERATIONS)
+}
+
+/**
+ * Create a password-protected share key.
+ *
+ * @param password - User-provided password
+ * @returns Derived share key and salt
+ */
+export async function createPasswordProtectedShareKey(
+  password: string
+): Promise<{
+  key: Uint8Array
+  salt: Uint8Array
+}> {
+  const salt = await pbkdf2GenerateSalt()
+  const key = await deriveShareKeyFromPassword(password, salt)
+
+  return { key, salt }
+}
+
+/**
+ * Encrypt a DEK with a share key for storage.
+ *
+ * @param dek - Document DEK to encrypt
+ * @param shareKey - Share key
+ * @returns Encrypted DEK and nonce (Base64)
+ */
+export async function encryptDekWithShareKey(
+  dek: Uint8Array,
+  shareKey: Uint8Array
+): Promise<{
+  encryptedDek: string
+  nonce: string
+}> {
+  const sodium = await getSodium()
+  const { ciphertext, nonce } = await encrypt(shareKey, dek)
+
+  return {
+    encryptedDek: sodium.to_base64(ciphertext, sodium.base64_variants.ORIGINAL),
+    nonce: sodium.to_base64(nonce, sodium.base64_variants.ORIGINAL),
+  }
+}
+
+/**
+ * Decrypt a DEK with a share key.
+ *
+ * @param encryptedDekBase64 - Encrypted DEK (Base64)
+ * @param nonceBase64 - Nonce (Base64)
+ * @param shareKey - Share key
+ * @returns Decrypted DEK
+ */
+export async function decryptDekWithShareKey(
+  encryptedDekBase64: string,
+  nonceBase64: string,
+  shareKey: Uint8Array
+): Promise<Uint8Array> {
+  const sodium = await getSodium()
+
+  const encryptedDek = sodium.from_base64(encryptedDekBase64, sodium.base64_variants.ORIGINAL)
+  const nonce = sodium.from_base64(nonceBase64, sodium.base64_variants.ORIGINAL)
+
+  return decrypt(shareKey, encryptedDek, nonce)
+}
+
+/**
+ * Build a complete share URL with the key in the fragment.
+ *
+ * @param baseUrl - Base share URL (e.g., "https://refmd.io/share/abc123")
+ * @param fragment - Key fragment (e.g., "key=...")
+ * @returns Complete URL with fragment
+ */
+export function buildShareUrl(baseUrl: string, fragment: string): string {
+  // Remove any existing fragment from baseUrl
+  const cleanBaseUrl = baseUrl.split('#')[0]
+  return `${cleanBaseUrl}#${fragment}`
+}
+
+/**
+ * Parse salt from API response.
+ *
+ * @param saltBase64 - Base64-encoded salt
+ * @returns Decoded salt
+ */
+export async function parseSaltFromApi(saltBase64: string): Promise<Uint8Array> {
+  const sodium = await getSodium()
+  return sodium.from_base64(saltBase64, sodium.base64_variants.ORIGINAL)
+}
+
+/**
+ * Encode salt for API request.
+ *
+ * @param salt - Salt bytes
+ * @returns Base64-encoded salt
+ */
+export async function encodeSaltForApi(salt: Uint8Array): Promise<string> {
+  const sodium = await getSodium()
+  return sodium.to_base64(salt, sodium.base64_variants.ORIGINAL)
+}
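A round-trip sketch of password mode, using only the functions defined in this file; the DEK here is random for illustration:

```ts
async function passwordShareRoundTrip(dek: Uint8Array, password: string): Promise<Uint8Array> {
  // Owner: derive a key from the password, wrap the DEK, persist salt + blob.
  const { key, salt } = await createPasswordProtectedShareKey(password)
  const { encryptedDek, nonce } = await encryptDekWithShareKey(dek, key)
  const saltForApi = await encodeSaltForApi(salt)

  // Recipient: re-derive the same key from the password and the stored salt.
  const sameSalt = await parseSaltFromApi(saltForApi)
  const sameKey = await deriveShareKeyFromPassword(password, sameSalt)
  return decryptDekWithShareKey(encryptedDek, nonce, sameKey) // same bytes as dek
}
```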
+/**
+ * Check if a URL fragment contains a share key.
+ *
+ * @param fragment - URL fragment
+ * @returns true if it contains a key fragment
+ */
+export function hasShareKeyFragment(fragment: string): boolean {
+  const cleanFragment = fragment.startsWith('#') ? fragment.slice(1) : fragment
+  return cleanFragment.startsWith(URL_FRAGMENT_PREFIX)
+}
diff --git a/app/src/features/security/lib/keys/umk.ts b/app/src/features/security/lib/keys/umk.ts
new file mode 100644
index 00000000..d3c3bacc
--- /dev/null
+++ b/app/src/features/security/lib/keys/umk.ts
@@ -0,0 +1,253 @@
+/**
+ * User Master Key (UMK) Management
+ *
+ * The UMK is the root of the key hierarchy:
+ * - Generated from random entropy or derived from a passphrase
+ * - Held in memory while the session is unlocked; persisted only according to
+ *   the user's "Remember Me" preference (see key-store.ts)
+ * - Can be recovered from a BIP39 mnemonic (recovery key)
+ */
+
+import {
+  argon2DeriveKey,
+  argon2DeriveKeyWithNewSalt,
+  isArgon2Supported,
+  pbkdf2DeriveKey,
+  pbkdf2DeriveKeyWithNewSalt,
+  DEFAULT_ARGON2_PARAMS,
+  PBKDF2_DEFAULT_ITERATIONS,
+  type Argon2Params,
+  generateRecoveryKey,
+  validateRecoveryKey,
+  recoveryKeyToUmk,
+  umkToRecoveryKey,
+  getSodium,
+} from '../crypto'
+
+import type { StoredKeys } from './key-store'
+
+/** UMK size in bytes (256 bits) */
+export const UMK_SIZE = 32
+
+/** Result of UMK generation */
+export interface UmkGenerationResult {
+  /** The generated UMK (32 bytes) */
+  umk: Uint8Array
+  /** BIP39 recovery key (24 words) */
+  recoveryKey: string
+  /** Salt used for passphrase derivation */
+  salt: Uint8Array
+  /** KDF type used */
+  kdf: 'argon2id' | 'pbkdf2'
+  /** KDF parameters */
+  kdfParams: Argon2Params | { iterations: number }
+}
+
+/** KDF parameters for storage */
+export type KdfParams = Argon2Params | { type: 'pbkdf2'; iterations: number }
+
+/**
+ * Generate a new UMK with recovery key.
+ *
+ * The UMK is derived from a passphrase using Argon2id (or PBKDF2 fallback).
+ * A recovery key (BIP39 mnemonic) is also generated for backup.
+ *
+ * @param passphrase - User's passphrase (min 8 characters; 12+ recommended)
+ * @returns UMK generation result
+ */
+export async function generateUmk(passphrase: string): Promise<UmkGenerationResult> {
+  // Validate passphrase
+  if (!passphrase || passphrase.length < 8) {
+    throw new Error('Passphrase must be at least 8 characters')
+  }
+
+  // Check if Argon2id is supported
+  const useArgon2 = await isArgon2Supported()
+
+  let umk: Uint8Array
+  let salt: Uint8Array
+  let kdf: 'argon2id' | 'pbkdf2'
+  let kdfParams: Argon2Params | { iterations: number }
+
+  if (useArgon2) {
+    const result = await argon2DeriveKeyWithNewSalt(passphrase, DEFAULT_ARGON2_PARAMS)
+    umk = result.key
+    salt = result.salt
+    kdf = 'argon2id'
+    kdfParams = DEFAULT_ARGON2_PARAMS
+  } else {
+    const result = await pbkdf2DeriveKeyWithNewSalt(passphrase, PBKDF2_DEFAULT_ITERATIONS)
+    umk = result.key
+    salt = result.salt
+    kdf = 'pbkdf2'
+    kdfParams = { iterations: PBKDF2_DEFAULT_ITERATIONS }
+  }
+
+  // Generate recovery key from the UMK
+  const recoveryKey = umkToRecoveryKey(umk)
+
+  return {
+    umk,
+    recoveryKey,
+    salt,
+    kdf,
+    kdfParams,
+  }
+}
+/**
+ * Derive the UMK from a passphrase using stored parameters.
+ *
+ * @param passphrase - User's passphrase
+ * @param salt - Salt from key store
+ * @param kdf - KDF type ('argon2id' or 'pbkdf2')
+ * @param kdfParams - KDF parameters
+ * @returns Derived UMK
+ */
+export async function deriveUmkFromPassphrase(
+  passphrase: string,
+  salt: Uint8Array,
+  kdf: 'argon2id' | 'pbkdf2',
+  kdfParams: Argon2Params | { iterations: number }
+): Promise<Uint8Array> {
+  if (kdf === 'argon2id') {
+    return argon2DeriveKey(passphrase, salt, kdfParams as Argon2Params)
+  } else {
+    return pbkdf2DeriveKey(
+      passphrase,
+      salt,
+      (kdfParams as { iterations: number }).iterations
+    )
+  }
+}
+
+/**
+ * Restore the UMK from a recovery key (BIP39 mnemonic).
+ *
+ * @param recoveryKey - 24-word BIP39 mnemonic
+ * @returns The restored UMK
+ * @throws Error if recovery key is invalid
+ */
+export function restoreUmkFromRecoveryKey(recoveryKey: string): Uint8Array {
+  if (!validateRecoveryKey(recoveryKey)) {
+    throw new Error('Invalid recovery key')
+  }
+
+  return recoveryKeyToUmk(recoveryKey)
+}
+
+/**
+ * Generate a fresh salt and KDF parameters for a new passphrase.
+ * Used when the user changes their passphrase; the UMK itself is unchanged.
+ *
+ * @param newPassphrase - New passphrase
+ * @returns New salt and KDF parameters
+ */
+export async function reEncryptUmk(
+  _umk: Uint8Array,
+  newPassphrase: string
+): Promise<{
+  salt: Uint8Array
+  kdf: 'argon2id' | 'pbkdf2'
+  kdfParams: Argon2Params | { iterations: number }
+}> {
+  if (!newPassphrase || newPassphrase.length < 8) {
+    throw new Error('Passphrase must be at least 8 characters')
+  }
+
+  const useArgon2 = await isArgon2Supported()
+
+  if (useArgon2) {
+    const { salt } = await argon2DeriveKeyWithNewSalt(newPassphrase, DEFAULT_ARGON2_PARAMS)
+    return {
+      salt,
+      kdf: 'argon2id',
+      kdfParams: DEFAULT_ARGON2_PARAMS,
+    }
+  } else {
+    const { salt } = await pbkdf2DeriveKeyWithNewSalt(newPassphrase, PBKDF2_DEFAULT_ITERATIONS)
+    return {
+      salt,
+      kdf: 'pbkdf2',
+      kdfParams: { iterations: PBKDF2_DEFAULT_ITERATIONS },
+    }
+  }
+}
+
+/**
+ * Verify whether a passphrase matches the stored keys.
+ * Derives the UMK and attempts to decrypt the stored private key.
+ *
+ * @param passphrase - Passphrase to verify
+ * @param storedKeys - Stored keys from IndexedDB
+ * @returns true if the passphrase is correct
+ */
+export async function verifyPassphrase(
+  passphrase: string,
+  storedKeys: StoredKeys
+): Promise<boolean> {
+  try {
+    const umk = await deriveUmkFromPassphrase(
+      passphrase,
+      storedKeys.salt,
+      storedKeys.kdf,
+      storedKeys.kdfParams
+    )
+
+    // Try to decrypt the stored private key
+    const sodium = await getSodium()
+
+    // Try ECDH key decryption
+    const decrypted = sodium.crypto_secretbox_open_easy(
+      storedKeys.encryptedEcdhPrivateKey,
+      storedKeys.encryptedEcdhPrivateKeyNonce,
+      umk
+    )
+
+    // If we get here, the passphrase is correct
+    // Zero out the decrypted key
+    decrypted.fill(0)
+    umk.fill(0)
+
+    return true
+  } catch {
+    return false
+  }
+}
+
+/**
+ * Generate a new recovery key for an existing UMK.
+ * The UMK must already be unlocked.
+ *
+ * @param umk - Current UMK
+ * @returns New recovery key (24 words)
+ */
+export function generateNewRecoveryKey(umk: Uint8Array): string {
+  if (umk.length !== UMK_SIZE) {
+    throw new Error(`Invalid UMK size: expected ${UMK_SIZE}, got ${umk.length}`)
+  }
+
+  return umkToRecoveryKey(umk)
+}
+
+/**
+ * Validate a recovery key without restoring.
+ *
+ * @param recoveryKey - Recovery key to validate
+ * @returns true if valid BIP39 mnemonic
+ */
+export { validateRecoveryKey }
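Since the recovery key is a BIP39 encoding of the UMK itself, a restore must yield byte-identical key material; a sketch using only functions from this file:

```ts
async function recoveryRoundTrip(passphrase: string): Promise<boolean> {
  const { umk, recoveryKey } = await generateUmk(passphrase)

  if (!validateRecoveryKey(recoveryKey)) throw new Error('unexpected: invalid mnemonic')
  const restored = restoreUmkFromRecoveryKey(recoveryKey)

  // Byte-identical key material is the whole contract.
  return restored.every((byte, i) => byte === umk[i])
}
```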
+/**
+ * Export recovery key generation for new UMKs.
+ */
+export { generateRecoveryKey }
+
+/**
+ * Zero out the UMK in memory (call when locking the session).
+ *
+ * @param umk - UMK to zero out
+ */
+export function zeroUmk(umk: Uint8Array): void {
+  umk.fill(0)
+}
diff --git a/app/src/features/security/lib/keys/user-keys.ts b/app/src/features/security/lib/keys/user-keys.ts
new file mode 100644
index 00000000..2aaa24cb
--- /dev/null
+++ b/app/src/features/security/lib/keys/user-keys.ts
@@ -0,0 +1,185 @@
+/**
+ * User Key Pair Management
+ *
+ * Manages ECDH key pairs (for key exchange) and Ed25519 key pairs (for signing).
+ * Private keys are encrypted with UMK before storage.
+ */
+
+import {
+  generateEcdhKeyPair,
+  generateSigningKeyPair,
+  encrypt,
+  decrypt,
+  getSodium,
+  type EcdhKeyPair,
+  type Ed25519KeyPair,
+} from '../crypto'
+
+import type { StoredKeys } from './key-store'
+
+/** Complete user key set */
+export interface UserKeySet {
+  /** ECDH key pair for key exchange */
+  ecdh: EcdhKeyPair
+  /** Ed25519 key pair for signing */
+  signing: Ed25519KeyPair
+}
+
+/** Encrypted private keys for storage */
+export interface EncryptedUserKeys {
+  /** ECDH private key encrypted with UMK */
+  encryptedEcdhPrivateKey: Uint8Array
+  /** Nonce for ECDH private key encryption */
+  encryptedEcdhPrivateKeyNonce: Uint8Array
+  /** Ed25519 signing private key encrypted with UMK */
+  encryptedSigningPrivateKey: Uint8Array
+  /** Nonce for signing private key encryption */
+  encryptedSigningPrivateKeyNonce: Uint8Array
+  /** ECDH public key (unencrypted) */
+  ecdhPublicKey: Uint8Array
+  /** Ed25519 signing public key (unencrypted) */
+  signingPublicKey: Uint8Array
+}
+
+/**
+ * Generate a new set of user key pairs.
+ *
+ * @returns New ECDH and Ed25519 key pairs
+ */
+export async function generateUserKeys(): Promise<UserKeySet> {
+  // Generate ECDH key pair for key exchange
+  const ecdh = generateEcdhKeyPair()
+
+  // Generate Ed25519 key pair for signing
+  const signing = await generateSigningKeyPair()
+
+  return { ecdh, signing }
+}
+
+/**
+ * Encrypt user private keys with UMK for storage.
+ *
+ * @param keys - User key set
+ * @param umk - User Master Key
+ * @returns Encrypted keys ready for storage
+ */
+export async function encryptUserKeys(
+  keys: UserKeySet,
+  umk: Uint8Array
+): Promise<EncryptedUserKeys> {
+  // Encrypt ECDH private key
+  const ecdhResult = await encrypt(umk, keys.ecdh.privateKey)
+
+  // Encrypt Ed25519 signing private key
+  const signingResult = await encrypt(umk, keys.signing.privateKey)
+
+  return {
+    encryptedEcdhPrivateKey: ecdhResult.ciphertext,
+    encryptedEcdhPrivateKeyNonce: ecdhResult.nonce,
+    encryptedSigningPrivateKey: signingResult.ciphertext,
+    encryptedSigningPrivateKeyNonce: signingResult.nonce,
+    ecdhPublicKey: keys.ecdh.publicKey,
+    signingPublicKey: keys.signing.publicKey,
+  }
+}
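A wrap/unwrap sketch under a throwaway UMK; `decryptUserKeys()` follows just below, and the `{ iterations: number }` shape used for the placeholder Pbkdf2Params metadata is an assumption:

```ts
async function wrapUnwrapDemo(): Promise<void> {
  const umk = crypto.getRandomValues(new Uint8Array(32))
  const keys = await generateUserKeys()
  const wrapped = await encryptUserKeys(keys, umk)
  // `wrapped` holds only ciphertexts, nonces, and public keys - safe to persist.

  const unwrapped = await decryptUserKeys(
    {
      ...wrapped,
      // Placeholder KDF metadata: StoredKeys requires it, decryption ignores it.
      salt: new Uint8Array(16),
      kdf: 'pbkdf2',
      kdfParams: { iterations: 1 },
      createdAt: Date.now(),
    },
    umk
  )
  zeroUserKeys(unwrapped) // zero the private keys when finished
}
```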
+/**
+ * Decrypt user private keys from storage.
+ *
+ * @param storedKeys - Stored encrypted keys
+ * @param umk - User Master Key
+ * @returns Decrypted user key set
+ */
+export async function decryptUserKeys(
+  storedKeys: StoredKeys,
+  umk: Uint8Array
+): Promise<UserKeySet> {
+  // Decrypt ECDH private key
+  const ecdhPrivateKey = await decrypt(
+    umk,
+    storedKeys.encryptedEcdhPrivateKey,
+    storedKeys.encryptedEcdhPrivateKeyNonce
+  )
+
+  // Decrypt Ed25519 signing private key
+  const signingPrivateKey = await decrypt(
+    umk,
+    storedKeys.encryptedSigningPrivateKey,
+    storedKeys.encryptedSigningPrivateKeyNonce
+  )
+
+  return {
+    ecdh: {
+      privateKey: ecdhPrivateKey,
+      publicKey: storedKeys.ecdhPublicKey,
+    },
+    signing: {
+      privateKey: signingPrivateKey,
+      publicKey: storedKeys.signingPublicKey,
+    },
+  }
+}
+
+/**
+ * Re-encrypt user keys with a new UMK (for passphrase change).
+ *
+ * @param keys - Current user key set
+ * @param newUmk - New User Master Key
+ * @returns Newly encrypted keys
+ */
+export async function reEncryptUserKeys(
+  keys: UserKeySet,
+  newUmk: Uint8Array
+): Promise<EncryptedUserKeys> {
+  return encryptUserKeys(keys, newUmk)
+}
+
+/**
+ * Zero out user keys from memory (call when locking session).
+ *
+ * @param keys - User key set to zero out
+ */
+export function zeroUserKeys(keys: UserKeySet): void {
+  keys.ecdh.privateKey.fill(0)
+  keys.signing.privateKey.fill(0)
+}
+
+/**
+ * Convert public keys to Base64 for API transmission.
+ *
+ * @param keys - User key set
+ * @returns Base64-encoded public keys
+ */
+export async function getPublicKeysBase64(keys: UserKeySet): Promise<{
+  ecdhPublicKey: string
+  signingPublicKey: string
+}> {
+  const sodium = await getSodium()
+
+  return {
+    ecdhPublicKey: sodium.to_base64(keys.ecdh.publicKey, sodium.base64_variants.ORIGINAL),
+    signingPublicKey: sodium.to_base64(keys.signing.publicKey, sodium.base64_variants.ORIGINAL),
+  }
+}
+
+/**
+ * Parse public keys from Base64 (from API).
+ *
+ * @param ecdhPublicKey - Base64-encoded ECDH public key
+ * @param signingPublicKey - Base64-encoded signing public key
+ * @returns Decoded public keys
+ */
+export async function parsePublicKeysFromBase64(
+  ecdhPublicKey: string,
+  signingPublicKey: string
+): Promise<{
+  ecdhPublicKey: Uint8Array
+  signingPublicKey: Uint8Array
+}> {
+  const sodium = await getSodium()
+
+  return {
+    ecdhPublicKey: sodium.from_base64(ecdhPublicKey, sodium.base64_variants.ORIGINAL),
+    signingPublicKey: sodium.from_base64(signingPublicKey, sodium.base64_variants.ORIGINAL),
+  }
+}
diff --git a/app/src/features/security/lib/keys/workspace-kek.ts b/app/src/features/security/lib/keys/workspace-kek.ts
new file mode 100644
index 00000000..696802f5
--- /dev/null
+++ b/app/src/features/security/lib/keys/workspace-kek.ts
@@ -0,0 +1,211 @@
+/**
+ * Workspace KEK (Key Encryption Key) Management
+ *
+ * KEKs are used to encrypt Document DEKs within a workspace.
+ * Each workspace has its own KEK, encrypted for each member.
+ */
+
+import {
+  generateKey,
+  getSodium,
+  encryptKeyForRecipient,
+  decryptKeyFromSender,
+} from '../crypto'
+
+import { getKekCache } from './key-cache'
+
+/** HKDF context for KEK derivation */
+const KEK_HKDF_INFO = 'refmd_workspace_kek'
+
+/** KEK size (32 bytes) */
+export const KEK_SIZE = 32
+
+/** Workspace KEK with metadata */
+export interface WorkspaceKek {
+  /** Workspace ID */
+  workspaceId: string
+  /** 32-byte KEK */
+  key: Uint8Array
+  /** Key version for rotation */
+  version: number
+}
+
+/** Encrypted KEK from API response */
+export interface EncryptedKekFromApi {
+  /** Encrypted KEK (Base64) */
+  encryptedKek: string
+  /** Key version */
+  keyVersion: number
+  /** Workspace ID */
+  workspaceId: string
+}
+
+/**
+ * Generate a new Workspace KEK.
+ *
+ * @returns 32-byte random KEK
+ */
+export async function generateWorkspaceKek(): Promise<Uint8Array> {
+  return generateKey()
+}
+
+/**
+ * Encrypt a KEK for a recipient using their public key.
+ *
+ * @param kek - The KEK to encrypt
+ * @param recipientPublicKey - Recipient's ECDH public key
+ * @returns Encrypted KEK with ephemeral public key and nonce
+ */
+export async function encryptKekForRecipient(
+  kek: Uint8Array,
+  recipientPublicKey: Uint8Array
+): Promise<{
+  encryptedKek: Uint8Array
+  ephemeralPublicKey: Uint8Array
+  nonce: Uint8Array
+}> {
+  const result = await encryptKeyForRecipient(recipientPublicKey, kek, KEK_HKDF_INFO)
+
+  return {
+    encryptedKek: result.encryptedKey,
+    ephemeralPublicKey: result.ephemeralPublicKey,
+    nonce: result.nonce,
+  }
+}
+
+/**
+ * Decrypt a KEK that was encrypted for us.
+ *
+ * @param ourPrivateKey - Our ECDH private key
+ * @param encryptedKek - Encrypted KEK
+ * @param ephemeralPublicKey - Sender's ephemeral public key
+ * @param nonce - Nonce used for encryption
+ * @returns Decrypted KEK
+ */
+export async function decryptKek(
+  ourPrivateKey: Uint8Array,
+  encryptedKek: Uint8Array,
+  ephemeralPublicKey: Uint8Array,
+  nonce: Uint8Array
+): Promise<Uint8Array> {
+  return decryptKeyFromSender(ourPrivateKey, ephemeralPublicKey, encryptedKek, nonce, KEK_HKDF_INFO)
+}
+
+/**
+ * Decrypt a KEK from API response format.
+ * The API returns the encrypted KEK as: ephemeralPublicKey || nonce || ciphertext (all Base64 encoded together)
+ *
+ * @param ourPrivateKey - Our ECDH private key
+ * @param encryptedKekBase64 - Base64-encoded encrypted KEK from API
+ * @returns Decrypted KEK
+ */
+export async function decryptKekFromApiResponse(
+  ourPrivateKey: Uint8Array,
+  encryptedKekBase64: string
+): Promise<Uint8Array> {
+  const sodium = await getSodium()
+  const combined = sodium.from_base64(encryptedKekBase64, sodium.base64_variants.ORIGINAL)
+
+  // Format: ephemeralPublicKey (65 bytes) || nonce (24 bytes) || ciphertext (remaining)
+  const EPHEMERAL_KEY_SIZE = 65
+  const NONCE_SIZE = 24
+
+  if (combined.length < EPHEMERAL_KEY_SIZE + NONCE_SIZE + 1) {
+    throw new Error('Invalid encrypted KEK format: too short')
+  }
+
+  const ephemeralPublicKey = combined.slice(0, EPHEMERAL_KEY_SIZE)
+  const nonce = combined.slice(EPHEMERAL_KEY_SIZE, EPHEMERAL_KEY_SIZE + NONCE_SIZE)
+  const encryptedKek = combined.slice(EPHEMERAL_KEY_SIZE + NONCE_SIZE)
+
+  return decryptKek(ourPrivateKey, encryptedKek, ephemeralPublicKey, nonce)
+}
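The Base64 blob layout is the contract between `encodeKekForApi()` (just below) and `decryptKekFromApiResponse()` (above); a round-trip sketch, with `recipient` standing in for a `UserKeySet` from user-keys.ts:

```ts
async function kekWireRoundTrip(recipient: UserKeySet): Promise<Uint8Array> {
  const kek = await generateWorkspaceKek()
  const sealed = await encryptKekForRecipient(kek, recipient.ecdh.publicKey)

  // encodeKekForApi() packs ephemeralPublicKey || nonce || ciphertext into one
  // Base64 string; decryptKekFromApiResponse() splits that exact layout again.
  const blob = await encodeKekForApi(sealed.encryptedKek, sealed.ephemeralPublicKey, sealed.nonce)
  return decryptKekFromApiResponse(recipient.ecdh.privateKey, blob) // same bytes as kek
}
```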
+/**
+ * Encode a KEK for API request.
+ * Format: ephemeralPublicKey || nonce || ciphertext (all Base64 encoded together)
+ *
+ * @param encryptedKek - Encrypted KEK
+ * @param ephemeralPublicKey - Ephemeral public key
+ * @param nonce - Nonce
+ * @returns Base64-encoded combined data
+ */
+export async function encodeKekForApi(
+  encryptedKek: Uint8Array,
+  ephemeralPublicKey: Uint8Array,
+  nonce: Uint8Array
+): Promise<string> {
+  const sodium = await getSodium()
+
+  // Combine: ephemeralPublicKey || nonce || ciphertext
+  const combined = new Uint8Array(
+    ephemeralPublicKey.length + nonce.length + encryptedKek.length
+  )
+  combined.set(ephemeralPublicKey, 0)
+  combined.set(nonce, ephemeralPublicKey.length)
+  combined.set(encryptedKek, ephemeralPublicKey.length + nonce.length)
+
+  return sodium.to_base64(combined, sodium.base64_variants.ORIGINAL)
+}
+
+/**
+ * Get KEK from cache or fetch from API.
+ *
+ * @param workspaceId - Workspace ID
+ * @param ecdhPrivateKey - Our ECDH private key
+ * @param fetchKekFn - Function returning the Base64-encoded encrypted KEK from the API
+ * @returns Decrypted KEK
+ */
+export async function getOrFetchKek(
+  workspaceId: string,
+  ecdhPrivateKey: Uint8Array,
+  fetchKekFn: () => Promise<string>
+): Promise<Uint8Array> {
+  const cache = getKekCache()
+
+  // Check cache first
+  const cached = cache.getKek(workspaceId)
+  if (cached) {
+    return cached
+  }
+
+  // Fetch from API
+  const encryptedKekBase64 = await fetchKekFn()
+
+  // Decrypt
+  const kek = await decryptKekFromApiResponse(ecdhPrivateKey, encryptedKekBase64)
+
+  // Cache the result
+  cache.setKek(workspaceId, kek)
+
+  return kek
+}
+
+/**
+ * Invalidate cached KEK (e.g., after rotation).
+ *
+ * @param workspaceId - Workspace ID
+ */
+export function invalidateCachedKek(workspaceId: string): void {
+  const cache = getKekCache()
+  cache.deleteKek(workspaceId)
+}
+
+/**
+ * Create a KEK for a new workspace member.
+ *
+ * @param kek - The workspace KEK
+ * @param memberPublicKey - New member's ECDH public key
+ * @returns Encrypted KEK for the new member (Base64)
+ */
+export async function createKekForMember(
+  kek: Uint8Array,
+  memberPublicKey: Uint8Array
+): Promise<string> {
+  const { encryptedKek, ephemeralPublicKey, nonce } = await encryptKekForRecipient(
+    kek,
+    memberPublicKey
+  )
+
+  return encodeKekForApi(encryptedKek, ephemeralPublicKey, nonce)
+}
diff --git a/app/src/features/security/lib/migration.ts b/app/src/features/security/lib/migration.ts
new file mode 100644
index 00000000..5894e099
--- /dev/null
+++ b/app/src/features/security/lib/migration.ts
@@ -0,0 +1,261 @@
+/**
+ * E2EE Migration Helper
+ *
+ * Handles the migration of existing data to E2EE by generating
+ * workspace KEKs and document DEKs.
+ */
+
+import {
+  me as fetchMe,
+  switchWorkspace,
+  listDocuments as apiListDocuments,
+  migrate as apiMigrate,
+} from '@/shared/api'
+import type {
+  MigrateRequest,
+  MemberEncryptedKekRequest,
+  EncryptedDekRequest,
+} from '@/shared/api'
+
+import { toBase64 } from './crypto'
+import {
+  generateDocumentDek,
+  createEncryptedDekForApi,
+} from './keys/document-dek'
+import {
+  generateWorkspaceKek,
+  encryptKekForRecipient,
+  encodeKekForApi,
+} from './keys/workspace-kek'
+
+export interface MigrationProgress {
+  stage: 'preparing' | 'generating_keys' | 'migrating' | 'complete'
+  current: number
+  total: number
+  message: string
+}
+
+export interface MigrationResult {
+  documentsEncrypted: number
+  filesEncrypted: number
+  updatesCleared: number
+  status: string
+}
+/**
+ * Perform E2EE migration for an existing user.
+ *
+ * This function:
+ * 1. Gets all of the user's workspaces
+ * 2. For each workspace, generates a KEK
+ * 3. Lists all documents and generates DEKs
+ * 4. Calls the migration API to encrypt existing data
+ *
+ * @param userPublicKey - User's ECDH public key (for encrypting KEKs)
+ * @param userId - User's ID
+ * @param onProgress - Optional progress callback
+ * @returns Migration result
+ */
+export async function performMigration(
+  userPublicKey: Uint8Array,
+  userId: string,
+  onProgress?: (progress: MigrationProgress) => void
+): Promise<MigrationResult> {
+  const report = (progress: MigrationProgress) => {
+    onProgress?.(progress)
+  }
+
+  report({
+    stage: 'preparing',
+    current: 0,
+    total: 0,
+    message: 'Preparing migration...',
+  })
+
+  // Get user info with workspaces
+  const userInfo = await fetchMe()
+  const workspaces = userInfo.workspaces || []
+
+  if (workspaces.length === 0) {
+    // No workspaces to migrate
+    return {
+      documentsEncrypted: 0,
+      filesEncrypted: 0,
+      updatesCleared: 0,
+      status: 'completed',
+    }
+  }
+
+  // Track the original workspace to restore it later
+  const originalWorkspaceId = userInfo.active_workspace_id
+
+  // Collect all documents across workspaces
+  const allDocuments: { workspaceId: string; documentId: string }[] = []
+
+  report({
+    stage: 'preparing',
+    current: 0,
+    total: workspaces.length,
+    message: 'Listing documents...',
+  })
+
+  // For each workspace, list documents
+  for (let i = 0; i < workspaces.length; i++) {
+    const workspace = workspaces[i]
+
+    report({
+      stage: 'preparing',
+      current: i + 1,
+      total: workspaces.length,
+      message: `Listing documents in ${workspace.name}...`,
+    })
+
+    // Switch to this workspace
+    await switchWorkspace({ id: workspace.id })
+
+    // List all documents (including archived)
+    const docs = await apiListDocuments({ state: 'all' })
+
+    // Add to collection
+    for (const doc of docs.items || []) {
+      // Only include actual documents, not folders
+      if (doc.type === 'document') {
+        allDocuments.push({
+          workspaceId: workspace.id,
+          documentId: doc.id,
+        })
+      }
+    }
+  }
+
+  // Restore the original workspace
+  if (originalWorkspaceId) {
+    await switchWorkspace({ id: originalWorkspaceId })
+  }
+
+  const totalItems = workspaces.length + allDocuments.length
+
+  report({
+    stage: 'generating_keys',
+    current: 0,
+    total: totalItems,
+    message: 'Generating encryption keys...',
+  })
+
+  // Generate KEKs for each workspace
+  const workspaceKeks: { [key: string]: string } = {}
+  const encryptedWorkspaceKeks: { [key: string]: MemberEncryptedKekRequest[] } = {}
+  const workspaceKekRaw: { [key: string]: Uint8Array } = {} // Keep raw KEKs for DEK encryption
+
+  for (let i = 0; i < workspaces.length; i++) {
+    const workspace = workspaces[i]
+
+    report({
+      stage: 'generating_keys',
+      current: i + 1,
+      total: totalItems,
+      message: `Generating key for workspace "${workspace.name}"...`,
+    })
+
+    // Generate KEK
+    const kek = await generateWorkspaceKek()
+    workspaceKekRaw[workspace.id] = kek
+
+    // Store raw KEK (Base64) for server-side encryption
+    workspaceKeks[workspace.id] = await toBase64(kek)
+
+    // Encrypt KEK for the current user
+    const { encryptedKek, ephemeralPublicKey, nonce } = await encryptKekForRecipient(
+      kek,
+      userPublicKey
+    )
+    const encryptedKekBase64 = await encodeKekForApi(encryptedKek, ephemeralPublicKey, nonce)
+
+    encryptedWorkspaceKeks[workspace.id] = [
+      {
+        userId: userId,
+        encryptedKek: encryptedKekBase64,
+      },
+    ]
+  }
+
+  // Generate DEKs for each document
+  const documentDeks: { [key: string]: string } = {}
+  const encryptedDocumentDeks: { [key: string]: EncryptedDekRequest } = {}
+
+  for (let i = 0; i < allDocuments.length; i++) {
+    const { workspaceId, documentId } = allDocuments[i]
+
+    report({
+      stage: 'generating_keys',
+      current: workspaces.length + i + 1,
+      total: totalItems,
+      message: `Generating key for document ${i + 1}/${allDocuments.length}...`,
+    })
+
+    // Generate DEK
+    const dek = await generateDocumentDek()
+
+    // Store raw DEK (Base64) for server-side encryption
+    documentDeks[documentId] = await toBase64(dek)
+
+    // Encrypt DEK with workspace KEK
+    const kek = workspaceKekRaw[workspaceId]
+    if (!kek) {
+      throw new Error(`No KEK found for workspace ${workspaceId}`)
+    }
+
+    const { encryptedDek, nonce } = await createEncryptedDekForApi(dek, kek)
+    encryptedDocumentDeks[documentId] = {
+      encryptedDek,
+      nonce,
+    }
+  }
+
+  report({
+    stage: 'migrating',
+    current: 0,
+    total: 1,
+    message: 'Encrypting data on server...',
+  })
+
+  // Call the migration API
+  const migrateRequest: MigrateRequest = {
+    workspaceKeks,
+    documentDeks,
+    encryptedWorkspaceKeks,
+    encryptedDocumentDeks,
+  }
+
+  const result = await apiMigrate({ requestBody: migrateRequest })
+
+  report({
+    stage: 'complete',
+    current: 1,
+    total: 1,
+    message: 'Migration complete!',
+  })
+
+  // Clear raw KEKs from memory
+  for (const kek of Object.values(workspaceKekRaw)) {
+    kek.fill(0)
+  }
+
+  return {
+    documentsEncrypted: result.documentsEncrypted,
+    filesEncrypted: result.filesEncrypted,
+    updatesCleared: result.updatesCleared,
+    status: result.status,
+  }
+}
+
+/**
+ * Check if migration is needed for the current user.
+ * Only the presence of workspaces is checked here; the caller decides
+ * whether E2EE setup has already been completed.
+ */
+export async function checkNeedsMigration(): Promise<boolean> {
+  const userInfo = await fetchMe()
+  const workspaces = userInfo.workspaces || []
+
+  // If the user has workspaces, there is data to migrate
+  return workspaces.length > 0
+}
diff --git a/app/src/features/security/lib/network/index.ts b/app/src/features/security/lib/network/index.ts
new file mode 100644
index 00000000..2b508f1b
--- /dev/null
+++ b/app/src/features/security/lib/network/index.ts
@@ -0,0 +1,19 @@
+/**
+ * Network Module
+ *
+ * Provides retry logic and offline queue functionality.
+ */
+
+export {
+  withRetry,
+  makeRetryable,
+  wrapNetworkError,
+  type RetryOptions,
+} from './retry'
+
+export {
+  OfflineQueue,
+  getOfflineQueue,
+  type QueuedOperation,
+  type OperationType,
+} from './offline-queue'
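An illustrative caller for `performMigration()`; it assumes an already-unlocked session and a `currentUserId` supplied by the auth layer:

```ts
async function migrateIfNeeded(currentUserId: string): Promise<void> {
  if (!(await checkNeedsMigration())) return

  const km = getKeyManager() // session must already be unlocked
  const { publicKey } = km.getEcdhKeyPair()

  const result = await performMigration(publicKey, currentUserId, (p) => {
    console.log(`[migration] ${p.stage} ${p.current}/${p.total}: ${p.message}`)
  })
  console.log(`Encrypted ${result.documentsEncrypted} documents`)
}
```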
+ */
+
+const DB_NAME = 'refmd-offline-queue'
+const DB_VERSION = 1
+const STORE_NAME = 'queue'
+
+export type OperationType = 'sync' | 'upload' | 'key_refresh'
+
+export interface QueuedOperation {
+  /** Unique ID */
+  id: string
+  /** Operation type */
+  type: OperationType
+  /** Encrypted payload (never plaintext) */
+  payload: string
+  /** Associated document ID */
+  documentId?: string
+  /** Associated workspace ID */
+  workspaceId?: string
+  /** When the operation was queued */
+  createdAt: number
+  /** Number of retry attempts */
+  retryCount: number
+}
+
+/**
+ * Generate a unique ID
+ */
+function generateId(): string {
+  return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`
+}
+
+/**
+ * Open the IndexedDB database
+ */
+function openDatabase(): Promise<IDBDatabase> {
+  return new Promise((resolve, reject) => {
+    const request = indexedDB.open(DB_NAME, DB_VERSION)
+
+    request.onerror = () => {
+      reject(new Error(`Failed to open offline queue database: ${request.error?.message}`))
+    }
+
+    request.onsuccess = () => {
+      resolve(request.result)
+    }
+
+    request.onupgradeneeded = (event) => {
+      const db = (event.target as IDBOpenDBRequest).result
+
+      if (!db.objectStoreNames.contains(STORE_NAME)) {
+        const store = db.createObjectStore(STORE_NAME, { keyPath: 'id' })
+        store.createIndex('type', 'type', { unique: false })
+        store.createIndex('createdAt', 'createdAt', { unique: false })
+      }
+    }
+  })
+}
+
+/**
+ * OfflineQueue - manages pending operations when offline
+ */
+export class OfflineQueue {
+  private db: IDBDatabase | null = null
+
+  /**
+   * Initialize the queue
+   */
+  async initialize(): Promise<void> {
+    if (this.db) return
+    this.db = await openDatabase()
+  }
+
+  /**
+   * Ensure database is initialized
+   */
+  private async ensureDb(): Promise<IDBDatabase> {
+    if (!this.db) {
+      await this.initialize()
+    }
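+    // Non-null assertion below is safe: initialize() either set this.db or threw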
+    return this.db!
+  }
+
+  /**
+   * Add an operation to the queue
+   */
+  async add(
+    operation: Omit<QueuedOperation, 'id' | 'createdAt' | 'retryCount'>
+  ): Promise<string> {
+    const db = await this.ensureDb()
+
+    const entry: QueuedOperation = {
+      id: generateId(),
+      createdAt: Date.now(),
+      retryCount: 0,
+      ...operation,
+    }
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.add(entry)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to add to offline queue: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve(entry.id)
+      }
+    })
+  }
+
+  /**
+   * Get all queued operations
+   */
+  async getAll(): Promise<QueuedOperation[]> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readonly')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.getAll()
+
+      request.onerror = () => {
+        reject(new Error(`Failed to get offline queue: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        // Sort by createdAt ascending (oldest first)
+        const operations = (request.result as QueuedOperation[]).sort(
+          (a, b) => a.createdAt - b.createdAt
+        )
+        resolve(operations)
+      }
+    })
+  }
+
+  /**
+   * Get operations by type
+   */
+  async getByType(type: OperationType): Promise<QueuedOperation[]> {
+    const all = await this.getAll()
+    return all.filter((op) => op.type === type)
+  }
+
+  /**
+   * Get queue count
+   */
+  async count(): Promise<number> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readonly')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.count()
+
+      request.onerror = () => {
+        reject(new Error(`Failed to count offline queue: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve(request.result)
+      }
+    })
+  }
+
+  /**
+   * Remove an operation from the queue
+   */
+  async remove(id: string): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.delete(id)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to remove from offline queue: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Increment retry count for an operation
+   */
+  async incrementRetryCount(id: string): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const getRequest = store.get(id)
+
+      getRequest.onerror = () => {
+        reject(new Error(`Failed to get operation: ${getRequest.error?.message}`))
+      }
+
+      getRequest.onsuccess = () => {
+        const operation = getRequest.result as QueuedOperation | undefined
+        if (!operation) {
+          resolve()
+          return
+        }
+
+        operation.retryCount++
+        const putRequest = store.put(operation)
+
+        putRequest.onerror = () => {
+          reject(new Error(`Failed to update operation: ${putRequest.error?.message}`))
+        }
+
+        putRequest.onsuccess = () => {
+          resolve()
+        }
+      }
+    })
+  }
+
+  /**
+   * Clear all operations
+   */
+  async clear(): Promise<void> {
+    const db = await this.ensureDb()
+
+    return new Promise((resolve, reject) => {
+      const transaction = db.transaction(STORE_NAME, 'readwrite')
+      const store = transaction.objectStore(STORE_NAME)
+      const request = store.clear()
+
+      request.onerror = () => {
+        reject(new Error(`Failed to clear offline queue: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+
+  /**
+   * Close the database connection
+   */
+  close(): void {
+    if (this.db) {
+      this.db.close()
+      this.db = null
+    }
+  }
+
+  /**
+   * Delete the entire database
+   */
+  static async deleteDatabase(): Promise<void> {
+    return new Promise((resolve, reject) => {
+      const request = indexedDB.deleteDatabase(DB_NAME)
+
+      request.onerror = () => {
+        reject(new Error(`Failed to delete offline queue database: ${request.error?.message}`))
+      }
+
+      request.onsuccess = () => {
+        resolve()
+      }
+    })
+  }
+}
+
+// Singleton instance
+let offlineQueueInstance: OfflineQueue | null = null
+
+/**
+ * Get the singleton OfflineQueue instance
+ */
+export function getOfflineQueue(): OfflineQueue {
+  if (!offlineQueueInstance) {
+    offlineQueueInstance = new OfflineQueue()
+  }
+  return offlineQueueInstance
+}
diff --git a/app/src/features/security/lib/network/retry.ts b/app/src/features/security/lib/network/retry.ts
new file mode 100644
index 00000000..e7fb7332
--- /dev/null
+++ b/app/src/features/security/lib/network/retry.ts
@@ -0,0 +1,142 @@
+/**
+ * Retry Logic with Backoff
+ *
+ * Provides retry functionality for network operations using a configurable
+ * delay schedule (default: 1s, 5s, 15s).
+ */
+
+import { CryptoError, ERROR_CODES, isCryptoError } from '@/shared/types/security'
+
+const DEFAULT_MAX_RETRIES = 3
+const DEFAULT_RETRY_DELAYS = [1000, 5000, 15000] // 1s, 5s, 15s
+
+export interface RetryOptions {
+  /** Maximum number of retry attempts (default: 3) */
+  maxRetries?: number
+  /** Delay between retries in ms (default: [1000, 5000, 15000]) */
+  retryDelays?: number[]
+  /** Called on each retry attempt */
+  onRetry?: (attempt: number, error: Error) => void
+  /** Custom function to determine if error is retryable */
+  shouldRetry?: (error: Error) => boolean
+}
+
+/**
+ * Check if an error is retryable
+ */
+function isRetryableError(error: unknown): boolean {
+  if (isCryptoError(error)) {
+    return (
+      error.code === ERROR_CODES.NETWORK_FAILED ||
+      error.code === ERROR_CODES.SYNC_TIMEOUT ||
+      error.code === ERROR_CODES.SERVER_ERROR
+    )
+  }
+
+  // Check for standard network errors
+  if (error instanceof Error) {
+    const message = error.message.toLowerCase()
+    return (
+      message.includes('network') ||
+      message.includes('fetch') ||
+      message.includes('timeout') ||
+      message.includes('connection')
+    )
+  }
+
+  return false
+}
+
+/**
+ * Delay for a specified number of milliseconds
+ */
+function delay(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms))
+}
+
+/**
+ * Execute a function with retry logic
+ *
+ * @param fn - The async function to execute
+ * @param options - Retry options
+ * @returns The result of the function
+ * @throws The last error if all retries fail
+ *
+ * @example
+ * ```ts
+ * const result = await withRetry(
+ *   () => fetchData(),
+ *   { onRetry: (attempt) => console.log(`Retry ${attempt}`) }
+ * )
+ * ```
+ */
+export async function withRetry<T>(
+  fn: () => Promise<T>,
+  options?: RetryOptions
+): Promise<T> {
+  const {
+    maxRetries = DEFAULT_MAX_RETRIES,
+    retryDelays = DEFAULT_RETRY_DELAYS,
+    onRetry,
+    shouldRetry = isRetryableError,
+  } = options ?? {}
+
+  let lastError: Error | undefined
+
+  for (let attempt = 0; attempt <= maxRetries; attempt++) {
+    try {
+      return await fn()
+    } catch (error) {
+      lastError = error instanceof Error ? error : new Error(String(error))
+
+      const isRetryable = shouldRetry(lastError)
+      const hasMoreAttempts = attempt < maxRetries
+
+      if (isRetryable && hasMoreAttempts) {
+        const delayMs = retryDelays[attempt] ?? retryDelays[retryDelays.length - 1]
+        onRetry?.(attempt + 1, lastError)
+        await delay(delayMs)
+        continue
+      }
+
+      // Not retryable or no more attempts - throw
+      throw lastError
+    }
+  }
+
+  // Should never reach here, but TypeScript needs this
+  throw lastError ?? new Error('Unknown error')
+}
+
+/**
+ * Create a retryable version of an async function
+ *
+ * @param fn - The async function to wrap
+ * @param options - Retry options
+ * @returns A wrapped function with retry logic
+ *
+ * @example
+ * ```ts
+ * const retryableFetch = makeRetryable(fetchData, { maxRetries: 5 })
+ * const result = await retryableFetch()
+ * ```
+ */
+export function makeRetryable<T extends unknown[], R>(
+  fn: (...args: T) => Promise<R>,
+  options?: RetryOptions
+): (...args: T) => Promise<R> {
+  return (...args: T) => withRetry(() => fn(...args), options)
+}
+
+/**
+ * Wrap a network error as a CryptoError
+ */
+export function wrapNetworkError(error: unknown): CryptoError {
+  if (isCryptoError(error)) {
+    return error
+  }
+
+  const message = error instanceof Error ? error.message : 'Network error'
+  return new CryptoError(ERROR_CODES.NETWORK_FAILED, message, {
+    cause: error instanceof Error ? error : undefined,
+  })
+}
diff --git a/app/src/features/security/lib/plugins/__tests__/plugin-encryption.test.ts b/app/src/features/security/lib/plugins/__tests__/plugin-encryption.test.ts
new file mode 100644
index 00000000..50885a1b
--- /dev/null
+++ b/app/src/features/security/lib/plugins/__tests__/plugin-encryption.test.ts
@@ -0,0 +1,183 @@
+import { describe, it, expect, beforeAll } from 'vitest'
+
+import { generateKey } from '@/shared/lib/crypto'
+
+import { derivePluginDEK } from '../plugin-dek'
+import {
+  encryptKV,
+  decryptKV,
+  isEncryptedKVValue,
+  isLegacyKVValue,
+} from '../plugin-kv'
+import {
+  encryptRecordData,
+  decryptRecordData,
+  decryptRecords,
+  isEncryptedRecordData,
+} from '../plugin-records'
+
+describe('Plugin DEK derivation', () => {
+  let documentDEK: Uint8Array
+
+  beforeAll(async () => {
+    documentDEK = await generateKey()
+  })
+
+  it('derives a 32-byte DEK from document DEK', async () => {
+    const pluginDEK = await derivePluginDEK(documentDEK, 'test-plugin')
+    expect(pluginDEK).toBeInstanceOf(Uint8Array)
+    expect(pluginDEK.length).toBe(32)
+  })
+
+  it('derives different DEKs for different plugins', async () => {
+    const dek1 = await derivePluginDEK(documentDEK, 'plugin-a')
+    const dek2 = await derivePluginDEK(documentDEK, 'plugin-b')
+
+    expect(dek1).not.toEqual(dek2)
+  })
+
+  it('derives the same DEK for the same plugin', async () => {
+    const dek1 = await derivePluginDEK(documentDEK, 'test-plugin')
+    const dek2 = await derivePluginDEK(documentDEK, 'test-plugin')
+
+    expect(dek1).toEqual(dek2)
+  })
+
+  it('derives different DEKs for different document DEKs', async () => {
+    const anotherDocDEK = await generateKey()
+    const dek1 = await derivePluginDEK(documentDEK, 'test-plugin')
+    const dek2 = await derivePluginDEK(anotherDocDEK, 'test-plugin')
+
+    expect(dek1).not.toEqual(dek2)
+  })
+})
+
+describe('Plugin KV encryption', () => {
+  let documentDEK: Uint8Array
+  const pluginId = 'test-plugin'
+
+  beforeAll(async () => {
+    documentDEK = await generateKey()
+  })
+
+  it('encrypts and decrypts string value', async () => {
+    const original = 'hello world'
+    const encrypted = await encryptKV(original,
documentDEK, pluginId) + const decrypted = await decryptKV(encrypted, documentDEK, pluginId) + + expect(decrypted).toBe(original) + expect(isEncryptedKVValue(encrypted)).toBe(true) + }) + + it('encrypts and decrypts object value', async () => { + const original = { isKanban: true, columns: ['todo', 'doing', 'done'] } + const encrypted = await encryptKV(original, documentDEK, pluginId) + const decrypted = await decryptKV(encrypted, documentDEK, pluginId) + + expect(decrypted).toEqual(original) + }) + + it('encrypts and decrypts array value', async () => { + const original = [1, 2, 3, 'four', { five: 5 }] + const encrypted = await encryptKV(original, documentDEK, pluginId) + const decrypted = await decryptKV(encrypted, documentDEK, pluginId) + + expect(decrypted).toEqual(original) + }) + + it('handles null value', async () => { + const decrypted = await decryptKV(null, documentDEK, pluginId) + expect(decrypted).toBeNull() + }) + + it('handles undefined value', async () => { + const decrypted = await decryptKV(undefined, documentDEK, pluginId) + expect(decrypted).toBeUndefined() + }) + + it('handles legacy plaintext value', async () => { + const legacy = { value: 'old data', _encrypted: false as const } + expect(isLegacyKVValue(legacy)).toBe(true) + const decrypted = await decryptKV(legacy, documentDEK, pluginId) + expect(decrypted).toBe('old data') + }) + + it('returns unknown format as-is for backward compatibility', async () => { + const unknown = { someData: 'raw' } + const decrypted = await decryptKV(unknown, documentDEK, pluginId) + expect(decrypted).toEqual(unknown) + }) + + it('different plugins cannot decrypt each other data', async () => { + const original = 'secret data' + const encrypted = await encryptKV(original, documentDEK, 'plugin-a') + + await expect( + decryptKV(encrypted, documentDEK, 'plugin-b') + ).rejects.toThrow() + }) +}) + +describe('Plugin Records encryption', () => { + let documentDEK: Uint8Array + const pluginId = 'test-plugin' + + beforeAll(async () => { + documentDEK = await generateKey() + }) + + it('encrypts and decrypts record data', async () => { + const original = { title: 'Task 1', description: 'Do something', priority: 1 } + const encrypted = await encryptRecordData(original, documentDEK, pluginId) + const decrypted = await decryptRecordData(encrypted, documentDEK, pluginId) + + expect(decrypted).toEqual(original) + expect(isEncryptedRecordData(encrypted)).toBe(true) + }) + + it('handles null data', async () => { + const decrypted = await decryptRecordData(null, documentDEK, pluginId) + expect(decrypted).toBeNull() + }) + + it('returns unknown format as-is for backward compatibility', async () => { + const legacy = { title: 'Old task' } + const decrypted = await decryptRecordData(legacy, documentDEK, pluginId) + expect(decrypted).toEqual(legacy) + }) + + it('decrypts multiple records', async () => { + const records = [ + { id: '1', data: await encryptRecordData({ title: 'Task 1' }, documentDEK, pluginId), createdAt: '2024-01-01' }, + { id: '2', data: await encryptRecordData({ title: 'Task 2' }, documentDEK, pluginId), createdAt: '2024-01-02' }, + { id: '3', data: { title: 'Legacy Task' }, createdAt: '2024-01-03' }, // legacy plaintext + ] + + const decrypted = await decryptRecords(records, documentDEK, pluginId) + + expect(decrypted).toHaveLength(3) + expect(decrypted[0].data).toEqual({ title: 'Task 1' }) + expect(decrypted[1].data).toEqual({ title: 'Task 2' }) + expect(decrypted[2].data).toEqual({ title: 'Legacy Task' }) + 
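+      // plaintext metadata (id, createdAt) passes through decryptRecords unchanged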
+      expect(decrypted[0].id).toBe('1')
+      expect(decrypted[0].createdAt).toBe('2024-01-01')
+    })
+
+    it('handles records without data field', async () => {
+      const records = [
+        { id: '1', someOtherField: 'value' },
+      ]
+
+      const decrypted = await decryptRecords(records, documentDEK, pluginId)
+      expect(decrypted[0]).toEqual({ id: '1', someOtherField: 'value' })
+    })
+
+    it('different plugins cannot decrypt each other records', async () => {
+      const original = { secret: 'data' }
+      const encrypted = await encryptRecordData(original, documentDEK, 'plugin-a')
+
+      await expect(
+        decryptRecordData(encrypted, documentDEK, 'plugin-b')
+      ).rejects.toThrow()
+    })
+})
diff --git a/app/src/features/security/lib/plugins/index.ts b/app/src/features/security/lib/plugins/index.ts
new file mode 100644
index 00000000..0fd7d3d3
--- /dev/null
+++ b/app/src/features/security/lib/plugins/index.ts
@@ -0,0 +1,32 @@
+/**
+ * Plugin E2EE Module
+ *
+ * Provides transparent encryption for plugin KV and Records storage.
+ * Plugins do not need to handle encryption themselves - the runtime
+ * layer encrypts data before sending to the server and decrypts
+ * when receiving.
+ */
+
+// Plugin DEK derivation
+export { derivePluginDEK } from './plugin-dek'
+
+// KV encryption
+export {
+  encryptKV,
+  decryptKV,
+  isEncryptedKVValue,
+  isLegacyKVValue,
+  type EncryptedKVValue,
+  type LegacyKVValue,
+} from './plugin-kv'
+
+// Records encryption
+export {
+  encryptRecordData,
+  decryptRecordData,
+  decryptRecords,
+  isEncryptedRecordData,
+  type EncryptedRecordData,
+  type EncryptedRecord,
+  type PlaintextRecord,
+} from './plugin-records'
diff --git a/app/src/features/security/lib/plugins/plugin-dek.ts b/app/src/features/security/lib/plugins/plugin-dek.ts
new file mode 100644
index 00000000..423d65e5
--- /dev/null
+++ b/app/src/features/security/lib/plugins/plugin-dek.ts
@@ -0,0 +1,45 @@
+/**
+ * Plugin DEK derivation module
+ *
+ * Derives a unique DEK (Data Encryption Key) for each plugin from the document DEK.
+ * This ensures that each plugin has its own encryption namespace, and plugins
+ * cannot access each other's encrypted data.
+ */
+
+import { deriveKey, HKDF_CONTEXTS } from '@/shared/lib/crypto'
+
+/**
+ * Derive a plugin-specific DEK from the document DEK.
+ *
+ * Uses HKDF-SHA256 with context 'refmd_pl' and a subkey ID derived from
+ * the plugin ID hash to ensure each plugin gets a unique key.
+ *
+ * @param documentDEK - 32-byte document DEK
+ * @param pluginId - Unique plugin identifier
+ * @returns 32-byte plugin DEK
+ */
+export async function derivePluginDEK(
+  documentDEK: Uint8Array,
+  pluginId: string
+): Promise<Uint8Array> {
+  // Generate a deterministic subkey ID from the plugin ID using a
+  // 31-multiplier rolling hash truncated to a positive 31-bit integer
+  const subkeyId = hashPluginId(pluginId)
+
+  return deriveKey(documentDEK, subkeyId, HKDF_CONTEXTS.PLUGIN, 32)
+}
+
+/**
+ * Hash plugin ID to a numeric subkey ID.
+ *
+ * Produces a deterministic 31-bit positive integer from the plugin ID string.
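+ *
+ * Collisions are theoretically possible in a 31-bit space but negligible for
+ * realistic plugin ID sets, and the mapping is stable: the same plugin ID
+ * always yields the same subkey ID across sessions and devices.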
+ */
+function hashPluginId(pluginId: string): number {
+  let hash = 0
+  for (let i = 0; i < pluginId.length; i++) {
+    const char = pluginId.charCodeAt(i)
+    hash = ((hash << 5) - hash + char) | 0
+  }
+  // Ensure positive 31-bit integer
+  return Math.abs(hash) & 0x7fffffff
+}
diff --git a/app/src/features/security/lib/plugins/plugin-kv.ts b/app/src/features/security/lib/plugins/plugin-kv.ts
new file mode 100644
index 00000000..f803311b
--- /dev/null
+++ b/app/src/features/security/lib/plugins/plugin-kv.ts
@@ -0,0 +1,124 @@
+/**
+ * Plugin KV encryption/decryption module
+ *
+ * Provides transparent encryption for plugin key-value storage.
+ * Values are encrypted with the plugin-specific DEK before storage,
+ * and decrypted when retrieved.
+ */
+
+import { toBase64, fromBase64, encrypt, decrypt } from '@/shared/lib/crypto'
+
+import { derivePluginDEK } from './plugin-dek'
+
+/** Encrypted KV value format */
+export interface EncryptedKVValue {
+  /** Base64-encoded ciphertext */
+  ciphertext: string
+  /** Base64-encoded nonce */
+  nonce: string
+  /** Encryption version for future compatibility */
+  _v: 1
+  /** Marker to distinguish from legacy plaintext values */
+  _encrypted: true
+}
+
+/** Legacy plaintext value format (for backward compatibility) */
+export interface LegacyKVValue {
+  value: unknown
+  _encrypted: false
+}
+
+/**
+ * Check if a stored value is encrypted.
+ */
+export function isEncryptedKVValue(value: unknown): value is EncryptedKVValue {
+  return (
+    value !== null &&
+    typeof value === 'object' &&
+    '_encrypted' in value &&
+    (value as any)._encrypted === true &&
+    '_v' in value &&
+    (value as any)._v === 1 &&
+    'ciphertext' in value &&
+    'nonce' in value
+  )
+}
+
+/**
+ * Check if a stored value is a legacy plaintext value.
+ */
+export function isLegacyKVValue(value: unknown): value is LegacyKVValue {
+  return (
+    value !== null &&
+    typeof value === 'object' &&
+    '_encrypted' in value &&
+    (value as any)._encrypted === false &&
+    'value' in value
+  )
+}
+
+/**
+ * Encrypt a KV value for storage.
+ *
+ * @param value - Value to encrypt (will be JSON serialized)
+ * @param documentDEK - Document DEK
+ * @param pluginId - Plugin identifier for key derivation
+ * @returns Encrypted value object ready for storage
+ */
+export async function encryptKV(
+  value: unknown,
+  documentDEK: Uint8Array,
+  pluginId: string
+): Promise<EncryptedKVValue> {
+  const pluginDEK = await derivePluginDEK(documentDEK, pluginId)
+
+  const plaintext = new TextEncoder().encode(JSON.stringify(value))
+  const { ciphertext, nonce } = await encrypt(pluginDEK, plaintext)
+
+  return {
+    ciphertext: await toBase64(ciphertext),
+    nonce: await toBase64(nonce),
+    _v: 1,
+    _encrypted: true,
+  }
+}
+
+/**
+ * Decrypt a KV value from storage.
+ *
+ * Handles both encrypted and legacy plaintext values.
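+ *
+ * @example
+ * ```ts
+ * // Illustrative round-trip; documentDEK comes from the unlocked key vault
+ * const stored = await encryptKV({ done: true }, documentDEK, 'kanban')
+ * const value = await decryptKV(stored, documentDEK, 'kanban') // { done: true }
+ * ```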
+ *
+ * @param stored - Stored value (may be encrypted or legacy)
+ * @param documentDEK - Document DEK
+ * @param pluginId - Plugin identifier for key derivation
+ * @returns Decrypted value
+ */
+export async function decryptKV(
+  stored: unknown,
+  documentDEK: Uint8Array,
+  pluginId: string
+): Promise<unknown> {
+  // Handle null/undefined
+  if (stored === null || stored === undefined) {
+    return stored
+  }
+
+  // Handle legacy plaintext values
+  if (isLegacyKVValue(stored)) {
+    return stored.value
+  }
+
+  // Handle encrypted values
+  if (isEncryptedKVValue(stored)) {
+    const pluginDEK = await derivePluginDEK(documentDEK, pluginId)
+    const ciphertext = await fromBase64(stored.ciphertext)
+    const nonce = await fromBase64(stored.nonce)
+
+    const plaintext = await decrypt(pluginDEK, ciphertext, nonce)
+    return JSON.parse(new TextDecoder().decode(plaintext))
+  }
+
+  // Unknown format - return as-is for backward compatibility
+  // This handles cases where the value was stored before encryption was enabled
+  return stored
+}
diff --git a/app/src/features/security/lib/plugins/plugin-records.ts b/app/src/features/security/lib/plugins/plugin-records.ts
new file mode 100644
index 00000000..ef2e08a2
--- /dev/null
+++ b/app/src/features/security/lib/plugins/plugin-records.ts
@@ -0,0 +1,153 @@
+/**
+ * Plugin Records encryption/decryption module
+ *
+ * Provides transparent encryption for plugin records storage.
+ * Record data is encrypted with the plugin-specific DEK before storage,
+ * and decrypted when retrieved.
+ *
+ * Note: Only the `data` field is encrypted. Metadata fields (id, kind,
+ * createdAt, updatedAt) remain in plaintext for routing and sorting.
+ */
+
+import { toBase64, fromBase64, encrypt, decrypt } from '@/shared/lib/crypto'
+
+import { derivePluginDEK } from './plugin-dek'
+
+/** Encrypted record data format */
+export interface EncryptedRecordData {
+  /** Base64-encoded ciphertext */
+  ciphertext: string
+  /** Base64-encoded nonce */
+  nonce: string
+  /** Encryption version for future compatibility */
+  _v: 1
+  /** Marker to distinguish from legacy plaintext values */
+  _encrypted: true
+}
+
+/** Record with encrypted data field */
+export interface EncryptedRecord {
+  id: string
+  data: EncryptedRecordData
+  createdAt?: string
+  updatedAt?: string
+  [key: string]: unknown
+}
+
+/** Record with plaintext data field (legacy or decrypted) */
+export interface PlaintextRecord {
+  id: string
+  data: unknown
+  createdAt?: string
+  updatedAt?: string
+  [key: string]: unknown
+}
+
+/**
+ * Check if a record's data is encrypted.
+ */
+export function isEncryptedRecordData(data: unknown): data is EncryptedRecordData {
+  return (
+    data !== null &&
+    typeof data === 'object' &&
+    '_encrypted' in data &&
+    (data as any)._encrypted === true &&
+    '_v' in data &&
+    (data as any)._v === 1 &&
+    'ciphertext' in data &&
+    'nonce' in data
+  )
+}
+
+/**
+ * Encrypt record data for storage.
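+ *
+ * @example
+ * ```ts
+ * // Illustrative: only the data field becomes ciphertext; id stays plaintext.
+ * // How the { id, data } pair is persisted is runtime-specific.
+ * const data = await encryptRecordData({ title: 'Task 1' }, documentDEK, 'kanban')
+ * ```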
+ *
+ * @param data - Record data to encrypt (will be JSON serialized)
+ * @param documentDEK - Document DEK
+ * @param pluginId - Plugin identifier for key derivation
+ * @returns Encrypted data object ready for storage
+ */
+export async function encryptRecordData(
+  data: unknown,
+  documentDEK: Uint8Array,
+  pluginId: string
+): Promise<EncryptedRecordData> {
+  const pluginDEK = await derivePluginDEK(documentDEK, pluginId)
+
+  const plaintext = new TextEncoder().encode(JSON.stringify(data))
+  const { ciphertext, nonce } = await encrypt(pluginDEK, plaintext)
+
+  return {
+    ciphertext: await toBase64(ciphertext),
+    nonce: await toBase64(nonce),
+    _v: 1,
+    _encrypted: true,
+  }
+}
+
+/**
+ * Decrypt record data from storage.
+ *
+ * Handles both encrypted and legacy plaintext data.
+ *
+ * @param data - Stored data (may be encrypted or legacy)
+ * @param documentDEK - Document DEK
+ * @param pluginId - Plugin identifier for key derivation
+ * @returns Decrypted data
+ */
+export async function decryptRecordData(
+  data: unknown,
+  documentDEK: Uint8Array,
+  pluginId: string
+): Promise<unknown> {
+  // Handle null/undefined
+  if (data === null || data === undefined) {
+    return data
+  }
+
+  // Handle encrypted data
+  if (isEncryptedRecordData(data)) {
+    const pluginDEK = await derivePluginDEK(documentDEK, pluginId)
+    const ciphertext = await fromBase64(data.ciphertext)
+    const nonce = await fromBase64(data.nonce)
+
+    const plaintext = await decrypt(pluginDEK, ciphertext, nonce)
+    return JSON.parse(new TextDecoder().decode(plaintext))
+  }
+
+  // Unknown format - return as-is for backward compatibility
+  return data
+}
+
+/**
+ * Decrypt multiple records from storage.
+ *
+ * @param records - Array of records with potentially encrypted data
+ * @param documentDEK - Document DEK
+ * @param pluginId - Plugin identifier for key derivation
+ * @returns Array of records with decrypted data
+ */
+export async function decryptRecords(
+  records: unknown[],
+  documentDEK: Uint8Array,
+  pluginId: string
+): Promise<PlaintextRecord[]> {
+  return Promise.all(
+    records.map(async (record) => {
+      if (!record || typeof record !== 'object') {
+        return record as PlaintextRecord
+      }
+
+      const rec = record as Record<string, unknown>
+      if (!('data' in rec)) {
+        return rec as PlaintextRecord
+      }
+
+      const decryptedData = await decryptRecordData(rec.data, documentDEK, pluginId)
+      return {
+        ...rec,
+        data: decryptedData,
+      } as PlaintextRecord
+    })
+  )
+}
diff --git a/app/src/features/security/lib/tags/__tests__/deterministic.test.ts b/app/src/features/security/lib/tags/__tests__/deterministic.test.ts
new file mode 100644
index 00000000..a8b47e66
--- /dev/null
+++ b/app/src/features/security/lib/tags/__tests__/deterministic.test.ts
@@ -0,0 +1,221 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest'
+
+import { getSodium } from '@/shared/lib/crypto'
+import {
+  encryptTagDeterministic,
+  encryptTags,
+  buildTagLookupTable,
+  decryptTag,
+  decryptTags,
+  TagLookupManager,
+  getTagLookupManager,
+  resetTagLookupManager,
+  HMAC_KEY_SIZE,
+} from '@/shared/lib/tags'
+
+describe('deterministic tag encryption', () => {
+  let testKek: Uint8Array
+
+  beforeEach(async () => {
+    // Generate a test KEK
+    const sodium = await getSodium()
+    testKek = sodium.randombytes_buf(HMAC_KEY_SIZE)
+    resetTagLookupManager()
+  })
+
+  afterEach(() => {
+    resetTagLookupManager()
+  })
+
+  describe('encryptTagDeterministic', () => {
+    it('should produce deterministic output for same tag and key', async () => {
+      const result1 = await encryptTagDeterministic('hello', testKek)
+      const
result2 = await encryptTagDeterministic('hello', testKek) + expect(result1).toBe(result2) + }) + + it('should produce different output for different tags', async () => { + const result1 = await encryptTagDeterministic('hello', testKek) + const result2 = await encryptTagDeterministic('world', testKek) + expect(result1).not.toBe(result2) + }) + + it('should produce different output for different keys', async () => { + const sodium = await getSodium() + const otherKek = sodium.randombytes_buf(HMAC_KEY_SIZE) + + const result1 = await encryptTagDeterministic('hello', testKek) + const result2 = await encryptTagDeterministic('hello', otherKek) + expect(result1).not.toBe(result2) + }) + + it('should normalize tags to lowercase', async () => { + const result1 = await encryptTagDeterministic('Hello', testKek) + const result2 = await encryptTagDeterministic('hello', testKek) + const result3 = await encryptTagDeterministic('HELLO', testKek) + expect(result1).toBe(result2) + expect(result2).toBe(result3) + }) + + it('should trim whitespace from tags', async () => { + const result1 = await encryptTagDeterministic(' hello ', testKek) + const result2 = await encryptTagDeterministic('hello', testKek) + expect(result1).toBe(result2) + }) + + it('should return Base64-encoded string', async () => { + const result = await encryptTagDeterministic('hello', testKek) + // Base64 pattern: alphanumeric, +, /, and = for padding + expect(result).toMatch(/^[A-Za-z0-9+/]+=*$/) + }) + + it('should throw for empty tag', async () => { + await expect(encryptTagDeterministic('', testKek)).rejects.toThrow() + await expect(encryptTagDeterministic(' ', testKek)).rejects.toThrow() + }) + + it('should throw for invalid KEK length', async () => { + const shortKek = new Uint8Array(16) + await expect(encryptTagDeterministic('hello', shortKek)).rejects.toThrow(/Invalid KEK length/) + }) + }) + + describe('encryptTags', () => { + it('should encrypt multiple tags', async () => { + const tags = ['hello', 'world', 'foo'] + const encrypted = await encryptTags(tags, testKek) + expect(encrypted).toHaveLength(3) + expect(encrypted[0]).not.toBe(encrypted[1]) + expect(encrypted[1]).not.toBe(encrypted[2]) + }) + + it('should maintain order', async () => { + const tags = ['alpha', 'beta', 'gamma'] + const encrypted = await encryptTags(tags, testKek) + + // Verify order by encrypting individually + const alpha = await encryptTagDeterministic('alpha', testKek) + const beta = await encryptTagDeterministic('beta', testKek) + const gamma = await encryptTagDeterministic('gamma', testKek) + + expect(encrypted[0]).toBe(alpha) + expect(encrypted[1]).toBe(beta) + expect(encrypted[2]).toBe(gamma) + }) + + it('should handle empty array', async () => { + const encrypted = await encryptTags([], testKek) + expect(encrypted).toEqual([]) + }) + }) + + describe('buildTagLookupTable and decryptTag', () => { + it('should build lookup table and decrypt', async () => { + const knownTags = ['hello', 'world'] + const table = await buildTagLookupTable(knownTags, testKek) + + const encrypted = await encryptTagDeterministic('hello', testKek) + expect(decryptTag(encrypted, table)).toBe('hello') + }) + + it('should return null for unknown encrypted tag', async () => { + const table = await buildTagLookupTable(['hello'], testKek) + const unknownEncrypted = await encryptTagDeterministic('unknown', testKek) + expect(decryptTag(unknownEncrypted, table)).toBeNull() + }) + + it('should deduplicate tags in lookup table', async () => { + const table = await 
buildTagLookupTable(['Hello', 'HELLO', 'hello'], testKek) + // Should only have one entry + expect(table.size).toBe(1) + }) + + it('should handle empty array', async () => { + const table = await buildTagLookupTable([], testKek) + expect(table.size).toBe(0) + }) + }) + + describe('decryptTags', () => { + it('should decrypt multiple tags', async () => { + const knownTags = ['alpha', 'beta', 'gamma'] + const table = await buildTagLookupTable(knownTags, testKek) + const encrypted = await encryptTags(knownTags, testKek) + + const decrypted = decryptTags(encrypted, table) + expect(decrypted).toEqual(['alpha', 'beta', 'gamma']) + }) + + it('should return null for unknown tags', async () => { + const table = await buildTagLookupTable(['known'], testKek) + const encrypted = [ + await encryptTagDeterministic('known', testKek), + await encryptTagDeterministic('unknown', testKek), + ] + + const decrypted = decryptTags(encrypted, table) + expect(decrypted).toEqual(['known', null]) + }) + }) + + describe('TagLookupManager', () => { + it('should encrypt and decrypt tags', async () => { + const manager = new TagLookupManager() + manager.setKek(testKek) + + const encrypted = await manager.encrypt('hello') + const decrypted = await manager.decrypt(encrypted) + expect(decrypted).toBe('hello') + }) + + it('should add known tags and decrypt them', async () => { + const manager = new TagLookupManager() + manager.setKek(testKek) + manager.addKnownTags(['alpha', 'beta']) + + const encryptedAlpha = await encryptTagDeterministic('alpha', testKek) + const encryptedBeta = await encryptTagDeterministic('beta', testKek) + + expect(await manager.decrypt(encryptedAlpha)).toBe('alpha') + expect(await manager.decrypt(encryptedBeta)).toBe('beta') + }) + + it('should return null for unknown tags', async () => { + const manager = new TagLookupManager() + manager.setKek(testKek) + manager.addKnownTags(['known']) + + const encryptedUnknown = await encryptTagDeterministic('unknown', testKek) + expect(await manager.decrypt(encryptedUnknown)).toBeNull() + }) + + it('should clear all state', async () => { + const manager = new TagLookupManager() + manager.setKek(testKek) + await manager.encrypt('hello') + + manager.clear() + expect(manager.getKnownTags()).toEqual([]) + }) + + it('should throw if KEK not set', async () => { + const manager = new TagLookupManager() + await expect(manager.encrypt('hello')).rejects.toThrow('KEK not set') + }) + }) + + describe('getTagLookupManager', () => { + it('should return singleton instance', () => { + const manager1 = getTagLookupManager() + const manager2 = getTagLookupManager() + expect(manager1).toBe(manager2) + }) + + it('should reset singleton', () => { + const manager1 = getTagLookupManager() + resetTagLookupManager() + const manager2 = getTagLookupManager() + expect(manager1).not.toBe(manager2) + }) + }) +}) diff --git a/app/src/features/security/lib/tags/__tests__/extract.test.ts b/app/src/features/security/lib/tags/__tests__/extract.test.ts new file mode 100644 index 00000000..7091f372 --- /dev/null +++ b/app/src/features/security/lib/tags/__tests__/extract.test.ts @@ -0,0 +1,216 @@ +import { describe, it, expect } from 'vitest' + +import { extractTags, extractTagsPreserveCase } from '@/shared/lib/tags' + +describe('extractTags', () => { + describe('basic extraction', () => { + it('should extract a single tag', () => { + expect(extractTags('Hello #world')).toEqual(['world']) + }) + + it('should extract multiple tags', () => { + const tags = extractTags('Hello #world and #foo') + 
expect(tags).toContain('world') + expect(tags).toContain('foo') + expect(tags).toHaveLength(2) + }) + + it('should extract tags with underscores', () => { + expect(extractTags('#foo_bar')).toEqual(['foo_bar']) + }) + + it('should extract tags with numbers', () => { + expect(extractTags('#tag123')).toEqual(['tag123']) + }) + + it('should normalize tags to lowercase', () => { + expect(extractTags('#Hello #WORLD #FooBar')).toEqual(['hello', 'world', 'foobar']) + }) + + it('should deduplicate tags (case-insensitive)', () => { + expect(extractTags('#hello #Hello #HELLO')).toEqual(['hello']) + }) + + it('should extract tag at start of text', () => { + expect(extractTags('#start of text')).toEqual(['start']) + }) + + it('should extract tag at end of text', () => { + expect(extractTags('end of text #end')).toEqual(['end']) + }) + }) + + describe('preceding character checks', () => { + it('should ignore tag preceded by alphanumeric', () => { + expect(extractTags('word#tag')).toEqual([]) + expect(extractTags('123#tag')).toEqual([]) + }) + + it('should ignore tag preceded by slash', () => { + expect(extractTags('path/#tag')).toEqual([]) + expect(extractTags('https://example.com#anchor')).toEqual([]) + }) + + it('should ignore tag preceded by colon', () => { + expect(extractTags('prefix:#tag')).toEqual([]) + }) + + it('should ignore tag preceded by at sign', () => { + expect(extractTags('email@domain.com#tag')).toEqual([]) + }) + + it('should ignore tag preceded by dot', () => { + expect(extractTags('file.#tag')).toEqual([]) + }) + + it('should ignore tag preceded by hyphen', () => { + expect(extractTags('word-#tag')).toEqual([]) + }) + + it('should ignore tag preceded by underscore', () => { + expect(extractTags('word_#tag')).toEqual([]) + }) + + it('should ignore tag preceded by plus', () => { + expect(extractTags('word+#tag')).toEqual([]) + }) + + it('should ignore tag preceded by tilde', () => { + expect(extractTags('word~#tag')).toEqual([]) + }) + + it('should ignore tag preceded by equals', () => { + expect(extractTags('word=#tag')).toEqual([]) + }) + + it('should ignore tag preceded by question mark', () => { + expect(extractTags('param?#tag')).toEqual([]) + }) + + it('should ignore tag preceded by ampersand', () => { + expect(extractTags('param&#tag')).toEqual([]) + }) + + it('should ignore tag preceded by percent', () => { + expect(extractTags('100%#tag')).toEqual([]) + }) + + it('should extract tag preceded by space', () => { + expect(extractTags('word #tag')).toEqual(['tag']) + }) + + it('should extract tag preceded by newline', () => { + expect(extractTags('word\n#tag')).toEqual(['tag']) + }) + + it('should extract tag preceded by tab', () => { + expect(extractTags('word\t#tag')).toEqual(['tag']) + }) + + it('should extract tag preceded by parenthesis', () => { + expect(extractTags('(#tag)')).toEqual(['tag']) + }) + + it('should extract tag preceded by bracket', () => { + expect(extractTags('[#tag]')).toEqual(['tag']) + }) + }) + + describe('code block exclusion', () => { + it('should ignore tags inside fenced code blocks', () => { + const markdown = '```\n#code_tag\n```\n#real_tag' + expect(extractTags(markdown)).toEqual(['real_tag']) + }) + + it('should ignore tags inside inline code', () => { + const markdown = 'Check `#inline_code` and #real_tag' + expect(extractTags(markdown)).toEqual(['real_tag']) + }) + + it('should handle multiple code blocks', () => { + const markdown = '```\n#tag1\n```\n#real1\n```js\n#tag2\n```\n#real2' + const tags = extractTags(markdown) + 
expect(tags).toContain('real1') + expect(tags).toContain('real2') + expect(tags).not.toContain('tag1') + expect(tags).not.toContain('tag2') + }) + + it('should handle code block with language specifier', () => { + const markdown = '```typescript\nconst x = "#not_a_tag";\n```\n#real_tag' + expect(extractTags(markdown)).toEqual(['real_tag']) + }) + }) + + describe('length limits', () => { + it('should extract tags up to 50 characters', () => { + const tag50 = 'a'.repeat(50) + expect(extractTags(`#${tag50}`)).toEqual([tag50]) + }) + + it('should truncate tags longer than 50 characters', () => { + const tag60 = 'a'.repeat(60) + const expected = 'a'.repeat(50) + expect(extractTags(`#${tag60}`)).toEqual([expected]) + }) + }) + + describe('edge cases', () => { + it('should return empty array for empty string', () => { + expect(extractTags('')).toEqual([]) + }) + + it('should return empty array for null/undefined', () => { + expect(extractTags(null as unknown as string)).toEqual([]) + expect(extractTags(undefined as unknown as string)).toEqual([]) + }) + + it('should return empty array for string without tags', () => { + expect(extractTags('Hello world')).toEqual([]) + }) + + it('should return empty array for lone hash', () => { + expect(extractTags('#')).toEqual([]) + expect(extractTags('# ')).toEqual([]) + }) + + it('should handle consecutive tags', () => { + const tags = extractTags('#tag1#tag2') + // First tag is valid, second is preceded by alphanumeric + expect(tags).toEqual(['tag1']) + }) + + it('should handle tags separated by space', () => { + const tags = extractTags('#tag1 #tag2') + expect(tags).toContain('tag1') + expect(tags).toContain('tag2') + }) + + it('should handle markdown headings (not tags)', () => { + // # followed by space is a heading, not a tag + expect(extractTags('# Heading')).toEqual([]) + expect(extractTags('## Another Heading')).toEqual([]) + }) + + it('should handle special characters in tag body', () => { + // Only alphanumeric and underscore are valid + expect(extractTags('#tag-with-dash')).toEqual(['tag']) + expect(extractTags('#tag.with.dot')).toEqual(['tag']) + }) + }) +}) + +describe('extractTagsPreserveCase', () => { + it('should preserve original casing of first occurrence', () => { + expect(extractTagsPreserveCase('#Hello #WORLD')).toEqual(['Hello', 'WORLD']) + }) + + it('should deduplicate based on lowercase but keep first casing', () => { + expect(extractTagsPreserveCase('#Hello #hello #HELLO')).toEqual(['Hello']) + }) + + it('should work with mixed case tags', () => { + const tags = extractTagsPreserveCase('#FooBar #another #FooBar') + expect(tags).toEqual(['FooBar', 'another']) + }) +}) diff --git a/app/src/features/security/lib/tags/index.ts b/app/src/features/security/lib/tags/index.ts new file mode 100644 index 00000000..a115de2c --- /dev/null +++ b/app/src/features/security/lib/tags/index.ts @@ -0,0 +1,7 @@ +/** + * E2EE Tag Module + * + * Re-exported from shared/lib/tags for backward compatibility. + */ + +export * from '@/shared/lib/tags' diff --git a/app/src/features/security/lib/types/index.ts b/app/src/features/security/lib/types/index.ts new file mode 100644 index 00000000..d86d305a --- /dev/null +++ b/app/src/features/security/lib/types/index.ts @@ -0,0 +1,7 @@ +/** + * E2EE Type Definitions + * + * Re-exported from shared/types/security for backward compatibility. 
+ */
+
+export * from '@/shared/types/security'
diff --git a/app/src/features/security/ui/DecryptionErrorDialog.tsx b/app/src/features/security/ui/DecryptionErrorDialog.tsx
new file mode 100644
index 00000000..ed64f769
--- /dev/null
+++ b/app/src/features/security/ui/DecryptionErrorDialog.tsx
@@ -0,0 +1,223 @@
+import { AlertTriangle, Key, RefreshCw, ShieldAlert, X } from 'lucide-react'
+
+import { type CryptoError, ERROR_CODES } from '@/shared/types/security'
+import {
+  AlertDialog,
+  AlertDialogContent,
+  AlertDialogDescription,
+  AlertDialogFooter,
+  AlertDialogHeader,
+  AlertDialogTitle,
+} from '@/shared/ui/alert-dialog'
+import { Button } from '@/shared/ui/button'
+
+type RecoveryAction = 'retry' | 'recovery_key' | 'contact_admin' | 'close'
+
+interface DecryptionErrorDialogProps {
+  /** Whether the dialog is open */
+  open: boolean
+  /** Callback when open state changes */
+  onOpenChange: (open: boolean) => void
+  /** The error that occurred */
+  error: CryptoError | null
+  /** Document ID (for context display) */
+  documentId?: string
+  /** Callback when retry is requested */
+  onRetry?: () => void
+  /** Callback when recovery key input is requested */
+  onRecoveryKey?: () => void
+}
+
+/**
+ * Determine the appropriate recovery actions based on error code
+ */
+function getRecoveryActions(error: CryptoError): RecoveryAction[] {
+  switch (error.code) {
+    case ERROR_CODES.SIGNATURE_INVALID:
+      // Data may be tampered - contact admin
+      return ['contact_admin', 'close']
+
+    case ERROR_CODES.DECRYPTION_FAILED:
+    case ERROR_CODES.KEY_NOT_FOUND:
+    case ERROR_CODES.KEY_INVALID:
+      // Key issues - try recovery key
+      return ['recovery_key', 'retry', 'close']
+
+    case ERROR_CODES.FILE_CORRUPTED:
+      // File corrupted - not much we can do
+      return ['contact_admin', 'close']
+
+    case ERROR_CODES.NETWORK_FAILED:
+    case ERROR_CODES.SYNC_TIMEOUT:
+      // Network issues - retry
+      return ['retry', 'close']
+
+    default:
+      return ['retry', 'close']
+  }
+}
+
+/**
+ * Get icon for error type
+ */
+function ErrorIcon({ error }: { error: CryptoError }) {
+  switch (error.code) {
+    case ERROR_CODES.SIGNATURE_INVALID:
+      return <ShieldAlert />
+    case ERROR_CODES.KEY_NOT_FOUND:
+    case ERROR_CODES.KEY_INVALID:
+      return <Key />
+    default:
+      return <AlertTriangle />
+  }
+}
+
+/**
+ * Get title for error type
+ */
+function getErrorTitle(error: CryptoError): string {
+  switch (error.code) {
+    case ERROR_CODES.SIGNATURE_INVALID:
+      return 'Data Integrity Error'
+    case ERROR_CODES.KEY_NOT_FOUND:
+      return 'Encryption Key Missing'
+    case ERROR_CODES.KEY_INVALID:
+      return 'Invalid Encryption Key'
+    case ERROR_CODES.DECRYPTION_FAILED:
+      return 'Decryption Failed'
+    case ERROR_CODES.FILE_CORRUPTED:
+      return 'File Corrupted'
+    case ERROR_CODES.NETWORK_FAILED:
+      return 'Network Error'
+    default:
+      return 'Error'
+  }
+}
+
+/**
+ * Get description for error type
+ */
+function getErrorDescription(error: CryptoError): string {
+  switch (error.code) {
+    case ERROR_CODES.SIGNATURE_INVALID:
+      return 'The document signature is invalid. This could indicate that the content has been tampered with or corrupted.'
+    case ERROR_CODES.KEY_NOT_FOUND:
+      return 'The encryption key for this document was not found. You may need to restore your keys using your recovery key.'
+    case ERROR_CODES.KEY_INVALID:
+      return 'The encryption key is invalid. Try entering your passphrase again or use your recovery key.'
+    case ERROR_CODES.DECRYPTION_FAILED:
+      return 'Failed to decrypt the content. The data may be corrupted or your encryption keys may need to be restored.'
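+    // The key-related codes above are the same set that getRecoveryActions
+    // maps to the 'recovery_key' action, keeping copy and options consistent.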
+    case ERROR_CODES.FILE_CORRUPTED:
+      return 'The file appears to be corrupted and cannot be opened.'
+    default:
+      return error.getUserMessage()
+  }
+}
+
+export function DecryptionErrorDialog({
+  open,
+  onOpenChange,
+  error,
+  documentId,
+  onRetry,
+  onRecoveryKey,
+}: DecryptionErrorDialogProps) {
+  if (!error) return null
+
+  const actions = getRecoveryActions(error)
+
+  const handleAction = (action: RecoveryAction) => {
+    switch (action) {
+      case 'retry':
+        onRetry?.()
+        onOpenChange(false)
+        break
+      case 'recovery_key':
+        onRecoveryKey?.()
+        onOpenChange(false)
+        break
+      case 'contact_admin':
+        // For now, just close - in the future could open support dialog
+        onOpenChange(false)
+        break
+      case 'close':
+        onOpenChange(false)
+        break
+    }
+  }
+
+  return (
+    <AlertDialog open={open} onOpenChange={onOpenChange}>
+      <AlertDialogContent>
+        <AlertDialogHeader>
+          <div>
+            <ErrorIcon error={error} />
+            <div>
+              <AlertDialogTitle>{getErrorTitle(error)}</AlertDialogTitle>
+              {documentId && (
+                <span>Document: {documentId.slice(0, 8)}...</span>
+              )}
+            </div>
+          </div>
+        </AlertDialogHeader>
+
+        <AlertDialogDescription>{getErrorDescription(error)}</AlertDialogDescription>
+
+        {error.code === ERROR_CODES.SIGNATURE_INVALID && (
+          <div>
+            <p>Security Warning</p>
+            <p>
+              If you did not expect this document to be modified, please contact your workspace administrator.
+            </p>
+          </div>
+        )}
+
+        <p>Error code: {error.code}</p>
+
+        <AlertDialogFooter>
+          {actions.includes('contact_admin') && (
+            <Button variant="outline" onClick={() => handleAction('contact_admin')}>
+              Contact Administrator
+            </Button>
+          )}
+
+          {actions.includes('retry') && onRetry && (
+            <Button variant="outline" onClick={() => handleAction('retry')}>
+              <RefreshCw />
+              Retry
+            </Button>
+          )}
+
+          {actions.includes('recovery_key') && onRecoveryKey && (
+            <Button onClick={() => handleAction('recovery_key')}>
+              <Key />
+              Use Recovery Key
+            </Button>
+          )}
+
+          {actions.includes('close') && !actions.includes('recovery_key') && (
+            <Button onClick={() => handleAction('close')}>
+              <X />
+              Close
+            </Button>
+          )}
+        </AlertDialogFooter>
+      </AlertDialogContent>
+    </AlertDialog>
+  )
+}
diff --git a/app/src/features/security/ui/ErrorBoundary.tsx b/app/src/features/security/ui/ErrorBoundary.tsx
new file mode 100644
index 00000000..ec2ebe5b
--- /dev/null
+++ b/app/src/features/security/ui/ErrorBoundary.tsx
@@ -0,0 +1,209 @@
+import { AlertTriangle, Key, RefreshCw } from 'lucide-react'
+import { Component, type ReactNode } from 'react'
+
+import {
+  CryptoError,
+  ERROR_CATEGORY,
+  ERROR_CODES,
+  isCryptoError,
+} from '@/shared/types/security'
+import { Button } from '@/shared/ui/button'
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/shared/ui/card'
+
+import { logError } from '../lib/error-log/log-error'
+
+interface ErrorBoundaryProps {
+  /** Children to render */
+  children: ReactNode
+  /** Fallback UI when error is not a CryptoError */
+  fallback?: ReactNode
+  /** Called when an error occurs */
+  onError?: (error: Error) => void
+  /** Called when recovery key input is requested */
+  onRecoveryKey?: () => void
+  /** Called when retry is requested */
+  onRetry?: () => void
+}
+
+interface ErrorBoundaryState {
+  hasError: boolean
+  error: Error | null
+  cryptoError: CryptoError | null
+}
+
+/**
+ * Error Boundary for E2EE errors
+ *
+ * Catches CryptoError instances and displays appropriate recovery UI.
+ *
+ * @example
+ * ```tsx
+ * <ErrorBoundary onRecoveryKey={() => navigate('/recovery')}>
+ *   <EncryptedContent />
+ * </ErrorBoundary>
+ * ```
+ */
+export class ErrorBoundary extends Component<ErrorBoundaryProps, ErrorBoundaryState> {
+  constructor(props: ErrorBoundaryProps) {
+    super(props)
+    this.state = {
+      hasError: false,
+      error: null,
+      cryptoError: null,
+    }
+  }
+
+  static getDerivedStateFromError(error: Error): Partial<ErrorBoundaryState> {
+    return {
+      hasError: true,
+      error,
+      cryptoError: isCryptoError(error) ? error : null,
+    }
+  }
+
+  componentDidCatch(error: Error): void {
+    // Log the error
+    if (isCryptoError(error)) {
+      logError(error).catch(console.error)
+    }
+
+    // Call optional error handler
+    this.props.onError?.(error)
+  }
+
+  handleRetry = (): void => {
+    this.setState({
+      hasError: false,
+      error: null,
+      cryptoError: null,
+    })
+    this.props.onRetry?.()
+  }
+
+  handleRecoveryKey = (): void => {
+    this.props.onRecoveryKey?.()
+  }
+
+  render(): ReactNode {
+    const { hasError, error, cryptoError } = this.state
+    const { children, fallback } = this.props
+
+    if (!hasError) {
+      return children
+    }
+
+    // If it's a CryptoError, show specialized UI
+    if (cryptoError) {
+      return (
+        <CryptoErrorFallback
+          error={cryptoError}
+          onRetry={this.handleRetry}
+          onRecoveryKey={this.props.onRecoveryKey ? this.handleRecoveryKey : undefined}
+        />
+      )
+    }
+
+    // If a custom fallback is provided, use it
+    if (fallback) {
+      return fallback
+    }
+
+    // Default fallback for non-crypto errors
+    return (
+      <DefaultErrorFallback error={error} onRetry={this.handleRetry} />
+    )
+  }
+}
+
+interface CryptoErrorFallbackProps {
+  error: CryptoError
+  onRetry: () => void
+  onRecoveryKey?: () => void
+}
+
+function CryptoErrorFallback({ error, onRetry, onRecoveryKey }: CryptoErrorFallbackProps) {
+  const showRecoveryButton =
+    error.code === ERROR_CODES.KEY_NOT_FOUND ||
+    error.code === ERROR_CODES.KEY_INVALID ||
+    error.code === ERROR_CODES.DECRYPTION_FAILED ||
+    error.code === ERROR_CODES.SESSION_LOCKED
+
+  const isCritical = error.category === ERROR_CATEGORY.CRITICAL
+
+  return (
+    <div>
+      <Card>
+        <CardHeader>
+          <div>
+            {showRecoveryButton ? <Key /> : <AlertTriangle />}
+          </div>
+          <CardTitle>
+            {isCritical ? 'Encryption Error' : 'Something went wrong'}
+          </CardTitle>
+          <CardDescription>{error.getUserMessage()}</CardDescription>
+        </CardHeader>
+        <CardContent>
+          <p>Error code: {error.code}</p>
+          <div>
+            {showRecoveryButton && onRecoveryKey && (
+              <Button onClick={onRecoveryKey}>
+                <Key />
+                Use Recovery Key
+              </Button>
+            )}
+            <Button variant="outline" onClick={onRetry}>
+              <RefreshCw />
+              Retry
+            </Button>
+          </div>
+        </CardContent>
+      </Card>
+    </div>
+  )
+}
+
+interface DefaultErrorFallbackProps {
+  error: Error | null
+  onRetry: () => void
+}
+
+function DefaultErrorFallback({ error, onRetry }: DefaultErrorFallbackProps) {
+  return (
+    <div>
+      <Card>
+        <CardHeader>
+          <AlertTriangle />
+          <CardTitle>Something went wrong</CardTitle>
+          <CardDescription>
+            {error?.message ?? 'An unexpected error occurred'}
+          </CardDescription>
+        </CardHeader>
+        <CardContent>
+          <Button variant="outline" onClick={onRetry}>
+            <RefreshCw />
+            Retry
+          </Button>
+        </CardContent>
+      </Card>
+    </div>
+  )
+}
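+
+// Note: React error boundaries only catch errors thrown during render;
+// CryptoErrors raised in async handlers bypass this class and are expected
+// to be surfaced through DecryptionErrorDialog instead.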
diff --git a/app/src/features/security/ui/MigrationProgress.tsx b/app/src/features/security/ui/MigrationProgress.tsx
new file mode 100644
index 00000000..b714fa14
--- /dev/null
+++ b/app/src/features/security/ui/MigrationProgress.tsx
@@ -0,0 +1,195 @@
+import { Check, Loader2, AlertCircle, RefreshCw } from 'lucide-react'
+import { useState, useEffect, useCallback } from 'react'
+
+import { cn } from '@/shared/lib/utils'
+import { Button } from '@/shared/ui/button'
+
+import { migrateUserData, markSecuritySetupComplete, type MigrateRequest } from '@/entities/user'
+
+type StepStatus = 'pending' | 'in_progress' | 'completed' | 'error'
+
+interface MigrationStep {
+  id: string
+  label: string
+  status: StepStatus
+}
+
+interface MigrationProgressProps {
+  /** Migration request data (keys, etc.) */
+  migrationData: MigrateRequest
+  /** Called when migration completes successfully */
+  onComplete: () => void
+  /** Called when migration fails */
+  onError: (error: Error) => void
+}
+
+const INITIAL_STEPS: MigrationStep[] = [
+  { id: 'keys', label: 'Registering encryption keys', status: 'pending' },
+  { id: 'data', label: 'Encrypting data', status: 'pending' },
+  { id: 'verify', label: 'Verifying', status: 'pending' },
+  { id: 'complete', label: 'Completing setup', status: 'pending' },
+]
+
+export function MigrationProgress({
+  migrationData,
+  onComplete,
+  onError,
+}: MigrationProgressProps) {
+  const [steps, setSteps] = useState<MigrationStep[]>(INITIAL_STEPS)
+  const [error, setError] = useState<string | null>(null)
+  const [isRetrying, setIsRetrying] = useState(false)
+
+  const updateStepStatus = useCallback((stepId: string, status: StepStatus) => {
+    setSteps((prev) =>
+      prev.map((step) => (step.id === stepId ? { ...step, status } : step))
+    )
+  }, [])
+
+  const runMigration = useCallback(async () => {
+    setError(null)
+    setSteps(INITIAL_STEPS)
+
+    try {
+      // Step 1: Register keys
+      updateStepStatus('keys', 'in_progress')
+      await new Promise((r) => setTimeout(r, 500)) // Small delay for UX
+      updateStepStatus('keys', 'completed')
+
+      // Step 2: Migrate data
+      updateStepStatus('data', 'in_progress')
+      await migrateUserData(migrationData)
+      updateStepStatus('data', 'completed')
+
+      // Step 3: Verify
+      updateStepStatus('verify', 'in_progress')
+      await new Promise((r) => setTimeout(r, 300))
+      updateStepStatus('verify', 'completed')
+
+      // Step 4: Mark complete
+      updateStepStatus('complete', 'in_progress')
+      await markSecuritySetupComplete()
+      updateStepStatus('complete', 'completed')
+
+      // Success
+      onComplete()
+    } catch (err) {
+      const message = err instanceof Error ? err.message : 'Migration failed'
+      setError(message)
+
+      // Find the in_progress step and mark it as error
+      setSteps((prev) =>
+        prev.map((step) => ({
+          ...step,
+          status: step.status === 'in_progress' ? 'error' : step.status,
+        }))
+      )
+
+      onError(err instanceof Error ? err : new Error(message))
+    }
+  }, [migrationData, onComplete, onError, updateStepStatus])
+
+  const handleRetry = useCallback(async () => {
+    setIsRetrying(true)
+    await runMigration()
+    setIsRetrying(false)
+  }, [runMigration])
+
+  // Start migration on mount
+  useEffect(() => {
+    runMigration()
+  }, [runMigration])
+
+  const progress = steps.filter((s) => s.status === 'completed').length / steps.length
+
+  return (
+    <div>
+      {/* Progress Bar */}
+      <div>
+        <div>
+          <span>Migrating...</span>
+          <span>{Math.round(progress * 100)}%</span>
+        </div>
+        <div>
+          <div style={{ width: `${progress * 100}%` }} />
+        </div>
+      </div>
+
+      {/* Steps */}
+      <div>
+        {steps.map((step) => (
+          <StepItem key={step.id} step={step} />
+        ))}
+      </div>
+
+      {/* Error */}
+      {error && (
+        <div>
+          <div>
+            <AlertCircle />
+            <div>
+              <p>Migration failed</p>
+              <p>{error}</p>
+            </div>
+          </div>
+          <div>
+            <Button variant="outline" onClick={handleRetry} disabled={isRetrying}>
+              <RefreshCw className={cn(isRetrying && 'animate-spin')} />
+              Retry
+            </Button>
+          </div>
+        </div>
+      )}
+    </div>
+  )
+}
+
+function StepItem({ step }: { step: MigrationStep }) {
+  return (
+    <div>
+      <div>
+        {step.status === 'completed' && <Check />}
+        {step.status === 'in_progress' && (
+          <Loader2 className="animate-spin" />
+        )}
+        {step.status === 'error' && <AlertCircle />}
+        {step.status === 'pending' && (
+          <span />
+        )}
+      </div>
+      <span>{step.label}</span>
+    </div>
+  )
+}
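+
+// Usage sketch (illustrative; `migrateRequest` is built client-side, e.g. by
+// performMigration's key-generation steps, and the handlers are app-specific):
+//   <MigrationProgress
+//     migrationData={migrateRequest}
+//     onComplete={() => setStep('done')}
+//     onError={(e) => console.error(e)}
+//   />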
diff --git a/app/src/features/security/ui/OfflineBanner.tsx b/app/src/features/security/ui/OfflineBanner.tsx
new file mode 100644
index 00000000..8a504490
--- /dev/null
+++ b/app/src/features/security/ui/OfflineBanner.tsx
@@ -0,0 +1,96 @@
+import { Loader2, WifiOff, RefreshCw } from 'lucide-react'
+
+import { cn } from '@/shared/lib/utils'
+import { Button } from '@/shared/ui/button'
+
+import { useNetworkStatus } from '../hooks/useNetworkStatus'
+import { useOfflineQueue } from '../hooks/useOfflineQueue'
+
+interface OfflineBannerProps {
+  /** Additional class names */
+  className?: string
+  /** Whether to show pending operations count */
+  showPendingCount?: boolean
+  /** Handler to process operations (passed to useOfflineQueue) */
+  processOperation?: (operation: unknown) => Promise<void>
+}
+
+/**
+ * Banner that shows offline status and pending operations
+ *
+ * @example
+ * ```tsx
+ * // In your layout
+ * <OfflineBanner />
+ * ```
+ */
+export function OfflineBanner({
+  className,
+  showPendingCount = true,
+  processOperation,
+}: OfflineBannerProps) {
+  const { isOnline } = useNetworkStatus()
+  const { pendingCount, processing, processQueue } = useOfflineQueue({
+    processOperation: processOperation as (op: { payload: string }) => Promise<void>,
+    autoProcess: true,
+  })
+
+  // Don't show if online and no pending operations
+  if (isOnline && pendingCount === 0) {
+    return null
+  }
+
+  return (
+    <div className={cn(className)}>
+      <div>
+        {!isOnline ? (
+          <>
+            <WifiOff />
+            <span>
+              You are offline.
+              {showPendingCount && pendingCount > 0 && (
+                <span>
+                  {' '}
+                  {pendingCount} {pendingCount === 1 ? 'change' : 'changes'} will sync when you reconnect.
+                </span>
+              )}
+            </span>
+          </>
+        ) : processing ? (
+          <>
+            <Loader2 className="animate-spin" />
+            <span>
+              Syncing {pendingCount} {pendingCount === 1 ? 'change' : 'changes'}...
+            </span>
+          </>
+        ) : (
+          <>
+            <RefreshCw />
+            <span>
+              {pendingCount} pending {pendingCount === 1 ? 'change' : 'changes'}
+            </span>
+          </>
+        )}
+      </div>
+
+      {isOnline && !processing && pendingCount > 0 && (
+        <Button size="sm" variant="outline" onClick={() => processQueue()}>
+          Sync now
+        </Button>
+      )}
+    </div>
+  )
+}
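+
+// Note: processOperation crosses this boundary untyped; the cast above narrows
+// it to the queue's encrypted-payload shape. Caller sketch (illustrative;
+// syncEncryptedPayload is a hypothetical handler):
+//   <OfflineBanner processOperation={(op) => syncEncryptedPayload(op)} />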
diff --git a/app/src/features/security/ui/PassphraseInput.tsx b/app/src/features/security/ui/PassphraseInput.tsx
new file mode 100644
index 00000000..328773e2
--- /dev/null
+++ b/app/src/features/security/ui/PassphraseInput.tsx
@@ -0,0 +1,270 @@
+import { Eye, EyeOff, Check, X } from 'lucide-react'
+import { useState, useCallback, useMemo, useEffect, useRef } from 'react'
+import type zxcvbnType from 'zxcvbn'
+
+import { cn } from '@/shared/lib/utils'
+import { Button } from '@/shared/ui/button'
+import { Input } from '@/shared/ui/input'
+import { Label } from '@/shared/ui/label'
+
+const MIN_PASSPHRASE_LENGTH = 12
+
+interface PassphraseInputProps {
+  onSubmit: (passphrase: string) => void | Promise<void>
+  loading?: boolean
+  error?: string
+  requireConfirmation?: boolean
+  minLength?: number
+  submitLabel?: string
+}
+
+interface StrengthInfo {
+  score: number
+  label: string
+  color: string
+  feedback: string[]
+}
+
+function getStrengthInfo(result: zxcvbnType.ZXCVBNResult): StrengthInfo {
+  const labels = ['Very weak', 'Weak', 'Fair', 'Strong', 'Very strong']
+  const colors = [
+    'bg-red-500',
+    'bg-orange-500',
+    'bg-yellow-500',
+    'bg-lime-500',
+    'bg-green-500',
+  ]
+
+  const feedback = [
+    ...result.feedback.suggestions,
+    result.feedback.warning,
+  ].filter(Boolean) as string[]
+
+  return {
+    score: result.score,
+    label: labels[result.score],
+    color: colors[result.score],
+    feedback,
+  }
+}
+
+export function PassphraseInput({
+  onSubmit,
+  loading = false,
+  error,
+  requireConfirmation = true,
+  minLength = MIN_PASSPHRASE_LENGTH,
+  submitLabel = 'Next',
+}: PassphraseInputProps) {
+  const [passphrase, setPassphrase] = useState('')
+  const [confirmation, setConfirmation] = useState('')
+  const [showPassphrase, setShowPassphrase] = useState(false)
+  const [showConfirmation, setShowConfirmation] = useState(false)
+  const [touched, setTouched] = useState(false)
+
+  // Dynamically load zxcvbn only when needed (saves ~800KB initial bundle)
+  const zxcvbnRef = useRef<typeof zxcvbnType | null>(null)
+  const [zxcvbnReady, setZxcvbnReady] = useState(false)
+
+  useEffect(() => {
+    // Load zxcvbn on mount (preload for better UX)
+    if (!zxcvbnRef.current) {
+      import('zxcvbn').then((module) => {
+        zxcvbnRef.current = module.default
+        setZxcvbnReady(true)
+      })
+    }
+  }, [])
+
+  const strength = useMemo(() => {
+    if (!passphrase || !zxcvbnRef.current) return null
+    return getStrengthInfo(zxcvbnRef.current(passphrase))
+  }, [passphrase, zxcvbnReady]) // re-runs once the scorer finishes loading
+
+  const validations = useMemo(() => {
+    return {
+      minLength: passphrase.length >= minLength,
+      strongEnough: strength ? strength.score >= 2 : false,
+      matches: !requireConfirmation || passphrase === confirmation,
+    }
+  }, [passphrase, confirmation, minLength, strength, requireConfirmation])
+
+  const isValid = validations.minLength && validations.strongEnough && validations.matches
+
+  const handleSubmit = useCallback(
+    async (e: React.FormEvent) => {
+      e.preventDefault()
+      setTouched(true)
+      if (isValid && !loading) {
+        try {
+          await onSubmit(passphrase)
+        } catch {
+          // Error is handled by the parent component via error prop
+        }
+      }
+    },
+    [isValid, loading, onSubmit, passphrase]
+  )
+
+  // A stale confirmation is intentionally not cleared when the passphrase
+  // changes; the validation checklist below surfaces the mismatch instead.
+
+  return (
+
+      {/* Passphrase Input */}
+
+
+
+            <Input
+              type={showPassphrase ? 'text' : 'password'}
+              value={passphrase}
+              onChange={(e) => setPassphrase(e.target.value)}
+              onBlur={() => setTouched(true)}
+              placeholder={`${minLength}+ characters`}
+              autoComplete="new-password"
+              aria-invalid={touched && !validations.minLength}
+              disabled={loading}
+              className="pr-10"
+            />
+
+
+
+      {/* Strength Meter */}
+      {passphrase && strength && (
+
+
+            {[0, 1, 2, 3, 4].map((i) => (
+
+            ))}
+
+
+            <span className={cn(strength.score >= 2 ? 'text-green-600' : 'text-destructive')}>
+              {strength.label}
+            </span>
+            {strength.feedback.length > 0 && (
+
+                {strength.feedback[0]}
+
+            )}
+
+
+      )}
+
+      {/* Validation Checklist */}
+        <ValidationItem
+          valid={validations.minLength}
+          label={`At least ${minLength} characters`}
+          touched={touched || passphrase.length > 0}
+        />
+        <ValidationItem
+          valid={validations.strongEnough}
+          label="Strong enough"
+          touched={touched || passphrase.length > 0}
+        />
+
+
+      {/* Confirmation Input */}
+      {requireConfirmation && (
+
+
+            <Input
+              type={showConfirmation ? 'text' : 'password'}
+              value={confirmation}
+              onChange={(e) => setConfirmation(e.target.value)}
+              placeholder="Enter again"
+              autoComplete="new-password"
+              aria-invalid={touched && confirmation.length > 0 && !validations.matches}
+              disabled={loading}
+              className="pr-10"
+            />
+
+
+          {confirmation && !validations.matches && (
+            <p className="text-sm text-destructive">Passphrases do not match</p>
+          )}
+
+      )}
+
+      {/* Error Message */}
+      {error && (
+        <p className="text-sm text-destructive">{error}</p>
+      )}
+
+      {/* Submit Button */}
+
+
+  )
+}
+
+function ValidationItem({
+  valid,
+  label,
+  touched,
+}: {
+  valid: boolean
+  label: string
+  touched: boolean
+}) {
+  if (!touched) {
+    return (
+
+
+        {label}
+
+    )
+  }
+
+  return (
+
+      {valid ? <Check /> : <X />}
+      {label}
+
+  )
+}
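PassphraseInput above defers loading `zxcvbn` until mount so the roughly 800KB scorer stays out of the initial bundle. The same pattern can be lifted into a reusable hook; here is a minimal sketch under the same assumptions (the package's default export plus its bundled types), with `useZxcvbn` as a hypothetical name rather than anything this diff ships:

```tsx
// Hypothetical sketch, not part of this diff: lazy-load zxcvbn once and
// re-render subscribers when the scorer becomes available.
import { useEffect, useRef, useState } from 'react'
import type zxcvbnType from 'zxcvbn'

export function useZxcvbn(): typeof zxcvbnType | null {
  const ref = useRef<typeof zxcvbnType | null>(null)
  const [ready, setReady] = useState(false)

  useEffect(() => {
    let cancelled = false
    import('zxcvbn').then((module) => {
      if (cancelled) return
      ref.current = module.default
      setReady(true) // the state change triggers the re-render the ref alone cannot
    })
    return () => {
      cancelled = true
    }
  }, [])

  return ready ? ref.current : null
}
```

Returning `null` until the import resolves lets callers show a neutral strength meter instead of blocking on the download, which is the behavior the component above implements inline.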
diff --git a/app/src/features/security/ui/PassphraseResetWizard.tsx b/app/src/features/security/ui/PassphraseResetWizard.tsx
new file mode 100644
index 00000000..cd1a8c8f
--- /dev/null
+++ b/app/src/features/security/ui/PassphraseResetWizard.tsx
@@ -0,0 +1,233 @@
+import { AlertCircle, ArrowLeft, ArrowRight, Check, Key, Loader2, Lock } from 'lucide-react'
+import { useState, useCallback } from 'react'
+
+import { Button } from '@/shared/ui/button'
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/shared/ui/card'
+
+import { useKeyVault } from '../context/key-vault-context'
+import { useKeyManager } from '../hooks/useKeyManager'
+
+import { PassphraseInput } from './PassphraseInput'
+import { RecoveryKeyDisplay } from './RecoveryKeyDisplay'
+
+type ResetStep = 'recovery_input' | 'new_passphrase' | 'new_recovery_display' | 'complete'
+
+interface PassphraseResetWizardProps {
+  /** Called when the reset process is complete */
+  onComplete?: () => void
+  /** Called when the user cancels */
+  onCancel?: () => void
+}
+
+export function PassphraseResetWizard({
+  onComplete,
+  onCancel,
+}: PassphraseResetWizardProps) {
+  const { unlockWithRecovery } = useKeyVault()
+  const { changePassphrase } = useKeyManager()
+
+  const [step, setStep] = useState<ResetStep>('recovery_input')
+  const [recoveryKey, setRecoveryKey] = useState('')
+  const [newRecoveryKey, setNewRecoveryKey] = useState('')
+  const [loading, setLoading] = useState(false)
+  const [error, setError] = useState<string | null>(null)
+
+  // Step 1: Validate recovery key and unlock
+  const handleRecoveryKeySubmit = useCallback(async () => {
+    if (!recoveryKey.trim()) return
+
+    setLoading(true)
+    setError(null)
+
+    try {
+      // Validate format
+      const words = recoveryKey.trim().split(/\s+/)
+      if (words.length !== 24) {
+        throw new Error('Recovery key must be exactly 24 words')
+      }
+
+      // Unlock with recovery key (uses rememberMe from auth context)
+      await unlockWithRecovery(recoveryKey)
+
+      // Move to next step
+      setStep('new_passphrase')
+    } catch (err) {
+      setError(err instanceof Error ? err.message : 'Invalid recovery key')
+    } finally {
+      setLoading(false)
+    }
+  }, [recoveryKey, unlockWithRecovery])
+
+  // Step 2: Set new passphrase and generate new recovery key
+  const handleNewPassphraseSubmit = useCallback(async (passphrase: string) => {
+    setLoading(true)
+    setError(null)
+
+    try {
+      // Change passphrase (this will also generate a new recovery key)
+      const result = await changePassphrase(passphrase)
+      setNewRecoveryKey(result)
+      setStep('new_recovery_display')
+    } catch (err) {
+      setError(err instanceof Error ? err.message : 'Failed to reset passphrase')
+    } finally {
+      setLoading(false)
+    }
+  }, [changePassphrase])
+
+  // Step 3: User has seen the new recovery key
+  const handleRecoveryKeyConfirmed = useCallback(() => {
+    setStep('complete')
+  }, [])
+
+  // Step 4: Complete
+  const handleComplete = useCallback(() => {
+    onComplete?.()
+  }, [onComplete])
+
+  const renderStep = () => {
+    switch (step) {
+      case 'recovery_input':
+        return (
+          <>
+
+
+              <CardTitle>Reset Passphrase</CardTitle>
+              <CardDescription>
+                Enter your 24-word recovery key to reset your passphrase
+              </CardDescription>
+
+ +
+