diff --git a/src-tauri/src/acp/binary_cache.rs b/src-tauri/src/acp/binary_cache.rs index 471c0ec7..0b502881 100644 --- a/src-tauri/src/acp/binary_cache.rs +++ b/src-tauri/src/acp/binary_cache.rs @@ -293,17 +293,29 @@ async fn ensure_binary_with_progress( std::fs::create_dir_all(&extract_dir) .map_err(|e| AcpError::DownloadFailed(format!("failed to create extract dir: {e}")))?; - on_progress("Extracting archive..."); + let final_path = dir.join(&bin_name); if archive_url.ends_with(".tar.gz") || archive_url.ends_with(".tgz") { + on_progress("Extracting archive..."); extract_tar_gz(&archive_path, &extract_dir)?; } else if archive_url.ends_with(".tar.bz2") || archive_url.ends_with(".tbz2") { + on_progress("Extracting archive..."); extract_tar_bz2(&archive_path, &extract_dir)?; } else if archive_url.ends_with(".zip") { + on_progress("Extracting archive..."); extract_zip(&archive_path, &extract_dir)?; } else { - return Err(AcpError::DownloadFailed(format!( - "unsupported archive format: {archive_url}" - ))); + on_progress("Installing executable..."); + std::fs::copy(&archive_path, &final_path) + .map_err(|e| AcpError::DownloadFailed(format!("failed to copy binary: {e}")))?; + if !is_binary_file_compatible(&final_path) { + let _ = std::fs::remove_file(&final_path); + return Err(AcpError::DownloadFailed( + "downloaded binary format is invalid for current platform".into(), + )); + } + set_executable_permissions(&final_path)?; + on_progress("Binary installed successfully"); + return Ok(final_path); } // Find the binary in extracted files and move to final location. 
@@ -312,7 +324,6 @@ async fn ensure_binary_with_progress(
         AcpError::DownloadFailed(format!("binary '{bin_name}' not found in archive"))
     })?;

-    let final_path = dir.join(&bin_name);
     std::fs::copy(&extracted_bin, &final_path)
         .map_err(|e| AcpError::DownloadFailed(format!("failed to copy binary: {e}")))?;

diff --git a/src-tauri/src/acp/registry.rs b/src-tauri/src/acp/registry.rs
index de5cf792..0b6514cd 100644
--- a/src-tauri/src/acp/registry.rs
+++ b/src-tauri/src/acp/registry.rs
@@ -80,6 +80,7 @@ pub fn all_acp_agents() -> Vec<AgentType> {
         AgentType::OpenClaw,
         AgentType::OpenCode,
         AgentType::Cline,
+        AgentType::Grok,
     ]
 }

@@ -91,6 +92,7 @@ pub fn registry_id_for(agent_type: AgentType) -> &'static str {
         AgentType::OpenClaw => "openclaw-acp",
         AgentType::OpenCode => "opencode",
         AgentType::Cline => "cline",
+        AgentType::Grok => "grok",
     }
 }

@@ -102,6 +104,7 @@ pub fn from_registry_id(id: &str) -> Option<AgentType> {
         "openclaw-acp" => Some(AgentType::OpenClaw),
         "opencode" => Some(AgentType::OpenCode),
         "cline" => Some(AgentType::Cline),
+        "grok" => Some(AgentType::Grok),
         _ => None,
     }
 }
@@ -238,5 +241,70 @@ pub fn get_agent_meta(agent_type: AgentType) -> AcpAgentMeta {
                 ],
             },
         },
+        AgentType::Grok => AcpAgentMeta {
+            agent_type,
+            name: "Grok",
+            description: "xAI's coding agent CLI",
+            distribution: AgentDistribution::Binary {
+                version: "0.1.210",
+                cmd: "grok",
+                args: &["agent", "stdio"],
+                env: &[],
+                platforms: &[
+                    PlatformBinary {
+                        platform: "darwin-aarch64",
+                        url: "https://storage.googleapis.com/grok-build-public-artifacts/cli/grok-0.1.210-macos-aarch64",
+                    },
+                    PlatformBinary {
+                        platform: "darwin-x86_64",
+                        url: "https://storage.googleapis.com/grok-build-public-artifacts/cli/grok-0.1.210-macos-x86_64",
+                    },
+                    PlatformBinary {
+                        platform: "linux-aarch64",
+                        url: "https://storage.googleapis.com/grok-build-public-artifacts/cli/grok-0.1.210-linux-aarch64",
+                    },
+                    PlatformBinary {
+                        platform: "linux-x86_64",
+                        url:
"https://storage.googleapis.com/grok-build-public-artifacts/cli/grok-0.1.210-linux-x86_64", + }, + PlatformBinary { + platform: "windows-x86_64", + url: "https://storage.googleapis.com/grok-build-public-artifacts/cli/grok-0.1.210-windows-x86_64.exe", + }, + ], + }, + }, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn grok_is_registered_as_binary_stdio_agent() { + assert!(all_acp_agents().contains(&AgentType::Grok)); + assert_eq!(registry_id_for(AgentType::Grok), "grok"); + assert_eq!(from_registry_id("grok"), Some(AgentType::Grok)); + + let meta = get_agent_meta(AgentType::Grok); + assert_eq!(meta.name, "Grok"); + assert_eq!(meta.registry_version(), Some("0.1.210")); + + match meta.distribution { + AgentDistribution::Binary { + cmd, + args, + env, + platforms, + .. + } => { + assert_eq!(cmd, "grok"); + assert_eq!(args, &["agent", "stdio"]); + assert!(env.is_empty()); + assert!(platforms.iter().any(|p| p.platform == "darwin-aarch64")); + } + other => panic!("expected binary distribution, got {other:?}"), + } } } diff --git a/src-tauri/src/commands/acp.rs b/src-tauri/src/commands/acp.rs index b72728df..4c55a3e0 100644 --- a/src-tauri/src/commands/acp.rs +++ b/src-tauri/src/commands/acp.rs @@ -1579,6 +1579,14 @@ pub(crate) fn skill_storage_spec(agent_type: AgentType) -> Option Some(SkillStorageSpec { + kind: SkillStorageKind::SkillDirectoryOnly, + global_dirs: vec![ + home_dir_or_default().join(".grok").join("skills"), + home_dir_or_default().join(".agents").join("skills"), + ], + project_rel_dirs: vec![".grok/skills", ".agents/skills"], + }), } } @@ -2069,7 +2077,7 @@ fn cascade_update_agent_config( serde_json::to_string(&patch).map_err(|e| AcpError::protocol(e.to_string()))?; persist_agent_local_config_json(agent_type, Some(&patch_str))?; } - AgentType::Cline => {} + AgentType::Cline | AgentType::Grok => {} } Ok(()) } @@ -2864,7 +2872,6 @@ pub(crate) async fn acp_download_agent_binary_core( meta.name )) })?; - emit_agent_install_event( emitter, 
&task_id, diff --git a/src-tauri/src/commands/conversations.rs b/src-tauri/src/commands/conversations.rs index 8397aff7..ed5afa65 100644 --- a/src-tauri/src/commands/conversations.rs +++ b/src-tauri/src/commands/conversations.rs @@ -13,6 +13,7 @@ use crate::parsers::claude::ClaudeParser; use crate::parsers::cline::ClineParser; use crate::parsers::codex::CodexParser; use crate::parsers::gemini::GeminiParser; +use crate::parsers::grok::GrokParser; use crate::parsers::openclaw::OpenClawParser; use crate::parsers::opencode::OpenCodeParser; use crate::parsers::{path_eq_for_matching, AgentParser, ParseError}; @@ -72,6 +73,7 @@ fn list_conversations_sync( (AgentType::Gemini, Box::new(GeminiParser::new())), (AgentType::OpenClaw, Box::new(OpenClawParser::new())), (AgentType::Cline, Box::new(ClineParser::new())), + (AgentType::Grok, Box::new(GrokParser::new())), ]; for (at, parser) in &parsers { @@ -173,6 +175,7 @@ pub async fn get_conversation( AgentType::Gemini => Box::new(GeminiParser::new()), AgentType::OpenClaw => Box::new(OpenClawParser::new()), AgentType::Cline => Box::new(ClineParser::new()), + AgentType::Grok => Box::new(GrokParser::new()), }; parser @@ -307,6 +310,7 @@ pub async fn get_folder_conversation_core( AgentType::Gemini => Box::new(GeminiParser::new()), AgentType::OpenClaw => Box::new(OpenClawParser::new()), AgentType::Cline => Box::new(ClineParser::new()), + AgentType::Grok => Box::new(GrokParser::new()), }; match parser.get_conversation(&eid) { Ok(d) => Ok((d.turns, d.session_stats, None)), @@ -318,7 +322,10 @@ pub async fn get_folder_conversation_core( // and started_at from the parsed conversation list. 
if matches!( at, - AgentType::OpenClaw | AgentType::Cline | AgentType::Gemini + AgentType::OpenClaw + | AgentType::Cline + | AgentType::Gemini + | AgentType::Grok ) { if let Ok(all) = parser.list_conversations() { // Filter by folder_path first, then find the closest diff --git a/src-tauri/src/commands/experts.rs b/src-tauri/src/commands/experts.rs index 06739522..0d2dbcf6 100644 --- a/src-tauri/src/commands/experts.rs +++ b/src-tauri/src/commands/experts.rs @@ -779,6 +779,7 @@ fn supported_agents() -> Vec { AgentType::Gemini, AgentType::OpenClaw, AgentType::Cline, + AgentType::Grok, ]; ALL.iter() .filter(|a| skill_storage_spec(**a).is_some()) diff --git a/src-tauri/src/commands/mcp.rs b/src-tauri/src/commands/mcp.rs index fb4cf807..26518cb5 100644 --- a/src-tauri/src/commands/mcp.rs +++ b/src-tauri/src/commands/mcp.rs @@ -54,6 +54,7 @@ pub enum McpAppType { OpenClaw, OpenCode, Cline, + Grok, } #[derive(Debug, Clone, Serialize)] @@ -367,6 +368,7 @@ pub async fn mcp_upsert_local_server( McpAppType::OpenClaw, McpAppType::OpenCode, McpAppType::Cline, + McpAppType::Grok, ]; for app in all_apps { @@ -421,6 +423,7 @@ pub async fn mcp_remove_server( McpAppType::OpenClaw, McpAppType::OpenCode, McpAppType::Cline, + McpAppType::Grok, ], }; @@ -637,36 +640,16 @@ fn cline_config_path() -> PathBuf { .join("cline_mcp_settings.json") } -fn read_json_file(path: &Path) -> Result { - if !path.exists() { - return Ok(json!({})); - } - - let raw = fs::read_to_string(path).map_err(AppCommandError::io)?; - serde_json::from_str::(&raw) - .map_err(|e| mcp_configuration_invalid(format!("invalid JSON at {}: {e}", path.display()))) -} - -fn write_json_file(path: &Path, value: &Value) -> Result<(), AppCommandError> { - if let Some(parent) = path.parent() { - fs::create_dir_all(parent).map_err(AppCommandError::io)?; - } - let serialized = serde_json::to_string_pretty(value).map_err(|e| { - mcp_configuration_invalid(format!( - "failed to serialize JSON for {}: {e}", - path.display() - )) - })?; 
- fs::write(path, format!("{serialized}\n")).map_err(AppCommandError::io) +fn grok_config_path() -> PathBuf { + home_dir_or_default().join(".grok").join("config.toml") } -fn read_codex_root_toml() -> Result { - let path = codex_config_toml_path(); +fn read_toml_file(path: &Path) -> Result { if !path.exists() { return Ok(toml::Value::Table(toml::map::Map::new())); } - let raw = fs::read_to_string(&path).map_err(AppCommandError::io)?; + let raw = fs::read_to_string(path).map_err(AppCommandError::io)?; let parsed = raw.parse::().map_err(|e| { mcp_configuration_invalid(format!("invalid TOML at {}: {e}", path.display())) })?; @@ -681,19 +664,49 @@ fn read_codex_root_toml() -> Result { Ok(parsed) } -fn write_codex_root_toml(root: &toml::Value) -> Result<(), AppCommandError> { - let path = codex_config_toml_path(); +fn write_toml_file(path: &Path, value: &toml::Value) -> Result<(), AppCommandError> { if let Some(parent) = path.parent() { fs::create_dir_all(parent).map_err(AppCommandError::io)?; } - let serialized = toml::to_string_pretty(root).map_err(|e| { + let serialized = toml::to_string_pretty(value).map_err(|e| { mcp_configuration_invalid(format!( "failed to serialize TOML for {}: {e}", path.display() )) })?; - fs::write(&path, format!("{serialized}\n")).map_err(AppCommandError::io) + fs::write(path, format!("{serialized}\n")).map_err(AppCommandError::io) +} + +fn read_json_file(path: &Path) -> Result { + if !path.exists() { + return Ok(json!({})); + } + + let raw = fs::read_to_string(path).map_err(AppCommandError::io)?; + serde_json::from_str::(&raw) + .map_err(|e| mcp_configuration_invalid(format!("invalid JSON at {}: {e}", path.display()))) +} + +fn write_json_file(path: &Path, value: &Value) -> Result<(), AppCommandError> { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).map_err(AppCommandError::io)?; + } + let serialized = serde_json::to_string_pretty(value).map_err(|e| { + mcp_configuration_invalid(format!( + "failed to serialize JSON for 
{}: {e}", + path.display() + )) + })?; + fs::write(path, format!("{serialized}\n")).map_err(AppCommandError::io) +} + +fn read_codex_root_toml() -> Result { + read_toml_file(&codex_config_toml_path()) +} + +fn write_codex_root_toml(root: &toml::Value) -> Result<(), AppCommandError> { + write_toml_file(&codex_config_toml_path(), root) } fn obj_as_string_map(value: Option<&Value>) -> Option> { @@ -1410,6 +1423,27 @@ fn canonical_to_codex_entry(spec: &Value) -> Result Result { + let mut entry = canonical_to_codex_entry(spec)?; + let Some(table) = entry.as_table_mut() else { + return Ok(entry); + }; + + table.insert("enabled".to_string(), toml::Value::Boolean(true)); + + if let Some(headers) = table.remove("http_headers") { + table.insert("headers".to_string(), headers); + } + + // `grok mcp add --command ...` writes stdio entries without an explicit + // `type`, while HTTP/SSE entries keep their transport type. + if table.get("type").and_then(toml::Value::as_str) == Some("stdio") { + table.remove("type"); + } + + Ok(entry) +} + fn read_claude_servers() -> Result, AppCommandError> { let path = claude_config_path(); let root = read_json_file(&path)?; @@ -2027,6 +2061,87 @@ fn remove_cline_server(id: &str) -> Result { Ok(removed) } +// --------------------------------------------------------------------------- +// Grok (~/.grok/config.toml → [mcp_servers.]) +// --------------------------------------------------------------------------- + +fn read_grok_servers() -> Result, AppCommandError> { + let root = read_toml_file(&grok_config_path())?; + let mut out = BTreeMap::new(); + + let Some(servers) = root.get("mcp_servers").and_then(toml::Value::as_table) else { + return Ok(out); + }; + + for (id, entry) in servers { + match codex_entry_to_canonical(id, entry) { + Ok(normalized) => { + out.insert(id.to_string(), normalized); + } + Err(err) => { + eprintln!("[MCP] skip invalid Grok MCP entry id={id}: {err}"); + } + } + } + + Ok(out) +} + +fn upsert_grok_server(id: &str, spec: 
&Value) -> Result<(), AppCommandError> { + let path = grok_config_path(); + let mut root = read_toml_file(&path)?; + let table = root.as_table_mut().ok_or_else(|| { + mcp_configuration_invalid(format!( + "invalid TOML root in {}: expected table", + path.display() + )) + })?; + + if !table + .get("mcp_servers") + .map(toml::Value::is_table) + .unwrap_or(false) + { + table.insert( + "mcp_servers".to_string(), + toml::Value::Table(toml::map::Map::new()), + ); + } + + let servers = table + .get_mut("mcp_servers") + .and_then(toml::Value::as_table_mut) + .ok_or_else(|| { + mcp_configuration_invalid(format!("invalid mcp_servers in {}", path.display())) + })?; + servers.insert(id.to_string(), canonical_to_grok_entry(spec)?); + + write_toml_file(&path, &root) +} + +fn remove_grok_server(id: &str) -> Result { + let path = grok_config_path(); + if !path.exists() { + return Ok(false); + } + + let mut root = read_toml_file(&path)?; + let Some(table) = root.as_table_mut() else { + return Ok(false); + }; + let removed = table + .get_mut("mcp_servers") + .and_then(toml::Value::as_table_mut) + .map(|servers| servers.remove(id).is_some()) + .unwrap_or(false); + + if removed { + write_toml_file(&path, &root)?; + } + + Ok(removed) +} + fn scan_local_servers() -> Result, AppCommandError> { let mut merged: BTreeMap)> = BTreeMap::new(); @@ -2072,6 +2187,13 @@ fn scan_local_servers() -> Result, AppCommandError> { entry.1.insert(McpAppType::Cline); } + for (id, spec) in read_grok_servers()? 
{ + let entry = merged + .entry(id) + .or_insert_with(|| (spec.clone(), BTreeSet::new())); + entry.1.insert(McpAppType::Grok); + } + Ok(merged .into_iter() .map(|(id, (spec, apps))| LocalMcpServer { @@ -2095,6 +2217,7 @@ fn upsert_server_for_app(app: McpAppType, id: &str, spec: &Value) -> Result<(), McpAppType::Gemini => upsert_gemini_server(id, spec), McpAppType::OpenClaw => upsert_openclaw_server(id, spec), McpAppType::Cline => upsert_cline_server(id, spec), + McpAppType::Grok => upsert_grok_server(id, spec), } } @@ -2109,6 +2232,7 @@ pub fn read_servers_for_agent_type( AgentType::Gemini => read_gemini_servers(), AgentType::OpenClaw => read_openclaw_servers(), AgentType::Cline => read_cline_servers(), + AgentType::Grok => read_grok_servers(), } } @@ -2120,6 +2244,7 @@ fn remove_server_for_app(app: McpAppType, id: &str) -> Result remove_gemini_server(id), McpAppType::OpenClaw => remove_openclaw_server(id), McpAppType::Cline => remove_cline_server(id), + McpAppType::Grok => remove_grok_server(id), } } @@ -3982,6 +4107,48 @@ mod tests { } } + #[test] + fn grok_entry_matches_cli_toml_shape() { + let stdio = serde_json::json!({ + "type": "stdio", + "command": "node", + "args": ["server.js"], + "env": { "A": "B" }, + }); + let value = canonical_to_grok_entry(&stdio).expect("grok stdio entry"); + let table = value.as_table().expect("grok stdio table"); + assert_eq!(table.get("type"), None); + assert_eq!( + table.get("enabled").and_then(toml::Value::as_bool), + Some(true) + ); + assert_eq!( + table.get("command").and_then(toml::Value::as_str), + Some("node") + ); + + let remote = serde_json::json!({ + "type": "http", + "url": "https://mcp.example.com/mcp", + "headers": { "Authorization": "Bearer token" }, + }); + let value = canonical_to_grok_entry(&remote).expect("grok http entry"); + let table = value.as_table().expect("grok http table"); + assert_eq!( + table.get("type").and_then(toml::Value::as_str), + Some("http") + ); + assert_eq!( + 
table.get("enabled").and_then(toml::Value::as_bool), + Some(true) + ); + assert!(table.get("http_headers").is_none()); + assert!(table + .get("headers") + .and_then(toml::Value::as_table) + .is_some()); + } + #[test] fn transport_protocol_normalizes_aliases() { assert_eq!(transport_protocol("stdio"), Some("stdio".to_string())); diff --git a/src-tauri/src/db/service/agent_setting_service.rs b/src-tauri/src/db/service/agent_setting_service.rs index 7ee2fceb..b575ca4f 100644 --- a/src-tauri/src/db/service/agent_setting_service.rs +++ b/src-tauri/src/db/service/agent_setting_service.rs @@ -34,6 +34,7 @@ fn default_enabled(agent_type: AgentType) -> bool { | AgentType::OpenCode | AgentType::OpenClaw | AgentType::Cline + | AgentType::Grok ) } diff --git a/src-tauri/src/db/service/import_service.rs b/src-tauri/src/db/service/import_service.rs index c9cbc49f..497f6384 100644 --- a/src-tauri/src/db/service/import_service.rs +++ b/src-tauri/src/db/service/import_service.rs @@ -10,6 +10,7 @@ use crate::parsers::claude::ClaudeParser; use crate::parsers::cline::ClineParser; use crate::parsers::codex::CodexParser; use crate::parsers::gemini::GeminiParser; +use crate::parsers::grok::GrokParser; use crate::parsers::openclaw::OpenClawParser; use crate::parsers::opencode::OpenCodeParser; use crate::parsers::{path_eq_for_matching, AgentParser}; @@ -30,6 +31,7 @@ pub async fn import_local_conversations( (AgentType::Gemini, Box::new(GeminiParser::new())), (AgentType::OpenClaw, Box::new(OpenClawParser::new())), (AgentType::Cline, Box::new(ClineParser::new())), + (AgentType::Grok, Box::new(GrokParser::new())), ]; let mut matched = Vec::new(); diff --git a/src-tauri/src/models/agent.rs b/src-tauri/src/models/agent.rs index abe47136..b65387c7 100644 --- a/src-tauri/src/models/agent.rs +++ b/src-tauri/src/models/agent.rs @@ -10,6 +10,7 @@ pub enum AgentType { Gemini, OpenClaw, Cline, + Grok, } impl fmt::Display for AgentType { @@ -21,6 +22,7 @@ impl fmt::Display for AgentType { 
             AgentType::Gemini => write!(f, "Gemini CLI"),
             AgentType::OpenClaw => write!(f, "OpenClaw"),
             AgentType::Cline => write!(f, "Cline"),
+            AgentType::Grok => write!(f, "Grok"),
         }
     }
 }
diff --git a/src-tauri/src/parsers/grok.rs b/src-tauri/src/parsers/grok.rs
new file mode 100644
index 00000000..18dfa6a3
--- /dev/null
+++ b/src-tauri/src/parsers/grok.rs
@@ -0,0 +1,324 @@
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use chrono::{DateTime, Duration, Utc};
+use serde::Deserialize;
+use walkdir::WalkDir;
+
+use crate::models::{
+    AgentType, ContentBlock, ConversationDetail, ConversationSummary, MessageTurn, TurnRole,
+};
+use crate::parsers::{compute_session_stats, folder_name_from_path, AgentParser, ParseError};
+
+pub struct GrokParser {
+    base_dir: PathBuf,
+}
+
+impl GrokParser {
+    pub fn new() -> Self {
+        Self {
+            base_dir: dirs::home_dir()
+                .unwrap_or_else(|| PathBuf::from("."))
+                .join(".grok")
+                .join("sessions"),
+        }
+    }
+
+    #[cfg(test)]
+    fn with_base_dir(base_dir: PathBuf) -> Self {
+        Self { base_dir }
+    }
+
+    fn session_dir_by_id(&self, conversation_id: &str) -> Option<PathBuf> {
+        let direct = self.base_dir.join(conversation_id);
+        if direct.join("chat_history.jsonl").exists() {
+            return Some(direct);
+        }
+
+        WalkDir::new(&self.base_dir)
+            .min_depth(1)
+            .max_depth(3)
+            .into_iter()
+            .filter_map(Result::ok)
+            .find_map(|entry| {
+                if !entry.file_type().is_dir() {
+                    return None;
+                }
+                if entry.file_name().to_string_lossy() != conversation_id {
+                    return None;
+                }
+                let path = entry.into_path();
+                path.join("chat_history.jsonl").exists().then_some(path)
+            })
+    }
+
+    fn parse_summary(
+        &self,
+        session_dir: &Path,
+        conversation_id: &str,
+    ) -> Result<ConversationSummary, ParseError> {
+        let path = session_dir.join("summary.json");
+        let raw = fs::read_to_string(&path)?;
+        let summary: GrokSummaryFile = serde_json::from_str(&raw)?;
+        let cwd = summary.info.cwd.filter(|s| !s.trim().is_empty());
+        let title = summary
+            .generated_title
+            .or(summary.session_summary)
+            .filter(|s| !s.trim().is_empty());
+        let started_at = summary.created_at.unwrap_or_else(Utc::now);
+        let ended_at = summary.updated_at.or(summary.last_active_at);
+
+        Ok(ConversationSummary {
+            id: summary
+                .info
+                .id
+                .unwrap_or_else(|| conversation_id.to_string()),
+            agent_type: AgentType::Grok,
+            folder_name: cwd.as_ref().map(|p| folder_name_from_path(p)),
+            folder_path: cwd,
+            title,
+            started_at,
+            ended_at,
+            message_count: summary
+                .num_chat_messages
+                .or(summary.num_messages)
+                .unwrap_or(0),
+            model: summary.current_model_id,
+            git_branch: summary.head_branch,
+        })
+    }
+
+    fn parse_conversation_detail(
+        &self,
+        session_dir: &Path,
+        conversation_id: &str,
+    ) -> Result<ConversationDetail, ParseError> {
+        let summary = self.parse_summary(session_dir, conversation_id)?;
+        let turns =
+            parse_chat_history(&session_dir.join("chat_history.jsonl"), summary.started_at)?;
+        let session_stats = compute_session_stats(&turns);
+        Ok(ConversationDetail {
+            summary,
+            turns,
+            session_stats,
+        })
+    }
+}
+
+impl AgentParser for GrokParser {
+    fn list_conversations(&self) -> Result<Vec<ConversationSummary>, ParseError> {
+        if !self.base_dir.exists() {
+            return Ok(Vec::new());
+        }
+
+        let mut out = Vec::new();
+        for entry in WalkDir::new(&self.base_dir)
+            .min_depth(2)
+            .max_depth(3)
+            .into_iter()
+            .filter_map(Result::ok)
+        {
+            if !entry.file_type().is_file() || entry.file_name() != "summary.json" {
+                continue;
+            }
+            let Some(session_dir) = entry.path().parent() else {
+                continue;
+            };
+            if !session_dir.join("chat_history.jsonl").exists() {
+                continue;
+            }
+            let id = session_dir
+                .file_name()
+                .map(|s| s.to_string_lossy().to_string())
+                .unwrap_or_default();
+            if let Ok(summary) = self.parse_summary(session_dir, &id) {
+                out.push(summary);
+            }
+        }
+        out.sort_by(|a, b| b.started_at.cmp(&a.started_at));
+        Ok(out)
+    }
+
+    fn get_conversation(&self, conversation_id: &str) -> Result<ConversationDetail, ParseError> {
+        let session_dir = self
+            .session_dir_by_id(conversation_id)
+            .ok_or_else(|| ParseError::ConversationNotFound(conversation_id.to_string()))?;
+        self.parse_conversation_detail(&session_dir, conversation_id)
+    }
+}
+
+#[derive(Debug, Deserialize)]
+struct GrokSummaryFile {
+    info: GrokSummaryInfo,
+    #[serde(default)]
+    session_summary: Option<String>,
+    #[serde(default)]
+    generated_title: Option<String>,
+    #[serde(default)]
+    created_at: Option<DateTime<Utc>>,
+    #[serde(default)]
+    updated_at: Option<DateTime<Utc>>,
+    #[serde(default)]
+    last_active_at: Option<DateTime<Utc>>,
+    #[serde(default)]
+    num_messages: Option<usize>,
+    #[serde(default)]
+    num_chat_messages: Option<usize>,
+    #[serde(default)]
+    current_model_id: Option<String>,
+    #[serde(default)]
+    head_branch: Option<String>,
+}
+
+#[derive(Debug, Deserialize)]
+struct GrokSummaryInfo {
+    #[serde(default)]
+    id: Option<String>,
+    #[serde(default)]
+    cwd: Option<String>,
+}
+
+#[derive(Debug, Deserialize)]
+struct GrokChatRecord {
+    #[serde(rename = "type")]
+    kind: String,
+    #[serde(default)]
+    synthetic_reason: Option<String>,
+    #[serde(default)]
+    content: Option<serde_json::Value>,
+    #[serde(default)]
+    model_id: Option<String>,
+}
+
+fn parse_chat_history(
+    path: &Path,
+    started_at: DateTime<Utc>,
+) -> Result<Vec<MessageTurn>, ParseError> {
+    let raw = fs::read_to_string(path)?;
+    let mut turns = Vec::new();
+
+    for line in raw.lines() {
+        let trimmed = line.trim();
+        if trimmed.is_empty() {
+            continue;
+        }
+        let record: GrokChatRecord = serde_json::from_str(trimmed)?;
+        let Some(text) = record_text(&record).map(strip_user_query_wrapper) else {
+            continue;
+        };
+        if text.trim().is_empty() {
+            continue;
+        }
+
+        let role = match record.kind.as_str() {
+            "user" if record.synthetic_reason.is_none() && !is_grok_context_message(&text) => {
+                TurnRole::User
+            }
+            "assistant" => TurnRole::Assistant,
+            _ => continue,
+        };
+        let idx = i32::try_from(turns.len()).unwrap_or(i32::MAX);
+        turns.push(MessageTurn {
+            id: format!("turn-{}", turns.len()),
+            role,
+            blocks: vec![ContentBlock::Text { text }],
+            timestamp: started_at + Duration::seconds(i64::from(idx)),
+            usage: None,
+            duration_ms: None,
+            model: record.model_id,
+            completed_at: None,
+        });
+    }
+
+    Ok(turns)
+}
+
+fn record_text(record: &GrokChatRecord) -> Option<String> {
+    match record.content.as_ref()? {
+        serde_json::Value::String(s) => Some(s.clone()),
+        serde_json::Value::Array(items) => {
+            let parts = items
+                .iter()
+                .filter_map(|item| item.get("text").and_then(|v| v.as_str()))
+                .map(str::trim)
+                .filter(|s| !s.is_empty())
+                .collect::<Vec<_>>();
+            (!parts.is_empty()).then(|| parts.join("\n\n"))
+        }
+        _ => None,
+    }
+}
+
+fn strip_user_query_wrapper(text: String) -> String {
+    let trimmed = text.trim();
+    if let Some(inner) = trimmed
+        .strip_prefix("<user_query>")
+        .and_then(|s| s.strip_suffix("</user_query>"))
+    {
+        return inner.trim().to_string();
+    }
+    trimmed.to_string()
+}
+
+fn is_grok_context_message(text: &str) -> bool {
+    let trimmed = text.trim_start();
+    trimmed.starts_with("<system_context>")
+        || trimmed.starts_with("<environment_context>")
+        || trimmed.starts_with("<project_context>")
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::models::ContentBlock;
+
+    #[test]
+    fn parses_chat_history_and_filters_context_messages() {
+        let temp = tempfile::tempdir().unwrap();
+        let session_dir = temp.path().join("cwd").join("session-1");
+        fs::create_dir_all(&session_dir).unwrap();
+        fs::write(
+            session_dir.join("summary.json"),
+            r#"{
+  "info": {"id": "session-1", "cwd": "/tmp/project"},
+  "generated_title": "Smoke Test",
+  "created_at": "2026-05-15T19:44:35.810223Z",
+  "updated_at": "2026-05-15T19:44:43.430756Z",
+  "num_chat_messages": 7,
+  "current_model_id": "grok-build",
+  "head_branch": "main"
+}"#,
+        )
+        .unwrap();
+        fs::write(
+            session_dir.join("chat_history.jsonl"),
+            [
+                r#"{"type":"system","content":"ignore"}"#,
+                r#"{"type":"user","content":[{"type":"text","text":"<environment_context>\nignore\n</environment_context>"}]}"#,
+                r#"{"type":"user","synthetic_reason":"project_instructions","content":[{"type":"text","text":"ignore"}]}"#,
+                r#"{"type":"user","content":[{"type":"text","text":"<user_query>\nHello Grok\n</user_query>"}]}"#,
+                r#"{"type":"assistant","model_id":"grok-build","content":"Hello user"}"#,
+            ]
+            .join("\n"),
+        )
+        .unwrap();
+
+        let detail = GrokParser::with_base_dir(temp.path().to_path_buf())
.get_conversation("session-1") + .unwrap(); + + assert_eq!(detail.summary.id, "session-1"); + assert_eq!(detail.summary.folder_path.as_deref(), Some("/tmp/project")); + assert_eq!(detail.turns.len(), 2); + assert!(matches!(detail.turns[0].role, TurnRole::User)); + assert!(matches!(detail.turns[1].role, TurnRole::Assistant)); + match &detail.turns[0].blocks[0] { + ContentBlock::Text { text } => assert_eq!(text, "Hello Grok"), + other => panic!("expected text block, got {other:?}"), + } + match &detail.turns[1].blocks[0] { + ContentBlock::Text { text } => assert_eq!(text, "Hello user"), + other => panic!("expected text block, got {other:?}"), + } + } +} diff --git a/src-tauri/src/parsers/mod.rs b/src-tauri/src/parsers/mod.rs index 0cecf35f..fb7462db 100644 --- a/src-tauri/src/parsers/mod.rs +++ b/src-tauri/src/parsers/mod.rs @@ -2,6 +2,7 @@ pub mod claude; pub mod cline; pub mod codex; pub mod gemini; +pub mod grok; pub mod openclaw; pub mod opencode; diff --git a/src/components/agent-icon.tsx b/src/components/agent-icon.tsx index 762490cf..73487b5c 100644 --- a/src/components/agent-icon.tsx +++ b/src/components/agent-icon.tsx @@ -4,7 +4,7 @@ import type { AgentType } from "@/lib/types" import { AGENT_COLORS } from "@/lib/types" import { cn } from "@/lib/utils" -import { Cline, GeminiCLI, OpenClaw, OpenCode } from "@lobehub/icons" +import { Cline, GeminiCLI, Grok, OpenClaw, OpenCode } from "@lobehub/icons" interface AgentIconProps { agentType: AgentType @@ -91,6 +91,7 @@ const COLOR_ICONS: Partial> = { const MONO_ICONS: Partial> = { open_code: OpenCode, cline: Cline, + grok: Grok, } // Text-color versions for Mono icons diff --git a/src/components/settings/acp-agent-settings.tsx b/src/components/settings/acp-agent-settings.tsx index f8a50222..8bade08a 100644 --- a/src/components/settings/acp-agent-settings.tsx +++ b/src/components/settings/acp-agent-settings.tsx @@ -7176,7 +7176,7 @@ supports_websockets = true`} - ) : ( + ) : selectedAgent.agent_type === "grok" ? 
null : (