From 85cbee6f82235d60b53ff86a59dfd75b280767f3 Mon Sep 17 00:00:00 2001
From: Ads Dawson <104169244+GangGreenTemperTatum@users.noreply.github.com>
Date: Fri, 31 Jan 2025 14:29:38 -0500
Subject: [PATCH 1/3] chore: do not fail serve on container pulls

---
 src/runtime/docker.rs | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/runtime/docker.rs b/src/runtime/docker.rs
index acce9ca..9641de2 100644
--- a/src/runtime/docker.rs
+++ b/src/runtime/docker.rs
@@ -73,7 +73,7 @@ async fn run_command(command: &str, args: &[&str]) -> anyhow::Result<()> {
 }
 
 pub(crate) async fn pull_image(image: &str, platform: Option<String>) -> anyhow::Result<()> {
-    run_command(
+    let result = run_command(
         "sh",
         &[
             "-c",
@@ -87,7 +87,13 @@ pub(crate) async fn pull_image(image: &str, platform: Option<String>) -> anyhow:
             ),
         ],
     )
-    .await
+    .await;
+
+    if let Err(e) = result {
+        log::error!("Docker pull encountered an error: {}", e);
+    }
+
+    Ok(())
 }
 
 pub(crate) async fn build_image(name: &str, path: &str) -> anyhow::Result<()> {

From 0e31db63d32dda4c42ec2817b56b20d71bc32014 Mon Sep 17 00:00:00 2001
From: Ads Dawson <104169244+GangGreenTemperTatum@users.noreply.github.com>
Date: Sun, 2 Feb 2025 13:56:40 -0500
Subject: [PATCH 2/3] fix: use clone and consistent pull function build calls
 across robopages

---
 src/book/mod.rs       | 61 ++++++++++++++++++++++++++++++++++---------
 src/book/runtime.rs   |  2 +-
 src/cli/serve.rs      | 37 +++++++++++++++++++-------
 src/runtime/docker.rs | 13 +++------
 4 files changed, 80 insertions(+), 33 deletions(-)

diff --git a/src/book/mod.rs b/src/book/mod.rs
index e378cff..80d0439 100644
--- a/src/book/mod.rs
+++ b/src/book/mod.rs
@@ -1,4 +1,4 @@
-use std::collections::{BTreeMap, HashMap};
+use std::collections::{BTreeMap, HashMap, HashSet};
 
 use camino::Utf8PathBuf;
 use glob::glob;
@@ -27,7 +27,15 @@ macro_rules! eval_if_in_filter {
     };
 }
 
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum ExecutionContext {
+    #[serde(rename = "cmdline")]
+    CommandLine(Vec<String>),
+    #[serde(rename = "platforms")]
+    PlatformSpecific(BTreeMap<String, Vec<String>>),
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Parameter {
     #[serde(rename = "type")]
     pub param_type: String,
@@ -42,7 +50,7 @@ fn default_required() -> bool {
     true
 }
 
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Container {
     #[serde(flatten)]
     pub source: ContainerSource,
@@ -143,7 +151,7 @@ impl Container {
 
 // TODO: add optional parsers to reduce output tokens
 
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Function {
     pub description: String,
     pub parameters: BTreeMap<String, Parameter>,
@@ -153,7 +161,7 @@ pub struct Function {
     pub execution: runtime::ExecutionContext,
 }
 
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Page {
     #[serde(skip_serializing_if = "String::is_empty")]
     #[serde(default = "String::new")]
@@ -185,9 +193,10 @@ impl Page {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct Book {
     pub pages: BTreeMap<Utf8PathBuf, Page>,
+    pub failed_containers: HashSet<String>,
 }
 
 impl Book {
@@ -313,7 +322,14 @@ impl Book {
             pages.insert(page_path, page);
         }
 
-        Ok(Self { pages })
+        Ok(Self {
+            pages,
+            failed_containers: HashSet::new(),
+        })
+    }
+
+    pub fn mark_failed_container(&mut self, func_name: String) {
+        self.failed_containers.insert(func_name);
     }
 
     pub fn size(&self) -> usize {
@@ -335,20 +351,36 @@ impl Book {
         Err(anyhow::anyhow!("function {} not found", name))
     }
 
-    pub fn as_tools<'a, T>(&'a self, filter: Option<String>) -> Vec<T>
+    // Modify as_tools to filter out failed functions
+    pub fn as_tools<T>(&self, filter: Option<String>) -> Vec<T>
     where
-        Vec<T>: std::convert::From<&'a Page>,
+        for<'a> Vec<T>: From<&'a Page>,
     {
         let mut tools = Vec::new();
 
-        for (page_path, page) in &self.pages {
+            // Create filtered page in its own scope with proper ownership
+            let filtered_page = {
+                // Filter out failed functions
+                let filtered_functions = page.functions
+                    .iter()
+                    .filter(|(name, _)| !self.failed_containers.contains(*name))
+                    .map(|(name, func)| (name.clone(), func.clone()))
+                    .collect();
+
+                Page {
+                    name: page.name.clone(),
+                    description: page.description.clone(),
+                    functions: filtered_functions,
+                    categories: page.categories.clone(),
+                }
+            };
+
             eval_if_in_filter!(
                 page_path,
                 filter,
-                tools.extend(<&Page as Into<Vec<T>>>::into(page))
+                tools.extend(Vec::<T>::from(&filtered_page))
             );
         }
 
-        tools
     }
 }
@@ -381,7 +413,10 @@ mod tests {
             },
         );
         pages.insert(Utf8PathBuf::from("test_page"), page);
-        Book { pages }
+        Book {
+            pages,
+            failed_containers: HashSet::new(),
+        }
     }
 
     #[test]
diff --git a/src/book/runtime.rs b/src/book/runtime.rs
index 1f9caeb..7f9e1a5 100644
--- a/src/book/runtime.rs
+++ b/src/book/runtime.rs
@@ -114,7 +114,7 @@ impl std::fmt::Display for ExecutionFlavor {
     }
 }
 
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub enum ExecutionContext {
     #[serde(rename = "cmdline")]
     CommandLine(Vec<String>),
diff --git a/src/cli/serve.rs b/src/cli/serve.rs
index ea10723..a2de9d2 100644
--- a/src/cli/serve.rs
+++ b/src/cli/serve.rs
@@ -1,5 +1,7 @@
 use std::collections::HashMap;
+use std::collections::HashSet;
 use std::sync::Arc;
+use tokio::sync::Mutex;
 
 use actix_cors::Cors;
 use actix_web::web;
@@ -20,7 +22,7 @@ use super::ServeArgs;
 
 struct AppState {
     max_running_tasks: usize,
-    book: Arc<Book>,
+    book: Arc<Mutex<Book>>,
     ssh: Option,
 }
 
@@ -38,13 +40,13 @@ async fn serve_pages_impl(
 
     match flavor {
         Flavor::Nerve => {
-            Ok(HttpResponse::Ok().json(state.book.as_tools::(filter)))
+            Ok(HttpResponse::Ok().json(state.book.lock().await.as_tools::(filter)))
         }
         Flavor::Rigging => {
-            Ok(HttpResponse::Ok().json(state.book.as_tools::(filter)))
+            Ok(HttpResponse::Ok().json(state.book.lock().await.as_tools::(filter)))
         }
         // default to openai
-        _ => Ok(HttpResponse::Ok().json(state.book.as_tools::(filter))),
+        _ => Ok(HttpResponse::Ok().json(state.book.lock().await.as_tools::(filter))),
     }
 }
 
@@ -67,10 +69,11 @@ async fn process_calls(
     state: web::Data<AppState>,
     calls: web::Json<Vec<openai::Call>>,
 ) -> actix_web::Result<HttpResponse> {
+    let book = state.book.lock().await;
     match runtime::execute(
         state.ssh.clone(),
         false,
-        state.book.clone(),
+        Arc::new(book.clone()),
         calls.0,
         state.max_running_tasks,
     )
@@ -98,16 +101,30 @@ pub(crate) async fn serve(args: ServeArgs) -> anyhow::Result<()> {
         None
     };
 
-    let book = Arc::new(Book::from_path(args.path, args.filter)?);
+    let book = Book::from_path(args.path, args.filter)?;
+    let book = Arc::new(Mutex::new(book));
+
     if !args.lazy {
-        for page in book.pages.values() {
-            for (func_name, func) in page.functions.iter() {
+        let mut book_guard = book.lock().await;
+        let mut failed_containers = HashSet::new();
+
+        // First collect all failures
+        for (_, page) in &book_guard.pages {
+            for (func_name, func) in &page.functions {
                 if let Some(container) = &func.container {
                     log::info!("pre building container for function {} ...", func_name);
-                    container.resolve().await?;
+                    if let Err(e) = container.resolve().await {
+                        log::error!("Failed to resolve container for function {}: {}", func_name, e);
+                        failed_containers.insert(func_name.clone());
+                    }
                 }
             }
         }
+
+        // Then update the failed containers
+        for func_name in failed_containers {
+            book_guard.mark_failed_container(func_name);
+        }
     }
 
     let max_running_tasks = if args.workers == 0 {
@@ -118,7 +135,7 @@ pub(crate) async fn serve(args: ServeArgs) -> anyhow::Result<()> {
 
     log::info!(
         "serving {} pages on http://{} with {max_running_tasks} max running tasks",
-        book.size(),
+        book.lock().await.size(),
         &args.address,
     );
 
diff --git a/src/runtime/docker.rs b/src/runtime/docker.rs
index 9641de2..94681b5 100644
--- a/src/runtime/docker.rs
+++ b/src/runtime/docker.rs
@@ -10,7 +10,7 @@ use tokio::{
     task,
 };
 
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Serialize, Deserialize, Clone)]
 pub enum ContainerSource {
     #[serde(rename = "image")]
     Image(String),
@@ -73,7 +73,7 @@ async fn run_command(command: &str, args: &[&str]) -> anyhow::Result<()> {
 }
 
 pub(crate) async fn pull_image(image: &str, platform: Option<String>) -> anyhow::Result<()> {
-    let result = run_command(
+    run_command(
         "sh",
         &[
             "-c",
@@ -87,13 +87,8 @@ pub(crate) async fn pull_image(image: &str, platform: Option<String>) -> anyhow:
             ),
         ],
     )
-    .await;
-
-    if let Err(e) = result {
-        log::error!("Docker pull encountered an error: {}", e);
-    }
-
-    Ok(())
+    .await
+    .map_err(|e| anyhow::anyhow!("Docker pull encountered an error: {}: {}", image, e))
 }
 
 pub(crate) async fn build_image(name: &str, path: &str) -> anyhow::Result<()> {

From 1b38d07f186e9e12320af5facc200deaea1c79f0 Mon Sep 17 00:00:00 2001
From: Ads Dawson <104169244+GangGreenTemperTatum@users.noreply.github.com>
Date: Sun, 2 Feb 2025 14:09:48 -0500
Subject: [PATCH 3/3] fix: try fix ci E0063

---
 src/runtime/mod.rs | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/src/runtime/mod.rs b/src/runtime/mod.rs
index a99c599..28871c9 100644
--- a/src/runtime/mod.rs
+++ b/src/runtime/mod.rs
@@ -217,6 +217,14 @@ mod tests {
 
     use super::*;
     use std::collections::BTreeMap;
+    use std::collections::HashSet;
+
+    fn create_test_book() -> Arc<Book> {
+        Arc::new(Book {
+            pages: BTreeMap::new(),
+            failed_containers: HashSet::new(),
+        })
+    }
 
     #[tokio::test]
     async fn test_execute_call() {
@@ -257,6 +265,7 @@ mod tests {
                 map.insert(camino::Utf8PathBuf::from("test_page"), mock_page);
                 map
             },
+            failed_containers: HashSet::new(),
         });
 
         let result = execute_call(None, false, 10, book, call).await.unwrap();
@@ -327,6 +336,7 @@ mod tests {
                 map.insert(camino::Utf8PathBuf::from("test_page"), mock_page);
                 map
             },
+            failed_containers: HashSet::new(),
         });
 
         let results = execute(None, false, book, calls, 10).await.unwrap();
@@ -352,6 +362,7 @@ mod tests {
                 );
                 map
             },
+            failed_containers: HashSet::new(),
         });
 
         let calls = vec![openai::Call {
@@ -397,6 +408,7 @@ mod tests {
                 );
                 map
             },
+            failed_containers: HashSet::new(),
         });
 
         let calls = vec![openai::Call {