diff --git a/Cargo.lock b/Cargo.lock index 9e70751..15df36c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -40,7 +40,7 @@ checksum = "e9d4ee0d472d1cd2e28c97dfa124b3d8d992e10eb0a035f33f5d12e3a177ba3b" [[package]] name = "amplifier-core" -version = "1.1.0" +version = "1.1.1" dependencies = [ "chrono", "log", @@ -63,7 +63,7 @@ dependencies = [ [[package]] name = "amplifier-core-node" -version = "1.0.10" +version = "1.1.1" dependencies = [ "amplifier-core", "log", @@ -76,7 +76,7 @@ dependencies = [ [[package]] name = "amplifier-core-py" -version = "1.1.0" +version = "1.1.1" dependencies = [ "amplifier-core", "log", diff --git a/bindings/node/Cargo.toml b/bindings/node/Cargo.toml index e4c1da4..a04ecd8 100644 --- a/bindings/node/Cargo.toml +++ b/bindings/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "amplifier-core-node" -version = "1.0.10" +version = "1.1.1" edition = "2021" description = "Napi-RS bridge for amplifier-core Rust kernel" license = "MIT" diff --git a/bindings/node/__tests__/coordinator.test.ts b/bindings/node/__tests__/coordinator.test.ts index 747b07d..fff41e4 100644 --- a/bindings/node/__tests__/coordinator.test.ts +++ b/bindings/node/__tests__/coordinator.test.ts @@ -30,21 +30,25 @@ describe('JsCoordinator', () => { expect(result).toBeNull() }) - // createHookRegistry() creates a NEW detached instance each call — this is the - // known limitation documented by the rename from `.hooks` getter. Use a - // shared JsHookRegistry if you need persistent hook registration. - it('createHookRegistry() returns a JsHookRegistry with listHandlers', () => { + // hooks is now a getter that returns the coordinator's real Arc. 
+ it('hooks getter returns a JsHookRegistry with listHandlers', () => { const coord = new JsCoordinator(emptyConfig) - const hooks = coord.createHookRegistry() + const hooks = coord.hooks expect(hooks).toBeDefined() expect(typeof hooks.listHandlers).toBe('function') }) - it('createHookRegistry creates a new instance each call (pins detached behavior)', () => { + it('hooks getter shares state — handlers registered on one call are visible on next', () => { const coord = new JsCoordinator(emptyConfig) - const h1 = coord.createHookRegistry() - const h2 = coord.createHookRegistry() - expect(h1).not.toBe(h2) + const h1 = coord.hooks + // Register a handler on h1 + h1.register('test:event', (_event: string, _data: string) => { + return JSON.stringify({ action: 'Continue' }) + }, 0, 'test-handler') + // h2 should see the same handler + const h2 = coord.hooks + const handlers = h2.listHandlers() + expect(handlers['test:event']).toContain('test-handler') }) it('provides access to cancellation subsystem (coord.cancellation.isCancelled === false)', () => { diff --git a/bindings/node/__tests__/integration.test.ts b/bindings/node/__tests__/integration.test.ts index fb75a83..9b51b78 100644 --- a/bindings/node/__tests__/integration.test.ts +++ b/bindings/node/__tests__/integration.test.ts @@ -18,8 +18,8 @@ describe('Full session lifecycle', () => { expect(session.sessionId).toBeTruthy() expect(session.isInitialized).toBe(false) - // Access coordinator (createCoordinator returns a new instance from cached config) - const coord = session.createCoordinator() + // Access coordinator (getter returns session's real Arc) + const coord = session.coordinator expect(coord).toBeDefined() // Register capability and verify roundtrip diff --git a/bindings/node/__tests__/session.test.ts b/bindings/node/__tests__/session.test.ts index e43e052..f5b1a83 100644 --- a/bindings/node/__tests__/session.test.ts +++ b/bindings/node/__tests__/session.test.ts @@ -34,22 +34,26 @@ 
describe('JsAmplifierSession', () => { expect(session.status).toBe('running') }) - // createCoordinator() creates a NEW Coordinator from cached config each call — - // this is the known limitation documented by the rename from `.coordinator` getter. - it('createCoordinator() returns a coordinator built from session config', () => { + // coordinator is now a getter that returns the session's real Arc. + it('coordinator getter returns the session coordinator', () => { const session = new JsAmplifierSession(validConfig) - const coord = session.createCoordinator() + const coord = session.coordinator expect(coord).toBeDefined() // Verify coordinator was constructed from the session's config, not a default const coordConfig = JSON.parse(coord.config) expect(coordConfig).toHaveProperty('session') }) - it('createCoordinator creates a new instance each call (pins detached behavior)', () => { + it('coordinator getter returns the same instance on repeated calls', () => { const session = new JsAmplifierSession(validConfig) - const c1 = session.createCoordinator() - const c2 = session.createCoordinator() - expect(c1).not.toBe(c2) + const c1 = session.coordinator + const c2 = session.coordinator + // Both should wrap the same underlying Arc. + // We verify by registering a capability on one and reading it from the other. + c1.registerCapability('test-cap', JSON.stringify(true)) + const result = c2.getCapability('test-cap') + expect(result).not.toBeNull() + expect(JSON.parse(result as string)).toBe(true) }) it('setInitialized marks session as initialized', () => { diff --git a/bindings/node/index.d.ts b/bindings/node/index.d.ts index ba18601..b06fa5d 100644 --- a/bindings/node/index.d.ts +++ b/bindings/node/index.d.ts @@ -162,22 +162,13 @@ export declare class JsCoordinator { registerCapability(name: string, valueJson: string): void getCapability(name: string): string | null /** - * Creates a new **detached** (empty) JsHookRegistry. 
+ * The coordinator's hook registry — shared via `Arc`, not copied. * - * ⚠吅 **Each call returns a brand-new, empty registry** — hooks registered - * on one instance are invisible to the next. This is a known limitation: - * `Coordinator` owns its `HookRegistry` by value, not behind `Arc`, so - * the binding cannot share state across calls. - * - * The method name (`createHookRegistry`) intentionally signals "creates new - * instance" — a getter property would imply referential stability in JS. - * - * **Workaround:** create a `JsHookRegistry` directly and hold a reference. - * - * Future TODO #1: restructure the kernel to hold `Arc` inside - * `Coordinator` so this method can share the same registry instance. + * Returns a `JsHookRegistry` wrapping the coordinator's real + * `Arc` obtained via `hooks_shared()`. Hooks registered + * on the returned instance are visible to the Coordinator and vice versa. */ - createHookRegistry(): JsHookRegistry + get hooks(): JsHookRegistry get cancellation(): JsCancellationToken get config(): string resetTurn(): void @@ -190,9 +181,9 @@ export declare class JsCoordinator { * Lifecycle: `new AmplifierSession(config) → initialize() → execute(prompt) → cleanup()`. * Wires together Coordinator, HookRegistry, and CancellationToken. * - * Known limitation: `coordinator` getter creates a separate Coordinator instance - * because the kernel Session owns its Coordinator by value, not behind Arc. - * Sharing requires restructuring the Rust kernel — tracked as Future TODO #1. + * The `coordinator` getter returns the session's real `Arc`, + * and `coordinator.hooks` returns the real `Arc` — both + * shared, not copied. */ export declare class JsAmplifierSession { constructor(configJson: string, sessionId?: string | undefined | null, parentId?: string | undefined | null) @@ -212,23 +203,15 @@ export declare class JsAmplifierSession { */ get status(): string /** - * Creates a new **fresh** JsCoordinator from this session's cached config. 
- * - * ⚠吅 **Each call allocates a new Coordinator** — capabilities registered on - * one instance are invisible to the next. This is a known limitation: - * `Session` owns its `Coordinator` by value, not behind `Arc`, so the - * binding cannot expose the session's live coordinator. - * - * The method name (`createCoordinator`) intentionally signals "creates new - * instance" — a getter property would imply referential stability in JS. + * The session's coordinator — shared via `Arc`, not copied. * - * **Workaround:** call `createCoordinator()` once, hold the returned instance, - * and register capabilities on it before passing it to other APIs. + * Returns a `JsCoordinator` wrapping the session's real `Arc`. + * Repeated calls return the same underlying coordinator instance. * - * Future TODO #1: restructure the kernel to hold `Arc` inside - * `Session` so this method can return a handle to the session's actual coordinator. + * Takes `&mut self` because the first call caches the coordinator internally. + * This is safe because NAPI JS objects are single-threaded — no concurrent access. */ - createCoordinator(): JsCoordinator + get coordinator(): JsCoordinator setInitialized(): void cleanup(): Promise } diff --git a/bindings/node/src/lib.rs b/bindings/node/src/lib.rs index 8251161..5ae23e0 100644 --- a/bindings/node/src/lib.rs +++ b/bindings/node/src/lib.rs @@ -385,18 +385,6 @@ impl JsHookRegistry { } } - /// Creates a new **detached** (empty) registry. - /// - /// Unlike `JsCancellationToken::from_inner`, HookRegistry cannot be cheaply - /// cloned or wrapped from a reference, so this always creates an empty - /// registry. When Coordinator manages ownership, this should accept - /// `Arc` to share state. - pub fn new_detached() -> Self { - Self { - inner: Arc::new(amplifier_core::HookRegistry::new()), - } - } - /// Register a hook handler for the given event name. 
/// /// ## Handler signature @@ -519,28 +507,16 @@ impl JsCoordinator { } } - /// Creates a new **detached** (empty) JsHookRegistry. - /// - /// ⚠️ **Each call returns a brand-new, empty registry** — hooks registered - /// on one instance are invisible to the next. This is a known limitation: - /// `Coordinator` owns its `HookRegistry` by value, not behind `Arc`, so - /// the binding cannot share state across calls. - /// - /// The method name (`createHookRegistry`) intentionally signals "creates new - /// instance" — a getter property would imply referential stability in JS. + /// The coordinator's hook registry — shared via `Arc`, not copied. /// - /// **Workaround:** create a `JsHookRegistry` directly and hold a reference. - /// - /// Future TODO #1: restructure the kernel to hold `Arc` inside - /// `Coordinator` so this method can share the same registry instance. - #[napi] - pub fn create_hook_registry(&self) -> JsHookRegistry { - log::warn!( - "JsCoordinator::createHookRegistry() — returns a new detached HookRegistry; \ - hooks registered on one call are NOT visible via the Coordinator's internal \ - registry. Hold the returned instance directly. (Future TODO #1)" - ); - JsHookRegistry::new_detached() + /// Returns a `JsHookRegistry` wrapping the coordinator's real + /// `Arc` obtained via `hooks_shared()`. Hooks registered + /// on the returned instance are visible to the Coordinator and vice versa. + #[napi(getter)] + pub fn hooks(&self) -> JsHookRegistry { + JsHookRegistry { + inner: self.inner.hooks_shared(), + } } #[napi(getter)] @@ -579,15 +555,15 @@ impl JsCoordinator { /// Lifecycle: `new AmplifierSession(config) → initialize() → execute(prompt) → cleanup()`. /// Wires together Coordinator, HookRegistry, and CancellationToken. /// -/// Known limitation: `coordinator` getter creates a separate Coordinator instance -/// because the kernel Session owns its Coordinator by value, not behind Arc. 
-/// Sharing requires restructuring the Rust kernel — tracked as Future TODO #1. +/// The `coordinator` getter returns the session's real `Arc`, +/// and `coordinator.hooks` returns the real `Arc` — both +/// shared, not copied. #[napi] pub struct JsAmplifierSession { inner: Arc>, cached_session_id: String, cached_parent_id: Option, - cached_config: HashMap, + cached_coordinator: Option, } #[napi] @@ -601,12 +577,9 @@ impl JsAmplifierSession { let value: serde_json::Value = serde_json::from_str(&config_json) .map_err(|e| Error::from_reason(format!("Invalid config JSON: {e}")))?; - let config = amplifier_core::SessionConfig::from_value(value.clone()) + let config = amplifier_core::SessionConfig::from_value(value) .map_err(|e| Error::from_reason(e.to_string()))?; - let cached_config: HashMap = serde_json::from_value(value) - .map_err(|e| Error::from_reason(format!("invalid JSON: {e}")))?; - let session = amplifier_core::Session::new(config, session_id.clone(), parent_id.clone()); let cached_session_id = session.session_id().to_string(); @@ -614,7 +587,7 @@ impl JsAmplifierSession { inner: Arc::new(Mutex::new(session)), cached_session_id, cached_parent_id: parent_id, - cached_config, + cached_coordinator: None, }) } @@ -657,42 +630,45 @@ impl JsAmplifierSession { } } - /// Creates a new **fresh** JsCoordinator from this session's cached config. - /// - /// ⚠️ **Each call allocates a new Coordinator** — capabilities registered on - /// one instance are invisible to the next. This is a known limitation: - /// `Session` owns its `Coordinator` by value, not behind `Arc`, so the - /// binding cannot expose the session's live coordinator. - /// - /// The method name (`createCoordinator`) intentionally signals "creates new - /// instance" — a getter property would imply referential stability in JS. + /// The session's coordinator — shared via `Arc`, not copied. 
/// - /// **Workaround:** call `createCoordinator()` once, hold the returned instance, - /// and register capabilities on it before passing it to other APIs. + /// Returns a `JsCoordinator` wrapping the session's real `Arc`. + /// Repeated calls return the same underlying coordinator instance. /// - /// Future TODO #1: restructure the kernel to hold `Arc` inside - /// `Session` so this method can return a handle to the session's actual coordinator. - #[napi] - pub fn create_coordinator(&self) -> JsCoordinator { - log::warn!( - "JsAmplifierSession::createCoordinator() — returns a new Coordinator built from \ - cached config; capabilities registered on one call are NOT visible on the next. \ - Hold the returned instance directly. (Future TODO #1)" - ); - JsCoordinator { - inner: Arc::new(amplifier_core::Coordinator::new(self.cached_config.clone())), + /// Takes `&mut self` because the first call caches the coordinator internally. + /// This is safe because NAPI JS objects are single-threaded — no concurrent access. + #[napi(getter)] + pub fn coordinator(&mut self) -> JsCoordinator { + if let Some(ref cached) = self.cached_coordinator { + return JsCoordinator { + inner: Arc::clone(&cached.inner), + }; } + // First call: extract the Arc from the session. + // try_lock is safe here — the Mutex is only held during async execute/cleanup. 
+ let coord_arc = match self.inner.try_lock() { + Ok(session) => session.coordinator_shared(), + Err(_) => { + log::warn!( + "JsAmplifierSession::coordinator() — session lock held, \ + creating coordinator from default config as fallback" + ); + Arc::new(amplifier_core::Coordinator::new(Default::default())) + } + }; + let js_coord = JsCoordinator { inner: coord_arc }; + self.cached_coordinator = Some(JsCoordinator { + inner: Arc::clone(&js_coord.inner), + }); + js_coord } #[napi] pub fn set_initialized(&self) { match self.inner.try_lock() { Ok(session) => session.set_initialized(), - // State mutation failed — unlike read-only getters, this warrants a warning. - // Lock contention only occurs during async cleanup(), so this is unlikely - // in practice, but callers should know the mutation didn't happen. - Err(_) => eprintln!( - "amplifier-core-node: set_initialized() skipped — session lock held (cleanup in progress?)" + Err(_) => log::warn!( + "JsAmplifierSession::set_initialized() skipped — session lock held (cleanup in progress?)" ), } } diff --git a/bindings/python/src/lib.rs b/bindings/python/src/lib.rs index 49051d7..5052235 100644 --- a/bindings/python/src/lib.rs +++ b/bindings/python/src/lib.rs @@ -23,7 +23,7 @@ use pyo3::prelude::*; use pyo3::types::{PyDict, PyList}; use serde_json::Value; -use amplifier_core::errors::HookError; +use amplifier_core::errors::{AmplifierError, HookError, SessionError}; use amplifier_core::models::{HookAction, HookResult}; use amplifier_core::traits::HookHandler; @@ -74,12 +74,19 @@ impl HookHandler for PyHookHandlerBridge { data: Value, ) -> Pin> + Send + '_>> { let event = event.to_string(); - // Clone the Py reference inside the GIL to safely move into async block - let callable = Python::try_attach(|py| Ok::<_, PyErr>(self.callable.clone_ref(py))) - .unwrap() - .unwrap(); Box::pin(async move { + // Clone the Py reference inside the GIL to safely use in this async block + let callable = Python::try_attach(|py| Ok::<_, 
PyErr>(self.callable.clone_ref(py))) + .ok_or_else(|| HookError::HandlerFailed { + message: "Failed to attach to Python runtime".to_string(), + handler_name: None, + })? + .map_err(|e| HookError::HandlerFailed { + message: format!("Failed to clone Python callable reference: {e}"), + handler_name: None, + })?; + // Step 1: Call the Python handler (inside GIL) — returns either a // sync result or a coroutine object, plus whether it's a coroutine. let (is_coro, py_result_or_coro) = @@ -184,6 +191,197 @@ impl HookHandler for PyHookHandlerBridge { } } +// --------------------------------------------------------------------------- +// PyApprovalProviderBridge — wraps a Python ApprovalSystem as a Rust ApprovalProvider +// --------------------------------------------------------------------------- + +/// Bridges a Python `ApprovalSystem` object into the Rust [`ApprovalProvider`] trait. +/// +/// The Python `ApprovalSystem` protocol has: +/// `request_approval(prompt, options, timeout, default) -> str` +/// +/// The Rust `ApprovalProvider` trait has: +/// `request_approval(ApprovalRequest) -> Result` +/// +/// This bridge adapts between the two interfaces. +struct PyApprovalProviderBridge { + py_obj: Py, +} + +// Safety: Py is Send+Sync (PyO3 handles GIL acquisition). 
+unsafe impl Send for PyApprovalProviderBridge {} +unsafe impl Sync for PyApprovalProviderBridge {} + +impl amplifier_core::traits::ApprovalProvider for PyApprovalProviderBridge { + fn request_approval( + &self, + request: amplifier_core::models::ApprovalRequest, + ) -> Pin< + Box< + dyn Future< + Output = Result< + amplifier_core::models::ApprovalResponse, + amplifier_core::errors::AmplifierError, + >, + > + Send + + '_, + >, + > { + Box::pin(async move { + // Clone the Py reference inside the GIL to safely use in this async block + let py_obj = Python::try_attach(|py| Ok::<_, PyErr>(self.py_obj.clone_ref(py))) + .ok_or_else(|| { + AmplifierError::Session(SessionError::Other { + message: "Failed to attach to Python runtime".to_string(), + }) + })? + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("Failed to clone Python object reference: {e}"), + }) + })?; + + // Step 1: Build Python call args from the ApprovalRequest + let (is_coro, py_result_or_coro) = + Python::try_attach(|py| -> PyResult<(bool, Py)> { + // Adapt Rust ApprovalRequest to Python ApprovalSystem.request_approval() args: + // prompt: str, options: list[str], timeout: float, default: str + let prompt = format!("{}: {}", request.tool_name, request.action); + let options = vec!["approve", "deny"]; + let timeout = request.timeout.unwrap_or(300.0); + let default = "deny"; + + let call_result = py_obj.call_method( + py, + "request_approval", + (prompt, options, timeout, default), + None, + )?; + let bound = call_result.bind(py); + + let inspect = py.import("inspect")?; + let is_coro: bool = inspect.call_method1("iscoroutine", (bound,))?.extract()?; + + Ok((is_coro, call_result)) + }) + .ok_or_else(|| { + AmplifierError::Session(SessionError::Other { + message: "Failed to attach to Python runtime".to_string(), + }) + })? 
+ .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("Python approval call error: {e}"), + }) + })?; + + // Step 2: Await if coroutine + let py_result: Py = if is_coro { + let future = Python::try_attach(|py| { + pyo3_async_runtimes::tokio::into_future(py_result_or_coro.into_bound(py)) + }) + .ok_or_else(|| { + AmplifierError::Session(SessionError::Other { + message: "Failed to attach for coroutine conversion".to_string(), + }) + })? + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("Coroutine conversion error: {e}"), + }) + })?; + future.await.map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("Async approval error: {e}"), + }) + })? + } else { + py_result_or_coro + }; + + // Step 3: Parse result string → ApprovalResponse + let approved = Python::try_attach(|py| -> PyResult { + let result_str: String = py_result.extract(py)?; + Ok(result_str.to_lowercase().contains("approve")) + }) + .ok_or_else(|| { + AmplifierError::Session(SessionError::Other { + message: "Failed to attach to parse approval result".to_string(), + }) + })? + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("Failed to parse approval result: {e}"), + }) + })?; + + Ok(amplifier_core::models::ApprovalResponse { + approved, + reason: None, + remember: false, + }) + }) + } +} + +// --------------------------------------------------------------------------- +// PyDisplayServiceBridge — wraps a Python DisplaySystem as a Rust DisplayService +// --------------------------------------------------------------------------- + +/// Bridges a Python `DisplaySystem` object into the Rust [`DisplayService`] trait. 
+/// +/// The Python `DisplaySystem` protocol has: +/// `show_message(message, level, source)` +/// +/// The Rust `DisplayService` trait has: +/// `show_message(&self, message: &str, level: &str, source: &str) -> Pin>` +/// +/// Display is fire-and-forget — errors are logged but do not propagate. +struct PyDisplayServiceBridge { + py_obj: Py, +} + +// Safety: Py is Send+Sync (PyO3 handles GIL acquisition). +unsafe impl Send for PyDisplayServiceBridge {} +unsafe impl Sync for PyDisplayServiceBridge {} + +impl amplifier_core::traits::DisplayService for PyDisplayServiceBridge { + fn show_message( + &self, + message: &str, + level: &str, + source: &str, + ) -> Pin> + Send + '_>> { + let message = message.to_string(); + let level = level.to_string(); + let source = source.to_string(); + let py_obj = Python::try_attach(|py| self.py_obj.clone_ref(py)); + + Box::pin(async move { + let py_obj = py_obj.ok_or_else(|| { + AmplifierError::Session(SessionError::Other { + message: "Failed to attach to Python runtime for display (clone)".to_string(), + }) + })?; + + Python::try_attach(|py| -> PyResult<()> { + py_obj.call_method(py, "show_message", (&message, &level, &source), None)?; + Ok(()) + }) + .ok_or_else(|| { + AmplifierError::Session(SessionError::Other { + message: "Failed to attach to Python runtime for display (call)".to_string(), + }) + })? + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("Python display call error: {e}"), + }) + }) + }) + } +} + // --------------------------------------------------------------------------- // PySession — wraps amplifier_core::Session (Milestone 3) // --------------------------------------------------------------------------- @@ -2033,6 +2231,26 @@ impl PyCoordinator { /// Set the approval system. 
#[setter] fn set_approval_system(&mut self, value: Py) { + // Set or clear the Rust-side approval provider based on whether value is None + match Python::try_attach(|py| -> PyResult<()> { + if value.bind(py).is_none() { + self.inner.clear_approval_provider(); + } else { + let bridge = Arc::new(PyApprovalProviderBridge { + py_obj: value.clone_ref(py), + }); + self.inner.set_approval_provider(bridge); + } + Ok(()) + }) { + Some(Ok(())) => {} + Some(Err(e)) => { + log::warn!("Failed to set approval provider bridge: {e}"); + } + None => { + log::warn!("Could not attach to Python runtime while setting approval provider"); + } + } self.approval_system_obj = value; } @@ -2050,6 +2268,26 @@ impl PyCoordinator { /// Set the display system. #[setter] fn set_display_system(&mut self, value: Py) { + // Set or clear the Rust-side display service based on whether value is None + match Python::try_attach(|py| -> PyResult<()> { + if value.bind(py).is_none() { + // No clear method exists; setting None just keeps Python-side ref + } else { + let bridge = Arc::new(PyDisplayServiceBridge { + py_obj: value.clone_ref(py), + }); + self.inner.set_display_service(bridge); + } + Ok(()) + }) { + Some(Ok(())) => {} + Some(Err(e)) => { + log::warn!("Failed to set display service bridge: {e}"); + } + None => { + log::warn!("Could not attach to Python runtime while setting display service"); + } + } self.display_system_obj = value; } @@ -2148,6 +2386,12 @@ impl PyCoordinator { .collect(); dict.set_item("capabilities", PyList::new(py, &cap_keys)?)?; + // has_approval_provider: whether a Rust-side approval provider is mounted + dict.set_item("has_approval_provider", self.inner.has_approval_provider())?; + + // has_display_service: whether a Rust-side display service is mounted + dict.set_item("has_display_service", self.inner.has_display_service())?; + Ok(dict) } } @@ -2672,7 +2916,7 @@ fn resolve_module(py: Python<'_>, path: String) -> PyResult> { /// use, prefer `load_and_mount_wasm` which 
mounts into a real coordinator. #[pyfunction] fn load_wasm_from_path(py: Python<'_>, path: String) -> PyResult> { - let mut manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) .map_err(|e| PyErr::new::(format!("{e}")))?; if manifest.transport != amplifier_core::transport::Transport::Wasm { @@ -2793,6 +3037,644 @@ impl PyWasmTool { } } +// --------------------------------------------------------------------------- +// PyWasmProvider — thin Python wrapper around a Rust Arc +// --------------------------------------------------------------------------- + +/// Thin Python wrapper around a Rust `Arc` loaded from WASM. +/// +/// Bridges the Rust `Arc` trait object into Python's provider +/// protocol, so WASM providers can be mounted into a coordinator's +/// `mount_points["providers"]` dict alongside native Python provider modules. +/// +/// Implements the Python Provider protocol: `name`, `get_info`, `list_models`, +/// `complete`, `parse_tool_calls`. Created automatically by +/// `load_and_mount_wasm()` when a WASM provider module is detected. +#[pyclass(name = "WasmProvider")] +struct PyWasmProvider { + inner: Arc, +} + +// Safety: Arc is Send+Sync (required by the Provider trait bound). +unsafe impl Send for PyWasmProvider {} +unsafe impl Sync for PyWasmProvider {} + +#[pymethods] +impl PyWasmProvider { + /// The provider's unique name (e.g., "openai"). + #[getter] + fn name(&self) -> &str { + self.inner.name() + } + + /// Return provider metadata as a Python dict. + /// + /// Serialises `ProviderInfo` through a JSON round-trip so the caller + /// receives a plain Python dict with all fields. 
+ fn get_info(&self, py: Python<'_>) -> PyResult> { + let info = self.inner.get_info(); + let json_str = serde_json::to_string(&info).map_err(|e| { + PyErr::new::(format!("Failed to serialize ProviderInfo: {e}")) + })?; + let json_mod = py.import("json")?; + let dict = json_mod.call_method1("loads", (&json_str,))?; + Ok(dict.unbind()) + } + + /// List models available from this provider. + /// + /// Async method — returns a coroutine that resolves to a list of dicts, + /// each representing a `ModelInfo`. + fn list_models<'py>(&self, py: Python<'py>) -> PyResult> { + let inner = self.inner.clone(); + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + let models = inner.list_models().await.map_err(|e| { + PyErr::new::(format!("list_models failed: {e}")) + })?; + + let json_str = serde_json::to_string(&models).map_err(|e| { + PyErr::new::(format!("Failed to serialize model list: {e}")) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let list = json_mod.call_method1("loads", (&json_str,))?; + Ok(list.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + /// Generate a completion from a chat request. + /// + /// Async method — takes a request (dict or Pydantic model), serialises it + /// to a Rust `ChatRequest`, calls the inner provider, and returns the + /// `ChatResponse` as a Python dict. + fn complete<'py>( + &self, + py: Python<'py>, + request: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + // Convert Python request to serde_json::Value + let json_mod = py.import("json")?; + let serializable = try_model_dump(&request); + let json_str: String = json_mod + .call_method1("dumps", (&serializable,))? 
+ .extract()?; + let chat_request: amplifier_core::messages::ChatRequest = serde_json::from_str(&json_str) + .map_err(|e| { + PyErr::new::(format!("Invalid ChatRequest JSON: {e}")) + })?; + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + let response = inner.complete(chat_request).await.map_err(|e| { + PyErr::new::(format!("Provider complete failed: {e}")) + })?; + + let result_json = serde_json::to_string(&response).map_err(|e| { + PyErr::new::(format!( + "Failed to serialize ChatResponse: {e}" + )) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let dict = json_mod.call_method1("loads", (&result_json,))?; + Ok(dict.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + /// Extract tool calls from a provider response. + /// + /// Sync method — takes a response (dict or Pydantic model), deserialises + /// it as `ChatResponse`, calls `parse_tool_calls`, and returns a list of + /// dicts representing `ToolCall` structs. + fn parse_tool_calls(&self, py: Python<'_>, response: Bound<'_, PyAny>) -> PyResult> { + let json_mod = py.import("json")?; + let serializable = try_model_dump(&response); + let json_str: String = json_mod + .call_method1("dumps", (&serializable,))? 
+ .extract()?; + let chat_response: amplifier_core::messages::ChatResponse = serde_json::from_str(&json_str) + .map_err(|e| { + PyErr::new::(format!("Invalid ChatResponse JSON: {e}")) + })?; + + let tool_calls = self.inner.parse_tool_calls(&chat_response); + + let result_json = serde_json::to_string(&tool_calls).map_err(|e| { + PyErr::new::(format!("Failed to serialize tool calls: {e}")) + })?; + let list = json_mod.call_method1("loads", (&result_json,))?; + Ok(list.unbind()) + } + + fn __repr__(&self) -> String { + format!("", self.inner.name()) + } +} + +// --------------------------------------------------------------------------- +// PyWasmHook — thin Python wrapper around a Rust Arc +// --------------------------------------------------------------------------- + +/// Thin Python wrapper around a Rust `Arc` loaded from WASM. +/// +/// Bridges the Rust `Arc` trait object into Python, +/// so WASM hook modules can be used from the Python session. +/// +/// Implements the Python hook protocol: `handle(event, data)` (async). +/// Created automatically by `load_and_mount_wasm()` when a WASM hook +/// module is detected. +#[pyclass(name = "WasmHook")] +struct PyWasmHook { + inner: Arc, +} + +// Safety: Arc is Send+Sync (required by the HookHandler trait bound). +unsafe impl Send for PyWasmHook {} +unsafe impl Sync for PyWasmHook {} + +#[pymethods] +impl PyWasmHook { + /// Handle a hook event. + /// + /// Async method — takes an event name and data (dict or Pydantic model), + /// serialises through JSON, calls the inner handler, and returns the + /// `HookResult` as a Python dict. + fn handle<'py>( + &self, + py: Python<'py>, + event: String, + data: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + let json_mod = py.import("json")?; + let serializable = try_model_dump(&data); + let json_str: String = json_mod + .call_method1("dumps", (&serializable,))? 
+ .extract()?; + let value: Value = serde_json::from_str(&json_str).map_err(|e| { + PyErr::new::(format!("Invalid JSON for hook data: {e}")) + })?; + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + let result = inner.handle(&event, value).await.map_err(|e| { + PyErr::new::(format!("Hook handle failed: {e}")) + })?; + + let result_json = serde_json::to_string(&result).map_err(|e| { + PyErr::new::(format!("Failed to serialize HookResult: {e}")) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let dict = json_mod.call_method1("loads", (&result_json,))?; + Ok(dict.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + fn __repr__(&self) -> String { + "".to_string() + } +} + +// --------------------------------------------------------------------------- +// PyWasmContext — thin Python wrapper around a Rust Arc +// --------------------------------------------------------------------------- + +/// Thin Python wrapper around a Rust `Arc` loaded from WASM. +/// +/// Bridges the Rust `Arc` trait object into Python's +/// context protocol, so WASM context modules can be mounted into a +/// coordinator's `mount_points["context"]` slot. +/// +/// Implements the Python context protocol: `add_message`, `get_messages`, +/// `get_messages_for_request`, `set_messages`, `clear`. Created automatically +/// by `load_and_mount_wasm()` when a WASM context module is detected. +#[pyclass(name = "WasmContext")] +struct PyWasmContext { + inner: Arc, +} + +// Safety: Arc is Send+Sync (required by the ContextManager trait bound). +unsafe impl Send for PyWasmContext {} +unsafe impl Sync for PyWasmContext {} + +#[pymethods] +impl PyWasmContext { + /// Append a message to the context history. + /// + /// Async method — takes a message (dict or Pydantic model), serialises + /// through JSON, and calls the inner context manager. 
+ fn add_message<'py>( + &self, + py: Python<'py>, + message: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + let json_mod = py.import("json")?; + let serializable = try_model_dump(&message); + let json_str: String = json_mod + .call_method1("dumps", (&serializable,))? + .extract()?; + let value: Value = serde_json::from_str(&json_str) + .map_err(|e| PyErr::new::(format!("Invalid JSON for message: {e}")))?; + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + inner.add_message(value).await.map_err(|e| { + PyErr::new::(format!("add_message failed: {e}")) + })?; + Python::try_attach(|py| -> PyResult> { Ok(py.None()) }).ok_or_else( + || PyErr::new::("Failed to attach to Python runtime"), + )? + }), + ) + } + + /// Get all messages (raw, uncompacted). + /// + /// Async method — returns a coroutine that resolves to a list of dicts. + fn get_messages<'py>(&self, py: Python<'py>) -> PyResult> { + let inner = self.inner.clone(); + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + let messages = inner.get_messages().await.map_err(|e| { + PyErr::new::(format!("get_messages failed: {e}")) + })?; + + let json_str = serde_json::to_string(&messages).map_err(|e| { + PyErr::new::(format!("Failed to serialize messages: {e}")) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let list = json_mod.call_method1("loads", (&json_str,))?; + Ok(list.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + /// Get messages ready for an LLM request, compacted if necessary. + /// + /// Async method — takes an optional request dict (currently ignores + /// token_budget and provider for WASM context managers), and returns + /// a list of message dicts. 
+ fn get_messages_for_request<'py>( + &self, + py: Python<'py>, + _request: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + // WASM context managers don't receive provider/budget yet — + // pass None for both parameters. + let messages = inner + .get_messages_for_request(None, None) + .await + .map_err(|e| { + PyErr::new::(format!( + "get_messages_for_request failed: {e}" + )) + })?; + + let json_str = serde_json::to_string(&messages).map_err(|e| { + PyErr::new::(format!("Failed to serialize messages: {e}")) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let list = json_mod.call_method1("loads", (&json_str,))?; + Ok(list.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + /// Replace the entire message list. + /// + /// Async method — takes a list of message dicts, serialises through JSON, + /// and calls the inner context manager. + fn set_messages<'py>( + &self, + py: Python<'py>, + messages: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + let json_mod = py.import("json")?; + let json_str: String = json_mod.call_method1("dumps", (&messages,))?.extract()?; + let values: Vec = serde_json::from_str(&json_str).map_err(|e| { + PyErr::new::(format!("Invalid JSON for messages: {e}")) + })?; + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + inner.set_messages(values).await.map_err(|e| { + PyErr::new::(format!("set_messages failed: {e}")) + })?; + Python::try_attach(|py| -> PyResult> { Ok(py.None()) }).ok_or_else( + || PyErr::new::("Failed to attach to Python runtime"), + )? + }), + ) + } + + /// Clear all messages from context. + /// + /// Async method — returns a coroutine that resolves to None. 
+ fn clear<'py>(&self, py: Python<'py>) -> PyResult> { + let inner = self.inner.clone(); + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + inner + .clear() + .await + .map_err(|e| PyErr::new::(format!("clear failed: {e}")))?; + Python::try_attach(|py| -> PyResult> { Ok(py.None()) }).ok_or_else( + || PyErr::new::("Failed to attach to Python runtime"), + )? + }), + ) + } + + fn __repr__(&self) -> String { + "".to_string() + } +} + +// --------------------------------------------------------------------------- +// PyWasmOrchestrator — thin Python wrapper around a Rust Arc +// --------------------------------------------------------------------------- + +/// Thin Python wrapper around a Rust `Arc` loaded from WASM. +/// +/// Bridges the Rust `Arc` trait object into Python's +/// orchestrator protocol, so WASM orchestrator modules can be mounted +/// into a coordinator's `mount_points["orchestrator"]` slot. +/// +/// Implements the Python orchestrator protocol: `execute(prompt, ...)` (async). +/// Created automatically by `load_and_mount_wasm()` when a WASM orchestrator +/// module is detected. +#[pyclass(name = "WasmOrchestrator")] +struct PyWasmOrchestrator { + inner: Arc, +} + +// Safety: Arc is Send+Sync (required by the Orchestrator trait bound). +unsafe impl Send for PyWasmOrchestrator {} +unsafe impl Sync for PyWasmOrchestrator {} + +#[pymethods] +impl PyWasmOrchestrator { + /// Execute the WASM orchestrator with a prompt. + /// + /// # Why all 6 parameters are accepted + /// + /// `_session_exec.run_orchestrator()` always passes all 6 keyword arguments + /// (`prompt`, `context`, `providers`, `tools`, `hooks`, `coordinator`) to + /// every orchestrator — Python and WASM alike. If this method's signature + /// did not accept them, Python would raise `TypeError: execute() got an + /// unexpected keyword argument …` at call time. 
+ /// + /// # Why 5 parameters are discarded + /// + /// WASM guests cannot receive arbitrary Python objects across the sandbox + /// boundary. Instead, they access kernel services (context, providers, + /// tools, hooks, coordinator) via **`kernel-service` host imports** defined + /// in the WIT interface. The Python-side objects are therefore accepted + /// here solely for signature compatibility and then dropped. + /// + /// # Future enhancement + /// + /// Forward relevant session state (e.g. context messages, tool manifests) + /// to WASM guests by plumbing them through the `kernel-service` host + /// imports, so that WASM orchestrators can interact with the same kernel + /// services available to Python orchestrators. + #[pyo3(signature = (prompt, context=None, providers=None, tools=None, hooks=None, coordinator=None))] + #[allow(clippy::too_many_arguments)] + fn execute<'py>( + &self, + py: Python<'py>, + prompt: String, + context: Option>, + providers: Option>, + tools: Option>, + hooks: Option>, + coordinator: Option>, + ) -> PyResult> { + let inner = self.inner.clone(); + // Protocol conformance: these params are required by the unified dispatch + // path in `_session_exec.run_orchestrator()` which always passes all 6 + // keyword arguments. WASM guests access kernel services (context, + // providers, tools, hooks, coordinator) via host imports defined in the + // WIT `kernel-service` interface, not via Python parameters. + let _ = (context, providers, tools, hooks, coordinator); + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + // Provide minimal defaults for the required trait parameters. + // WASM orchestrators currently only use `prompt`. 
+ let empty_context: Arc = + Arc::new(NullContextManager); + let empty_providers: HashMap> = + HashMap::new(); + let empty_tools: HashMap> = + HashMap::new(); + let null_hooks = Value::Null; + let null_coordinator = Value::Null; + + let result = inner + .execute( + prompt, + empty_context, + empty_providers, + empty_tools, + null_hooks, + null_coordinator, + ) + .await + .map_err(|e| { + PyErr::new::(format!("Orchestrator execute failed: {e}")) + })?; + + Python::try_attach(|py| -> PyResult> { + Ok(result.into_pyobject(py)?.into_any().unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + fn __repr__(&self) -> String { + "".to_string() + } +} + +/// Minimal no-op context manager used as a placeholder when calling WASM +/// orchestrators that don't actually use the context parameter. +struct NullContextManager; + +impl amplifier_core::traits::ContextManager for NullContextManager { + fn add_message( + &self, + _message: Value, + ) -> Pin> + Send + '_>> { + Box::pin(async { Ok(()) }) + } + + fn get_messages_for_request( + &self, + _token_budget: Option, + _provider: Option>, + ) -> Pin, amplifier_core::ContextError>> + Send + '_>> + { + Box::pin(async { Ok(vec![]) }) + } + + fn get_messages( + &self, + ) -> Pin, amplifier_core::ContextError>> + Send + '_>> + { + Box::pin(async { Ok(vec![]) }) + } + + fn set_messages( + &self, + _messages: Vec, + ) -> Pin> + Send + '_>> { + Box::pin(async { Ok(()) }) + } + + fn clear( + &self, + ) -> Pin> + Send + '_>> { + Box::pin(async { Ok(()) }) + } +} + +// --------------------------------------------------------------------------- +// PyWasmApproval — thin Python wrapper around a Rust Arc +// --------------------------------------------------------------------------- + +/// Thin Python wrapper around a Rust `Arc` loaded from WASM. +/// +/// Bridges the Rust `Arc` trait object into Python, +/// so WASM approval modules can be used from the Python session. 
+/// +/// Implements the Python approval protocol: `request_approval(request)` (async). +/// Created automatically by `load_and_mount_wasm()` when a WASM approval +/// module is detected. +#[pyclass(name = "WasmApproval")] +struct PyWasmApproval { + inner: Arc, +} + +// Safety: Arc is Send+Sync (required by the ApprovalProvider trait bound). +unsafe impl Send for PyWasmApproval {} +unsafe impl Sync for PyWasmApproval {} + +#[pymethods] +impl PyWasmApproval { + /// Request approval for an action. + /// + /// Async method — takes a request (dict or Pydantic model), deserialises + /// it as `ApprovalRequest`, calls the inner approval provider, and returns + /// the `ApprovalResponse` as a Python dict. + fn request_approval<'py>( + &self, + py: Python<'py>, + request: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + let json_mod = py.import("json")?; + let serializable = try_model_dump(&request); + let json_str: String = json_mod + .call_method1("dumps", (&serializable,))? + .extract()?; + let approval_request: amplifier_core::models::ApprovalRequest = + serde_json::from_str(&json_str).map_err(|e| { + PyErr::new::(format!("Invalid ApprovalRequest JSON: {e}")) + })?; + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + let response = inner + .request_approval(approval_request) + .await + .map_err(|e| { + PyErr::new::(format!("request_approval failed: {e}")) + })?; + + let result_json = serde_json::to_string(&response).map_err(|e| { + PyErr::new::(format!( + "Failed to serialize ApprovalResponse: {e}" + )) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let dict = json_mod.call_method1("loads", (&result_json,))?; + Ok(dict.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? 
+ }), + ) + } + + fn __repr__(&self) -> String { + "".to_string() + } +} + // --------------------------------------------------------------------------- // load_and_mount_wasm — load WASM module and mount into a real coordinator // --------------------------------------------------------------------------- @@ -2823,7 +3705,7 @@ fn load_and_mount_wasm( coordinator: &PyCoordinator, path: String, ) -> PyResult> { - let mut manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) .map_err(|e| PyErr::new::(format!("{e}")))?; if manifest.transport != amplifier_core::transport::Transport::Wasm { @@ -2868,11 +3750,73 @@ fn load_and_mount_wasm( dict.set_item("status", "delegate_to_python")?; dict.set_item("package_name", package_name)?; } - _ => { - // Hook, Context, Approval, Provider, Orchestrator — - // loaded and validated, but not auto-mounted. The Python - // caller should handle mounting based on module_type. + amplifier_core::module_resolver::LoadedModule::Provider(provider) => { + let provider_name = provider.name().to_string(); + // Wrap in PyWasmProvider and mount into coordinator's mount_points["providers"] + let wrapper = Py::new(py, PyWasmProvider { inner: provider })?; + let mp = coordinator.mount_points.bind(py); + let providers_any = mp.get_item("providers")?.ok_or_else(|| { + PyErr::new::("mount_points missing 'providers'") + })?; + let providers_dict = providers_any.cast::()?; + providers_dict.set_item(&provider_name, &wrapper)?; + dict.set_item("status", "mounted")?; + dict.set_item("name", &provider_name)?; + } + amplifier_core::module_resolver::LoadedModule::Hook(hook) => { + // Register the WASM hook with the coordinator's Rust-side hook + // registry so it participates in `emit()` dispatch. + // + // Ask the module which events it wants to subscribe to via the + // `HookHandler::get_subscriptions` trait method. 
WASM modules + // compiled with the current WIT return their declared subscriptions; + // old modules without `get-subscriptions` fall back to a wildcard + // subscription inside `WasmHookBridge::get_subscriptions()`. + // + // NOTE: `GrpcHookBridge` uses the trait default (wildcard) here. + // Its async `get_subscriptions` RPC with UNIMPLEMENTED fallback is + // invoked through a separate async registration path for gRPC hooks. + let config = serde_json::json!({}); + let subscriptions_result: Vec<(String, i32, String)> = + hook.get_subscriptions(&config); + + let hooks_registry = coordinator.inner.hooks_shared(); + for (event, priority, name) in &subscriptions_result { + let _ = hooks_registry.register(event, hook.clone(), *priority, Some(name.clone())); + } + + dict.set_item("status", "mounted")?; + dict.set_item("subscriptions_count", subscriptions_result.len())?; + } + amplifier_core::module_resolver::LoadedModule::Context(context) => { + // Wrap in PyWasmContext and mount into coordinator's mount_points["context"] + let wrapper = Py::new(py, PyWasmContext { inner: context })?; + let mp = coordinator.mount_points.bind(py); + mp.set_item("context", &wrapper)?; + dict.set_item("status", "mounted")?; + } + amplifier_core::module_resolver::LoadedModule::Orchestrator(orchestrator) => { + // Wrap in PyWasmOrchestrator and mount into coordinator's mount_points["orchestrator"] + log::warn!( + "WASM orchestrator mounted — context/providers/tools/hooks/coordinator \ + are not forwarded to WASM guests in this version. \ + The WASM guest accesses kernel services via host imports instead." 
+ ); + let wrapper = Py::new( + py, + PyWasmOrchestrator { + inner: orchestrator, + }, + )?; + let mp = coordinator.mount_points.bind(py); + mp.set_item("orchestrator", &wrapper)?; + dict.set_item("status", "mounted")?; + } + amplifier_core::module_resolver::LoadedModule::Approval(approval) => { + // Wrap in PyWasmApproval — returned to caller for use + let wrapper = Py::new(py, PyWasmApproval { inner: approval })?; dict.set_item("status", "loaded")?; + dict.set_item("wrapper", wrapper)?; } } @@ -2898,6 +3842,11 @@ fn _engine(m: &Bound<'_, PyModule>) -> PyResult<()> { m.add_class::()?; m.add_class::()?; m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; m.add_function(wrap_pyfunction!(classify_error_message, m)?)?; m.add_function(wrap_pyfunction!(compute_delay, m)?)?; m.add_function(wrap_pyfunction!(resolve_module, m)?)?; @@ -3135,6 +4084,30 @@ mod tests { fn _assert_type_compiles(_: &PyWasmTool) {} } + /// Verify PyWasmHook wrapper type exists. + #[test] + fn py_wasm_hook_type_exists() { + fn _assert_type_compiles(_: &PyWasmHook) {} + } + + /// Verify PyWasmContext wrapper type exists. + #[test] + fn py_wasm_context_type_exists() { + fn _assert_type_compiles(_: &PyWasmContext) {} + } + + /// Verify PyWasmOrchestrator wrapper type exists. + #[test] + fn py_wasm_orchestrator_type_exists() { + fn _assert_type_compiles(_: &PyWasmOrchestrator) {} + } + + /// Verify PyWasmApproval wrapper type exists. 
+ #[test] + fn py_wasm_approval_type_exists() { + fn _assert_type_compiles(_: &PyWasmApproval) {} + } + /// Document the contract for load_and_mount_wasm: /// /// - Accepts a PyCoordinator reference and a filesystem path diff --git a/bindings/python/tests/test_dispatch_integration.py b/bindings/python/tests/test_dispatch_integration.py deleted file mode 100644 index f34079e..0000000 --- a/bindings/python/tests/test_dispatch_integration.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Test that _session_init.py can route through loader_dispatch.""" - -import asyncio - - -def test_dispatch_functions_importable(): - """The dispatch functions are importable from the right locations.""" - from amplifier_core.loader_dispatch import _detect_transport - from amplifier_core.loader_dispatch import load_module - - assert callable(load_module) - assert callable(_detect_transport) - - -def test_session_init_still_works(): - """_session_init.initialize_session is still importable and async.""" - from amplifier_core._session_init import initialize_session - - assert asyncio.iscoroutinefunction(initialize_session) diff --git a/bindings/python/tests/test_loader_dispatch.py b/bindings/python/tests/test_loader_dispatch.py deleted file mode 100644 index aff5611..0000000 --- a/bindings/python/tests/test_loader_dispatch.py +++ /dev/null @@ -1,149 +0,0 @@ -"""Tests for the polyglot loader dispatch module.""" - -import os -import sys -import tempfile -from unittest.mock import MagicMock, patch - -import pytest - - -def test_dispatch_module_exists(): - """The loader_dispatch module is importable.""" - from amplifier_core import loader_dispatch - - assert hasattr(loader_dispatch, "load_module") - - -def test_dispatch_no_toml_falls_back_to_python(): - """Without amplifier.toml, dispatch falls through to Python loader.""" - from amplifier_core.loader_dispatch import _detect_transport - - with tempfile.TemporaryDirectory() as tmpdir: - transport = _detect_transport(tmpdir) - assert transport == "python" 
- - -def test_dispatch_detects_grpc_transport(): - """amplifier.toml with transport=grpc is detected.""" - from amplifier_core.loader_dispatch import _detect_transport - - with tempfile.TemporaryDirectory() as tmpdir: - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write('[module]\nname = "test"\ntype = "tool"\ntransport = "grpc"\n') - transport = _detect_transport(tmpdir) - assert transport == "grpc" - - -def test_dispatch_detects_python_transport(): - """amplifier.toml with transport=python is detected.""" - from amplifier_core.loader_dispatch import _detect_transport - - with tempfile.TemporaryDirectory() as tmpdir: - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write('[module]\nname = "test"\ntype = "tool"\ntransport = "python"\n') - transport = _detect_transport(tmpdir) - assert transport == "python" - - -def test_dispatch_detects_native_transport(): - """amplifier.toml with transport=native is detected.""" - from amplifier_core.loader_dispatch import _detect_transport - - with tempfile.TemporaryDirectory() as tmpdir: - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write('[module]\nname = "test"\ntype = "tool"\ntransport = "native"\n') - transport = _detect_transport(tmpdir) - assert transport == "native" - - -def test_dispatch_defaults_to_python_when_transport_missing(): - """amplifier.toml without transport key defaults to python.""" - from amplifier_core.loader_dispatch import _detect_transport - - with tempfile.TemporaryDirectory() as tmpdir: - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write('[module]\nname = "test"\ntype = "tool"\n') - transport = _detect_transport(tmpdir) - assert transport == "python" - - -def test_dispatch_reads_grpc_endpoint(): - """amplifier.toml grpc section provides endpoint.""" - from amplifier_core.loader_dispatch import _read_module_meta - - with 
tempfile.TemporaryDirectory() as tmpdir: - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write( - '[module]\nname = "my-tool"\ntype = "tool"\ntransport = "grpc"\n\n[grpc]\nendpoint = "localhost:50052"\n' - ) - meta = _read_module_meta(tmpdir) - assert meta["module"]["transport"] == "grpc" - assert meta["grpc"]["endpoint"] == "localhost:50052" - - -@pytest.mark.asyncio -async def test_load_module_uses_rust_loader_for_wasm_transport(): - """load_module imports load_and_mount_wasm and returns a deferred mount callable when Rust resolver detects wasm.""" - from amplifier_core.loader_dispatch import load_module - - fake_engine = MagicMock() - fake_engine.resolve_module.return_value = {"transport": "wasm", "name": "test-wasm"} - - coordinator = MagicMock() - coordinator.loader = None - - with tempfile.TemporaryDirectory() as tmpdir: - with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): - result = await load_module("test-wasm", {}, tmpdir, coordinator) - - assert callable(result) - # load_and_mount_wasm is NOT called during load_module — it's deferred to mount time. - # The mount function captures load_and_mount_wasm and calls it when invoked. 
- - -@pytest.mark.asyncio -async def test_load_module_wasm_without_rust_engine_raises_not_implemented(): - """load_module raises NotImplementedError for wasm when Rust engine is not available.""" - from amplifier_core.loader_dispatch import load_module - - coordinator = MagicMock() - coordinator.loader = None - - with tempfile.TemporaryDirectory() as tmpdir: - # Write an amplifier.toml so Python fallback detects wasm - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write('[module]\nname = "test"\ntype = "tool"\ntransport = "wasm"\n') - - # Setting sys.modules entry to None makes any "from pkg import X" raise ImportError - with patch.dict(sys.modules, {"amplifier_core._engine": None}): - with pytest.raises(NotImplementedError, match="Rust engine"): - await load_module("test-wasm", {}, tmpdir, coordinator) - - -@pytest.mark.asyncio -async def test_load_module_falls_back_when_rust_resolver_raises(): - """load_module falls back to Python transport detection when Rust resolver raises.""" - from amplifier_core.loader_dispatch import load_module - - fake_engine = MagicMock() - fake_engine.resolve_module.side_effect = RuntimeError("resolver blew up") - - coordinator = MagicMock() - coordinator.loader = None - - with tempfile.TemporaryDirectory() as tmpdir: - # No amplifier.toml → Python detection returns "python" → tries Python loader - with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): - # Python loader itself will fail (no real coordinator), but we just need - # to confirm it tried the Python fallback path (not raise from Rust error). - # TypeError is raised when the MagicMock coordinator's source_resolver - # returns a MagicMock that can't be awaited. 
- with pytest.raises((TypeError, ValueError)): - await load_module("test-mod", {}, tmpdir, coordinator) diff --git a/crates/amplifier-core/src/bridges/grpc_hook.rs b/crates/amplifier-core/src/bridges/grpc_hook.rs index c2896dc..5a7fa7d 100644 --- a/crates/amplifier-core/src/bridges/grpc_hook.rs +++ b/crates/amplifier-core/src/bridges/grpc_hook.rs @@ -33,6 +33,20 @@ use crate::traits::HookHandler; /// A bridge that wraps a remote gRPC `HookService` as a native [`HookHandler`]. /// +/// ## GetSubscriptions RPC +/// +/// The proto `HookService` exposes a `GetSubscriptions` RPC that the host +/// calls at mount time to discover which events a hook module wants to +/// receive and at what priority. The host then registers those +/// subscriptions in its own hook registry so the module does not need to +/// call back into the kernel. +/// +/// A future `RegisterHook` RPC on `KernelService` will allow bidirectional +/// registration where the module pushes subscriptions to the kernel instead +/// of (or in addition to) the host pulling them. +/// +/// ## Mutex note +/// /// The client is held behind a [`tokio::sync::Mutex`] because /// `HookServiceClient` methods take `&mut self` and we need to hold /// the lock across `.await` points. @@ -50,6 +64,72 @@ impl GrpcHookBridge { }) } + /// Default wildcard subscription used as a fallback when `GetSubscriptions` + /// is unavailable or fails: receives every event at priority 0. + pub(crate) const WILDCARD_SUBSCRIPTION: (&'static str, i32, &'static str) = + ("*", 0, "grpc-hook"); + + /// Convert a gRPC `GetSubscriptions` RPC result into a subscription list. + /// + /// ## Fallback rules + /// - **Success**: returns the server-provided subscriptions. + /// - **UNIMPLEMENTED** (gRPC code 12): old servers that predate this RPC + /// respond with `UNIMPLEMENTED`; fall back silently to a single wildcard + /// subscription so the hook still receives all events. + /// - **Any other error**: log a warning and fall back to wildcard. 
+ pub(crate) fn subscriptions_from_result( + result: Result, tonic::Status>, + ) -> Vec<(String, i32, String)> { + let wildcard = || { + vec![( + Self::WILDCARD_SUBSCRIPTION.0.to_string(), + Self::WILDCARD_SUBSCRIPTION.1, + Self::WILDCARD_SUBSCRIPTION.2.to_string(), + )] + }; + match result { + Ok(resp) => resp + .into_inner() + .subscriptions + .into_iter() + .map(|s| (s.event, s.priority, s.name)) + .collect(), + Err(status) if status.code() == tonic::Code::Unimplemented => { + // Old server that doesn't implement GetSubscriptions — use wildcard silently. + wildcard() + } + Err(status) => { + log::warn!( + "GrpcHookBridge: GetSubscriptions failed ({}), falling back to wildcard subscription", + status + ); + wildcard() + } + } + } + + /// Query the remote hook service for its event subscriptions. + /// + /// Returns a list of `(event, priority, name)` tuples to register with the + /// local hook registry. Call this once at mount time. + /// + /// ## Backward compatibility + /// + /// Old gRPC hook servers that predate the `GetSubscriptions` RPC respond + /// with gRPC `UNIMPLEMENTED` (code 12). This method handles that + /// gracefully by returning `[("*", 0, "grpc-hook")]` — a wildcard + /// subscription that causes the hook to receive every event. + pub async fn get_subscriptions(&self) -> Vec<(String, i32, String)> { + let request = amplifier_module::GetSubscriptionsRequest { + config_json: "{}".to_string(), + }; + let result = { + let mut client = self.client.lock().await; + client.get_subscriptions(request).await + }; + Self::subscriptions_from_result(result) + } + /// Convert a proto `HookResult` to a native [`models::HookResult`]. 
pub(crate) fn proto_to_native_hook_result( proto: amplifier_module::HookResult, @@ -419,4 +499,118 @@ mod tests { // Should return None (parse failure logged but still returns None) assert_eq!(result.data, None); } + + // ---- GetSubscriptions proto types exist ---- + + /// Verify the generated GetSubscriptionsRequest has the expected config_json field. + #[test] + fn get_subscriptions_request_type_exists() { + let req = amplifier_module::GetSubscriptionsRequest { + config_json: "{}".to_string(), + }; + assert_eq!(req.config_json, "{}"); + } + + /// Verify the generated EventSubscription has event, priority, and name fields. + #[test] + fn event_subscription_type_exists() { + let sub = amplifier_module::EventSubscription { + event: "before_completion".to_string(), + priority: 100, + name: "my-hook".to_string(), + }; + assert_eq!(sub.event, "before_completion"); + assert_eq!(sub.priority, 100); + assert_eq!(sub.name, "my-hook"); + } + + /// Verify the generated GetSubscriptionsResponse holds a vec of EventSubscription. + #[test] + fn get_subscriptions_response_type_exists() { + let resp = amplifier_module::GetSubscriptionsResponse { + subscriptions: vec![amplifier_module::EventSubscription { + event: "after_tool_call".to_string(), + priority: 50, + name: "audit-hook".to_string(), + }], + }; + assert_eq!(resp.subscriptions.len(), 1); + assert_eq!(resp.subscriptions[0].event, "after_tool_call"); + } + + // ---- GetSubscriptions fallback behaviour ---- + + /// UNIMPLEMENTED (code 12) must return the wildcard fallback subscription. + /// This is the key backward-compatibility guarantee: old hook servers that + /// predate the GetSubscriptions RPC will still work. 
+ #[test] + fn get_subscriptions_unimplemented_returns_wildcard() { + let status = tonic::Status::unimplemented("not implemented"); + let result: Result< + tonic::Response, + tonic::Status, + > = Err(status); + let subs = GrpcHookBridge::subscriptions_from_result(result); + assert_eq!(subs.len(), 1, "expected exactly one wildcard subscription"); + assert_eq!(subs[0].0, "*", "event should be wildcard"); + assert_eq!(subs[0].1, 0, "priority should be 0"); + assert_eq!(subs[0].2, "grpc-hook", "name should be grpc-hook"); + } + + /// A successful response should return the server-provided subscriptions. + #[test] + fn get_subscriptions_success_returns_proto_subscriptions() { + let response = amplifier_module::GetSubscriptionsResponse { + subscriptions: vec![amplifier_module::EventSubscription { + event: "before_completion".to_string(), + priority: 10, + name: "my-hook".to_string(), + }], + }; + let result = Ok(tonic::Response::new(response)); + let subs = GrpcHookBridge::subscriptions_from_result(result); + assert_eq!(subs.len(), 1); + assert_eq!(subs[0].0, "before_completion"); + assert_eq!(subs[0].1, 10); + assert_eq!(subs[0].2, "my-hook"); + } + + /// Any non-UNIMPLEMENTED error should also fall back to wildcard. + #[test] + fn get_subscriptions_other_error_returns_wildcard() { + let status = tonic::Status::internal("server exploded"); + let result: Result< + tonic::Response, + tonic::Status, + > = Err(status); + let subs = GrpcHookBridge::subscriptions_from_result(result); + assert_eq!(subs.len(), 1, "expected exactly one wildcard subscription"); + assert_eq!(subs[0].0, "*"); + assert_eq!(subs[0].1, 0); + assert_eq!(subs[0].2, "grpc-hook"); + } + + /// Multiple subscriptions from a successful response are all returned. 
+ #[test] + fn get_subscriptions_success_returns_all_subscriptions() { + let response = amplifier_module::GetSubscriptionsResponse { + subscriptions: vec![ + amplifier_module::EventSubscription { + event: "before_completion".to_string(), + priority: 10, + name: "hook-a".to_string(), + }, + amplifier_module::EventSubscription { + event: "after_tool_call".to_string(), + priority: 5, + name: "hook-b".to_string(), + }, + ], + }; + let result = Ok(tonic::Response::new(response)); + let subs = GrpcHookBridge::subscriptions_from_result(result); + assert_eq!(subs.len(), 2); + assert_eq!(subs[0].0, "before_completion"); + assert_eq!(subs[1].0, "after_tool_call"); + } } diff --git a/crates/amplifier-core/src/bridges/wasm_hook.rs b/crates/amplifier-core/src/bridges/wasm_hook.rs index d4771ae..c29af39 100644 --- a/crates/amplifier-core/src/bridges/wasm_hook.rs +++ b/crates/amplifier-core/src/bridges/wasm_hook.rs @@ -23,6 +23,25 @@ use super::wasm_tool::create_linker_and_store; /// The WIT interface name used by `cargo component` for hook handler exports. const INTERFACE_NAME: &str = "amplifier:modules/hook-handler@1.0.0"; +/// Shorthand for the common boxed-error result used throughout WASM bridges. +type WasmResult = Result>; + +/// Rust mirror of the WIT `event-subscription` record exported by hook modules. +/// +/// Used exclusively for lifting the Component Model return value of +/// `get-subscriptions`. Converted to `(String, i32, String)` tuples at the +/// public API boundary. +#[derive(wasmtime::component::ComponentType, wasmtime::component::Lift, Debug, Clone)] +#[component(record)] +struct WasmEventSubscription { + #[component(name = "event")] + event: String, + #[component(name = "priority")] + priority: i32, + #[component(name = "name")] + name: String, +} + /// Helper: call the `handle` export on a fresh component instance. 
/// /// The envelope bytes must be a JSON-serialized object: @@ -31,7 +50,7 @@ fn call_handle( engine: &Engine, component: &Component, envelope_bytes: Vec, -) -> Result, Box> { +) -> WasmResult> { let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; let instance = linker.instantiate(&mut store, component)?; @@ -48,6 +67,42 @@ fn call_handle( } } +/// Helper: call the `get-subscriptions` export on a fresh component instance. +/// +/// `config_bytes` must be a JSON-serialized configuration blob (from bundle YAML). +/// Returns a vec of `(event, priority, name)` tuples describing the hook's +/// desired subscriptions. +fn call_get_subscriptions( + engine: &Engine, + component: &Component, + config_bytes: Vec, +) -> WasmResult> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = super::get_typed_func::<(Vec,), (Vec,)>( + &instance, + &mut store, + "get-subscriptions", + INTERFACE_NAME, + )?; + let (subs,) = func.call(&mut store, (config_bytes,))?; + Ok(subs + .into_iter() + .map(|s| (s.event, s.priority, s.name)) + .collect()) +} + +/// Default wildcard subscription returned when `get-subscriptions` is absent. +/// +/// Old WASM hook modules compiled against the previous WIT (before +/// `get-subscriptions` was added) will not export the function. We fall back +/// to a single `"*"` subscription so those modules still receive every event, +/// preserving backward compatibility. +fn wildcard_subscriptions() -> Vec<(String, i32, String)> { + vec![("*".to_string(), 0, "wasm-hook".to_string())] +} + /// A bridge that loads a WASM Component and exposes it as a native [`HookHandler`]. /// /// The component is compiled once and can be instantiated for each hook invocation. @@ -61,19 +116,73 @@ impl WasmHookBridge { /// Load a WASM hook component from raw bytes. 
/// /// Compiles the Component and caches it for reuse across `handle()` calls. - pub fn from_bytes( - wasm_bytes: &[u8], - engine: Arc, - ) -> Result> { + pub fn from_bytes(wasm_bytes: &[u8], engine: Arc) -> WasmResult { let component = Component::new(&engine, wasm_bytes)?; Ok(Self { engine, component }) } + /// Convert a raw `call_get_subscriptions` result into a subscription list. + /// + /// Applies the graceful-degradation rules: + /// - `Ok(subs)` → return the module's declared subscriptions. + /// - `Err` where the message contains `"not found"` → the module was + /// compiled without `get-subscriptions`; log at `debug` and return the + /// wildcard fallback. + /// - Any other `Err` (runtime trap, bad data, …) → log at `warn` and + /// return the wildcard fallback. + /// + /// This method is `pub(crate)` so that unit tests can exercise the + /// fallback logic directly without needing a real WASM fixture. + pub(crate) fn subscriptions_from_result( + result: WasmResult>, + ) -> Vec<(String, i32, String)> { + match result { + Ok(subs) => subs, + Err(e) if e.to_string().contains("not found") => { + log::debug!( + "get-subscriptions not exported by WASM module (old module without the \ + function), falling back to wildcard subscription: {e}" + ); + wildcard_subscriptions() + } + Err(e) => { + log::warn!( + "get-subscriptions call failed, falling back to wildcard subscription: {e}" + ); + wildcard_subscriptions() + } + } + } + + /// Query the component for its event subscriptions. + /// + /// Instantiates the component and calls `get-subscriptions` with the given + /// JSON config (serialized to bytes). If the export is absent (old module) + /// or returns an error, this method falls back to a single wildcard + /// subscription `[("*", 0, "wasm-hook")]` rather than propagating the + /// error. + /// + /// Returns a vec of `(event, priority, name)` tuples. 
+ pub fn get_subscriptions(&self, config: &serde_json::Value) -> Vec<(String, i32, String)> { + let config_bytes = match serde_json::to_vec(config) { + Ok(b) => b, + Err(e) => { + log::warn!( + "failed to serialize config for get-subscriptions, \ + falling back to wildcard: {e}" + ); + return wildcard_subscriptions(); + } + }; + Self::subscriptions_from_result(call_get_subscriptions( + &self.engine, + &self.component, + config_bytes, + )) + } + /// Convenience: load a WASM hook component from a file path. - pub fn from_file( - path: &Path, - engine: Arc, - ) -> Result> { + pub fn from_file(path: &Path, engine: Arc) -> WasmResult { let bytes = std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; Self::from_bytes(&bytes, engine) @@ -116,6 +225,11 @@ impl HookHandler for WasmHookBridge { Ok(hook_result) }) } + + fn get_subscriptions(&self, config: &serde_json::Value) -> Vec<(String, i32, String)> { + // Delegate to the inherent method which applies the graceful-fallback logic. + WasmHookBridge::get_subscriptions(self, config) + } } #[cfg(test)] @@ -170,6 +284,51 @@ mod tests { Arc::new(Engine::new(&config).expect("engine creation failed")) } + /// Missing-export error triggers the wildcard fallback (old WASM modules). + /// + /// Mirrors `GrpcHookBridge::get_subscriptions_unimplemented_returns_wildcard`: + /// when the error message indicates the function is not exported, we fall + /// back gracefully instead of propagating the error. 
+ #[test] + fn get_subscriptions_falls_back_to_wildcard_when_export_missing() { + let err: Box = + format!("export function 'get-subscriptions' not found in '{INTERFACE_NAME}'").into(); + let subs = WasmHookBridge::subscriptions_from_result(Err(err)); + assert_eq!(subs.len(), 1, "expected exactly one wildcard subscription"); + assert_eq!(subs[0].0, "*", "event should be wildcard"); + assert_eq!(subs[0].1, 0, "priority should be 0"); + assert_eq!(subs[0].2, "wasm-hook", "name should be wasm-hook"); + } + + /// Any runtime error (not just missing export) also returns the wildcard + /// fallback — we prefer leniency over hard failure during registration. + #[test] + fn get_subscriptions_falls_back_to_wildcard_on_runtime_error() { + let err: Box = + "WASM trap: out of bounds memory access".into(); + let subs = WasmHookBridge::subscriptions_from_result(Err(err)); + assert_eq!(subs.len(), 1, "expected wildcard fallback on runtime error"); + assert_eq!(subs[0].0, "*"); + assert_eq!(subs[0].1, 0); + assert_eq!(subs[0].2, "wasm-hook"); + } + + #[test] + fn deny_hook_get_subscriptions_returns_expected() { + let engine = make_engine(); + let bytes = deny_hook_wasm_bytes(); + let bridge = WasmHookBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let config = serde_json::json!({}); + let subs = bridge.get_subscriptions(&config); + + assert_eq!(subs.len(), 1, "deny-hook declares exactly one subscription"); + let (event, priority, name) = &subs[0]; + assert_eq!(event, "tool:pre"); + assert_eq!(*priority, 0); + assert_eq!(name, "deny-all"); + } + #[tokio::test] async fn deny_hook_returns_deny_action() { let engine = make_engine(); diff --git a/crates/amplifier-core/src/coordinator.rs b/crates/amplifier-core/src/coordinator.rs index 9d996b9..0e32a44 100644 --- a/crates/amplifier-core/src/coordinator.rs +++ b/crates/amplifier-core/src/coordinator.rs @@ -27,7 +27,9 @@ use serde_json::Value; use crate::cancellation::CancellationToken; use 
crate::hooks::HookRegistry; -use crate::traits::{ContextManager, Orchestrator, Provider, Tool}; +use crate::traits::{ + ApprovalProvider, ContextManager, DisplayService, Orchestrator, Provider, Tool, +}; // --------------------------------------------------------------------------- // Type aliases for cleanup and contributor callbacks @@ -77,7 +79,7 @@ pub struct Coordinator { tools: Mutex>>, // -- Subsystems -- - hooks: HookRegistry, + hooks: Arc, cancellation: CancellationToken, // -- Capabilities & contributions -- @@ -90,6 +92,10 @@ pub struct Coordinator { // -- Config -- config: HashMap, + // -- App-layer services -- + approval_provider: Mutex>>, + display_service: Mutex>>, + // -- Turn tracking -- current_turn_injections: Mutex, } @@ -102,12 +108,14 @@ impl Coordinator { context: Mutex::new(None), providers: Mutex::new(HashMap::new()), tools: Mutex::new(HashMap::new()), - hooks: HookRegistry::new(), + hooks: Arc::new(HookRegistry::new()), cancellation: CancellationToken::new(), capabilities: Mutex::new(HashMap::new()), channels: Mutex::new(HashMap::new()), cleanup_functions: Mutex::new(Vec::new()), config, + approval_provider: Mutex::new(None), + display_service: Mutex::new(None), current_turn_injections: Mutex::new(0), } } @@ -210,6 +218,45 @@ impl Coordinator { self.context.lock().unwrap().is_some() } + // -- App-layer service: ApprovalProvider -- + + /// Set the approval provider (single slot). + pub fn set_approval_provider(&self, provider: Arc) { + *self.approval_provider.lock().unwrap() = Some(provider); + } + + /// Clear the approval provider. + pub fn clear_approval_provider(&self) { + *self.approval_provider.lock().unwrap() = None; + } + + /// Get the approval provider, if mounted. + pub fn approval_provider(&self) -> Option> { + self.approval_provider.lock().unwrap().clone() + } + + /// Whether an approval provider is mounted. 
+ pub fn has_approval_provider(&self) -> bool { + self.approval_provider.lock().unwrap().is_some() + } + + // -- App-layer service: DisplayService -- + + /// Set the display service (single slot). + pub fn set_display_service(&self, service: Arc) { + *self.display_service.lock().unwrap() = Some(service); + } + + /// Get the display service, if mounted. + pub fn display_service(&self) -> Option> { + self.display_service.lock().unwrap().clone() + } + + /// Whether a display service is mounted. + pub fn has_display_service(&self) -> bool { + self.display_service.lock().unwrap().is_some() + } + /// Names of all registered capabilities. pub fn capability_names(&self) -> Vec { self.capabilities.lock().unwrap().keys().cloned().collect() @@ -218,7 +265,8 @@ impl Coordinator { /// Return a JSON-compatible dict of all coordinator state for serialization/introspection. /// /// Returns a `HashMap` with keys: `tools`, `providers`, `has_orchestrator`, - /// `has_context`, `capabilities` — matching the universal Coordinator API. + /// `has_context`, `capabilities`, `has_approval_provider`, + /// `has_display_service` — matching the universal Coordinator API. pub fn to_dict(&self) -> HashMap { let mut dict = HashMap::new(); dict.insert("tools".to_string(), serde_json::json!(self.tool_names())); @@ -238,6 +286,14 @@ impl Coordinator { "capabilities".to_string(), serde_json::json!(self.capability_names()), ); + dict.insert( + "has_approval_provider".to_string(), + serde_json::json!(self.has_approval_provider()), + ); + dict.insert( + "has_display_service".to_string(), + serde_json::json!(self.has_display_service()), + ); dict } @@ -248,6 +304,32 @@ impl Coordinator { &self.hooks } + /// Shared ownership of the hook registry. + /// + /// Returns a clone of the `Arc`, enabling binding layers + /// (Node, Go, etc.) to hold long-lived shared references to the same + /// registry instance that the Coordinator uses internally. 
+ /// + /// The existing [`hooks()`](Self::hooks) method continues to return + /// `&HookRegistry` via `Arc::Deref` — all existing call sites are + /// unchanged. + /// + /// # Examples + /// + /// ```rust + /// use std::sync::Arc; + /// use amplifier_core::coordinator::Coordinator; + /// + /// let coord = Coordinator::new_for_test(); + /// let shared: Arc = coord.hooks_shared(); + /// + /// // Both point to the same registry + /// assert_eq!(coord.hooks().list_handlers(None).len(), shared.list_handlers(None).len()); + /// ``` + pub fn hooks_shared(&self) -> Arc { + Arc::clone(&self.hooks) + } + /// Reference to the cancellation token. pub fn cancellation(&self) -> &CancellationToken { &self.cancellation @@ -704,4 +786,108 @@ mod tests { assert_eq!(results.len(), 1); assert_eq!(results[0], serde_json::json!({"key": "value"})); } + + #[tokio::test] + async fn hooks_shared_returns_arc_to_same_registry() { + let coord = Coordinator::new_for_test(); + + // Obtain shared Arc to the hook registry + let shared_hooks = coord.hooks_shared(); + + // Register a handler on the shared clone + let handler = Arc::new(crate::testing::FakeHookHandler::new()); + let _ = shared_hooks.register( + "test:shared", + handler.clone(), + 0, + Some("shared-handler".into()), + ); + + // Emit via the original coordinator's hooks() — the handler MUST fire + // because hooks_shared() returns the same registry, not a copy. 
+ coord + .hooks() + .emit("test:shared", serde_json::json!({"from": "coordinator"})) + .await; + + let events = handler.recorded_events(); + assert_eq!( + events.len(), + 1, + "handler registered on hooks_shared() clone must fire when emitting via hooks()" + ); + assert_eq!(events[0].0, "test:shared"); + } + + // --------------------------------------------------------------- + // ApprovalProvider get/set + // --------------------------------------------------------------- + + #[test] + fn approval_provider_none_initially() { + let coord = Coordinator::new_for_test(); + assert!(coord.approval_provider().is_none()); + } + + #[test] + fn set_and_get_approval_provider() { + let coord = Coordinator::new_for_test(); + let provider = Arc::new(crate::testing::FakeApprovalProvider::approving()); + coord.set_approval_provider(provider); + assert!(coord.approval_provider().is_some()); + } + + #[test] + fn to_dict_includes_has_approval_provider() { + let coord = Coordinator::new_for_test(); + let dict = coord.to_dict(); + assert_eq!(dict["has_approval_provider"], serde_json::json!(false)); + + let provider = Arc::new(crate::testing::FakeApprovalProvider::approving()); + coord.set_approval_provider(provider); + let dict = coord.to_dict(); + assert_eq!(dict["has_approval_provider"], serde_json::json!(true)); + } + + // --------------------------------------------------------------- + // DisplayService get/set + // --------------------------------------------------------------- + + #[test] + fn display_service_none_initially() { + let coord = Coordinator::new_for_test(); + assert!(coord.display_service().is_none()); + } + + #[test] + fn set_and_get_display_service() { + let coord = Coordinator::new_for_test(); + let display = Arc::new(crate::testing::FakeDisplayService::new()); + coord.set_display_service(display); + assert!(coord.display_service().is_some()); + } + + #[tokio::test] + async fn display_service_records_messages() { + let display = 
Arc::new(crate::testing::FakeDisplayService::new()); + display.show_message("hello", "info", "test").await.unwrap(); + let messages = display.recorded_messages(); + assert_eq!(messages.len(), 1); + assert_eq!( + messages[0], + ("hello".to_string(), "info".to_string(), "test".to_string()) + ); + } + + #[test] + fn to_dict_includes_has_display_service() { + let coord = Coordinator::new_for_test(); + let dict = coord.to_dict(); + assert_eq!(dict["has_display_service"], serde_json::json!(false)); + + let display = Arc::new(crate::testing::FakeDisplayService::new()); + coord.set_display_service(display); + let dict = coord.to_dict(); + assert_eq!(dict["has_display_service"], serde_json::json!(true)); + } } diff --git a/crates/amplifier-core/src/generated/amplifier.module.rs b/crates/amplifier-core/src/generated/amplifier.module.rs index 615c8eb..64363db 100644 --- a/crates/amplifier-core/src/generated/amplifier.module.rs +++ b/crates/amplifier-core/src/generated/amplifier.module.rs @@ -539,6 +539,26 @@ pub struct HookHandleRequest { #[prost(string, tag = "2")] pub data_json: ::prost::alloc::string::String, } +/// GetSubscriptions: allows a hook module to declare which events it handles. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetSubscriptionsRequest { + #[prost(string, tag = "1")] + pub config_json: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetSubscriptionsResponse { + #[prost(message, repeated, tag = "1")] + pub subscriptions: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EventSubscription { + #[prost(string, tag = "1")] + pub event: ::prost::alloc::string::String, + #[prost(int32, tag = "2")] + pub priority: i32, + #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, +} #[derive(Clone, PartialEq, ::prost::Message)] pub struct CompleteWithProviderRequest { #[prost(string, tag = "1")] @@ -1850,6 +1870,36 @@ pub mod hook_service_client { .insert(GrpcMethod::new("amplifier.module.HookService", "Handle")); self.inner.unary(req, path, codec).await } + /// Return the event subscriptions this hook wants to receive. + /// The host calls this at mount time and registers the subscriptions itself. + /// A future RegisterHook RPC on KernelService will allow bidirectional + /// registration where the module pushes subscriptions to the kernel. + pub async fn get_subscriptions( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/amplifier.module.HookService/GetSubscriptions", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("amplifier.module.HookService", "GetSubscriptions"), + ); + self.inner.unary(req, path, codec).await + } } } /// Generated client implementations. 
@@ -3710,6 +3760,17 @@ pub mod hook_service_server { &self, request: tonic::Request, ) -> std::result::Result, tonic::Status>; + /// Return the event subscriptions this hook wants to receive. + /// The host calls this at mount time and registers the subscriptions itself. + /// A future RegisterHook RPC on KernelService will allow bidirectional + /// registration where the module pushes subscriptions to the kernel. + async fn get_subscriptions( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; } /// Hook module contract — event interception. #[derive(Debug)] @@ -3833,6 +3894,51 @@ pub mod hook_service_server { }; Box::pin(fut) } + "/amplifier.module.HookService/GetSubscriptions" => { + #[allow(non_camel_case_types)] + struct GetSubscriptionsSvc(pub Arc); + impl< + T: HookService, + > tonic::server::UnaryService + for GetSubscriptionsSvc { + type Response = super::GetSubscriptionsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_subscriptions(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetSubscriptionsSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } _ => { Box::pin(async 
move { let mut response = http::Response::new(empty_body()); diff --git a/crates/amplifier-core/src/grpc_server.rs b/crates/amplifier-core/src/grpc_server.rs index fa79723..54d7573 100644 --- a/crates/amplifier-core/src/grpc_server.rs +++ b/crates/amplifier-core/src/grpc_server.rs @@ -577,7 +577,7 @@ mod tests { let coord = Arc::new(Coordinator::new(Default::default())); let handler = Arc::new(FakeHookHandler::new()); - coord + let _ = coord .hooks() .register("test:event", handler.clone(), 0, Some("test-hook".into())); let service = KernelServiceImpl::new(coord); @@ -607,7 +607,7 @@ mod tests { ..Default::default() }; let handler = Arc::new(FakeHookHandler::with_result(deny_result)); - coord + let _ = coord .hooks() .register("test:event", handler, 0, Some("deny-hook".into())); let service = KernelServiceImpl::new(coord); @@ -699,7 +699,7 @@ mod tests { ..Default::default() }; let handler = Arc::new(FakeHookHandler::with_result(result_with_data)); - coord + let _ = coord .hooks() .register("collect:event", handler, 0, Some("data-hook".into())); let service = KernelServiceImpl::new(coord); @@ -740,7 +740,7 @@ mod tests { ..Default::default() }; let handler = Arc::new(FakeHookHandler::with_result(result_with_data)); - coord.hooks().register( + let _ = coord.hooks().register( "multi:event", handler, i as i32, diff --git a/crates/amplifier-core/src/hooks.rs b/crates/amplifier-core/src/hooks.rs index af3e6e6..b1d91ad 100644 --- a/crates/amplifier-core/src/hooks.rs +++ b/crates/amplifier-core/src/hooks.rs @@ -675,8 +675,8 @@ mod tests { // Register low priority first, high priority second -- should execute // high first because lower number = higher priority. 
- registry.register("test:event", h2, 10, Some("low-priority".into())); - registry.register("test:event", h1, 5, Some("high-priority".into())); + let _ = registry.register("test:event", h2, 10, Some("low-priority".into())); + let _ = registry.register("test:event", h1, 5, Some("high-priority".into())); registry.emit("test:event", serde_json::json!({})).await; let order = log.lock().await; @@ -697,8 +697,8 @@ mod tests { })); let never_called = Arc::new(CountingHandler::new()); - registry.register("test:event", deny_handler, 0, Some("denier".into())); - registry.register( + let _ = registry.register("test:event", deny_handler, 0, Some("denier".into())); + let _ = registry.register( "test:event", never_called.clone(), 10, @@ -730,8 +730,8 @@ mod tests { })); // inject runs first (priority 0), ask runs second (priority 10) - registry.register("test:event", inject, 0, None); - registry.register("test:event", ask, 10, None); + let _ = registry.register("test:event", inject, 0, None); + let _ = registry.register("test:event", ask, 10, None); let result = registry.emit("test:event", serde_json::json!({})).await; assert_eq!(result.action, HookAction::AskUser); @@ -748,7 +748,7 @@ mod tests { key: "added", value: "true", }); - registry.register("test:event", modifier, 0, None); + let _ = registry.register("test:event", modifier, 0, None); let result = registry .emit("test:event", serde_json::json!({"original": true})) @@ -771,8 +771,8 @@ mod tests { value: "2", }); - registry.register("test:event", m1, 0, None); - registry.register("test:event", m2, 10, None); + let _ = registry.register("test:event", m1, 0, None); + let _ = registry.register("test:event", m2, 10, None); let result = registry.emit("test:event", serde_json::json!({})).await; let data = result.data.unwrap(); @@ -798,8 +798,8 @@ mod tests { ..Default::default() })); - registry.register("test:event", i1, 0, None); - registry.register("test:event", i2, 10, None); + let _ = registry.register("test:event", i1, 0, 
None); + let _ = registry.register("test:event", i2, 10, None); let result = registry.emit("test:event", serde_json::json!({})).await; assert_eq!(result.action, HookAction::InjectContext); @@ -839,7 +839,7 @@ mod tests { })); let capture = Arc::new(CaptureHandler::new()); - registry.register("test:event", capture.clone(), 0, None); + let _ = registry.register("test:event", capture.clone(), 0, None); registry .emit("test:event", serde_json::json!({"custom": true})) @@ -857,7 +857,7 @@ mod tests { })); let capture = Arc::new(CaptureHandler::new()); - registry.register("test:event", capture.clone(), 0, None); + let _ = registry.register("test:event", capture.clone(), 0, None); registry .emit("test:event", serde_json::json!({"key": "override"})) @@ -876,8 +876,8 @@ mod tests { let failing = Arc::new(FailingHandler); let succeeding = Arc::new(CountingHandler::new()); - registry.register("test:event", failing, 0, None); - registry.register("test:event", succeeding.clone(), 10, None); + let _ = registry.register("test:event", failing, 0, None); + let _ = registry.register("test:event", succeeding.clone(), 10, None); let result = registry.emit("test:event", serde_json::json!({})).await; assert_eq!(result.action, HookAction::Continue); @@ -894,8 +894,8 @@ mod tests { let h1 = Arc::new(DataHandler(serde_json::json!("result-1"))); let h2 = Arc::new(DataHandler(serde_json::json!("result-2"))); - registry.register("test:event", h1, 0, None); - registry.register("test:event", h2, 10, None); + let _ = registry.register("test:event", h1, 0, None); + let _ = registry.register("test:event", h2, 10, None); let results = registry .emit_and_collect( @@ -931,8 +931,8 @@ mod tests { ..Default::default() })); - registry.register("test:event", failing, 0, Some("failing_handler".into())); - registry.register("test:event", simple, 10, Some("simple_handler".into())); + let _ = registry.register("test:event", failing, 0, Some("failing_handler".into())); + let _ = 
registry.register("test:event", simple, 10, Some("simple_handler".into())); let results = registry .emit_and_collect( @@ -956,8 +956,8 @@ mod tests { ..Default::default() })); - registry.register("test:event", slow, 0, Some("slow_handler".into())); - registry.register("test:event", simple, 10, Some("fast_handler".into())); + let _ = registry.register("test:event", slow, 0, Some("slow_handler".into())); + let _ = registry.register("test:event", simple, 10, Some("fast_handler".into())); let results = registry .emit_and_collect( @@ -978,8 +978,8 @@ mod tests { async fn list_handlers_returns_names() { let registry = HookRegistry::new(); let h = Arc::new(SimpleHandler(HookResult::default())); - registry.register("tool:pre", h.clone(), 0, Some("my-hook".into())); - registry.register("tool:post", h, 0, Some("other-hook".into())); + let _ = registry.register("tool:pre", h.clone(), 0, Some("my-hook".into())); + let _ = registry.register("tool:post", h, 0, Some("other-hook".into())); let handlers = registry.list_handlers(None); assert!(handlers.contains_key("tool:pre")); @@ -991,8 +991,8 @@ mod tests { async fn list_handlers_filters_by_event() { let registry = HookRegistry::new(); let h = Arc::new(SimpleHandler(HookResult::default())); - registry.register("tool:pre", h.clone(), 0, Some("my-hook".into())); - registry.register("tool:post", h, 0, Some("other-hook".into())); + let _ = registry.register("tool:pre", h.clone(), 0, Some("my-hook".into())); + let _ = registry.register("tool:post", h, 0, Some("other-hook".into())); let handlers = registry.list_handlers(Some("tool:pre")); assert!(handlers.contains_key("tool:pre")); @@ -1007,7 +1007,7 @@ mod tests { async fn test_emit_stamps_timestamp() { let registry = HookRegistry::new(); let capture = Arc::new(CaptureHandler::new()); - registry.register("test:event", capture.clone(), 0, None); + let _ = registry.register("test:event", capture.clone(), 0, None); registry .emit("test:event", serde_json::json!({"key": "value"})) @@ 
-1027,7 +1027,7 @@ mod tests { async fn test_emit_timestamp_is_infrastructure_owned() { let registry = HookRegistry::new(); let capture = Arc::new(CaptureHandler::new()); - registry.register("test:event", capture.clone(), 0, None); + let _ = registry.register("test:event", capture.clone(), 0, None); // Caller tries to supply their own timestamp — infrastructure must overwrite it registry @@ -1054,7 +1054,7 @@ mod tests { async fn test_emit_and_collect_does_not_stamp_timestamp() { let registry = HookRegistry::new(); let capture = Arc::new(CaptureHandler::new()); - registry.register("test:event", capture.clone(), 0, None); + let _ = registry.register("test:event", capture.clone(), 0, None); registry .emit_and_collect( @@ -1080,7 +1080,7 @@ mod tests { async fn handlers_only_called_for_registered_event() { let registry = HookRegistry::new(); let counter = Arc::new(CountingHandler::new()); - registry.register("tool:pre", counter.clone(), 0, None); + let _ = registry.register("tool:pre", counter.clone(), 0, None); // Emit a different event registry.emit("tool:post", serde_json::json!({})).await; @@ -1157,7 +1157,7 @@ mod tests { let registry = HookRegistry::new(); let failing = Arc::new(FailingHandler); - registry.register( + let _ = registry.register( "test:log_error_event", failing, 0, @@ -1197,7 +1197,7 @@ mod tests { key: "injected_key", value: "injected_value", }); - registry.register( + let _ = registry.register( "test:modify_no_warn", modifier, 0, diff --git a/crates/amplifier-core/src/module_resolver.rs b/crates/amplifier-core/src/module_resolver.rs index 7fd445b..d051c0c 100644 --- a/crates/amplifier-core/src/module_resolver.rs +++ b/crates/amplifier-core/src/module_resolver.rs @@ -12,7 +12,6 @@ //! 4. Error use std::path::{Path, PathBuf}; -#[cfg(feature = "wasm")] use std::sync::Arc; use crate::models::ModuleType; @@ -419,7 +418,40 @@ pub enum ModuleResolverError { /// Returned by [`load_module`] after dispatch to the appropriate transport bridge. 
/// The `PythonDelegated` variant is a signal to the Python host that it should /// load the module itself via importlib. -#[cfg(feature = "wasm")] +/// +/// # Examples +/// +/// Dispatch on transport to mount a module on the coordinator: +/// +/// ```rust +/// use amplifier_core::module_resolver::LoadedModule; +/// use amplifier_core::coordinator::Coordinator; +/// use amplifier_core::testing::FakeTool; +/// use std::sync::Arc; +/// +/// let coord = Coordinator::new_for_test(); +/// +/// // Simulate a loaded tool module +/// let loaded = LoadedModule::Tool(Arc::new(FakeTool::new("echo", "echoes"))); +/// +/// match loaded { +/// LoadedModule::Tool(tool) => { +/// let name = tool.name().to_string(); +/// coord.mount_tool(&name, tool); +/// } +/// LoadedModule::Provider(p) => { +/// let name = p.name().to_string(); +/// coord.mount_provider(&name, p); +/// } +/// LoadedModule::PythonDelegated { package_name } => { +/// // Signal to Python host: import this package +/// println!("Python host should import: {package_name}"); +/// } +/// _ => { /* handle other variants */ } +/// } +/// +/// assert_eq!(coord.tool_names(), vec!["echo".to_string()]); +/// ``` pub enum LoadedModule { /// A loaded tool module. Tool(Arc), @@ -440,7 +472,6 @@ pub enum LoadedModule { }, } -#[cfg(feature = "wasm")] impl LoadedModule { /// Returns the variant name as a static string (for diagnostics). pub fn variant_name(&self) -> &'static str { @@ -625,6 +656,7 @@ pub fn scan_for_wasm_file(dir: &Path) -> Option { #[cfg(test)] mod tests { use super::*; + use std::sync::Arc; #[test] fn module_manifest_can_be_constructed() { @@ -1451,4 +1483,55 @@ artifact = "module.wasm" "error should include the expected (wrong) hash: {err_msg}" ); } + + // --- Feature-gate independence tests --- + + /// resolve_module() works for Python packages without the wasm feature. + /// This test runs unconditionally (no #[cfg(feature = "wasm")]). 
+ #[test] + fn resolve_module_python_without_wasm_feature() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write"); + + let manifest = resolve_module(dir.path()).expect("should resolve Python package"); + assert_eq!(manifest.transport, Transport::Python); + assert_eq!(manifest.module_type, ModuleType::Tool); + } + + /// resolve_module() works for gRPC modules (via amplifier.toml) without the wasm feature. + #[test] + fn resolve_module_grpc_via_toml_without_wasm_feature() { + let dir = tempfile::tempdir().expect("create temp dir"); + let toml_content = r#" +[module] +transport = "grpc" +type = "provider" + +[grpc] +endpoint = "http://localhost:50051" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).expect("write toml"); + + let manifest = resolve_module(dir.path()).expect("should resolve gRPC module"); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Provider); + match &manifest.artifact { + ModuleArtifact::GrpcEndpoint(ep) => assert_eq!(ep, "http://localhost:50051"), + _ => panic!("expected GrpcEndpoint artifact"), + } + } + + /// LoadedModule variants can be constructed and matched without wasm feature. 
+ #[test] + fn loaded_module_variants_accessible_without_wasm_feature() { + let tool: Arc = + Arc::new(crate::testing::FakeTool::new("test", "test tool")); + let loaded = LoadedModule::Tool(tool); + assert_eq!(loaded.variant_name(), "Tool"); + + let loaded_py = LoadedModule::PythonDelegated { + package_name: "my_module".to_string(), + }; + assert_eq!(loaded_py.variant_name(), "PythonDelegated"); + } } diff --git a/crates/amplifier-core/src/session.rs b/crates/amplifier-core/src/session.rs index 2fa515c..dddba3c 100644 --- a/crates/amplifier-core/src/session.rs +++ b/crates/amplifier-core/src/session.rs @@ -652,7 +652,7 @@ mod tests { // Register a hook handler to capture events let handler = Arc::new(FakeHookHandler::new()); - session.coordinator().hooks().register( + let _ = session.coordinator().hooks().register( events::SESSION_START, handler.clone(), 0, @@ -685,7 +685,7 @@ mod tests { .mount_provider("test", Arc::new(FakeProvider::new("test", "hi"))); let handler = Arc::new(FakeHookHandler::new()); - session.coordinator().hooks().register( + let _ = session.coordinator().hooks().register( events::SESSION_RESUME, handler.clone(), 0, @@ -711,7 +711,7 @@ mod tests { let session = Session::new(config, None, None); let handler = Arc::new(FakeHookHandler::new()); - session.coordinator().hooks().register( + let _ = session.coordinator().hooks().register( events::SESSION_END, handler.clone(), 0, diff --git a/crates/amplifier-core/src/testing.rs b/crates/amplifier-core/src/testing.rs index dc720a6..0a49a9e 100644 --- a/crates/amplifier-core/src/testing.rs +++ b/crates/amplifier-core/src/testing.rs @@ -29,7 +29,9 @@ use serde_json::Value; use crate::errors::{AmplifierError, ContextError, HookError, ProviderError, ToolError}; use crate::messages::{ChatRequest, ChatResponse, ContentBlock, ToolCall, ToolSpec}; use crate::models::{HookResult, ModelInfo, ProviderInfo, ToolResult}; -use crate::traits::{ApprovalProvider, ContextManager, HookHandler, Orchestrator, Provider, 
Tool}; +use crate::traits::{ + ApprovalProvider, ContextManager, DisplayService, HookHandler, Orchestrator, Provider, Tool, +}; // --------------------------------------------------------------------------- // EchoTool @@ -497,6 +499,49 @@ impl ApprovalProvider for FakeApprovalProvider { } } +// --------------------------------------------------------------------------- +// FakeDisplayService +// --------------------------------------------------------------------------- + +/// A fake display service that records `(message, level, source)` tuples for test assertions. +pub struct FakeDisplayService { + messages: Mutex>, // (message, level, source) +} + +impl FakeDisplayService { + pub fn new() -> Self { + Self { + messages: Mutex::new(Vec::new()), + } + } + + pub fn recorded_messages(&self) -> Vec<(String, String, String)> { + self.messages.lock().unwrap().clone() + } +} + +impl Default for FakeDisplayService { + fn default() -> Self { + Self::new() + } +} + +impl DisplayService for FakeDisplayService { + fn show_message( + &self, + message: &str, + level: &str, + source: &str, + ) -> Pin> + Send + '_>> { + self.messages.lock().unwrap().push(( + message.to_string(), + level.to_string(), + source.to_string(), + )); + Box::pin(async { Ok(()) }) + } +} + // --------------------------------------------------------------------------- // Tests // --------------------------------------------------------------------------- diff --git a/crates/amplifier-core/src/traits.rs b/crates/amplifier-core/src/traits.rs index 39482aa..d2df760 100644 --- a/crates/amplifier-core/src/traits.rs +++ b/crates/amplifier-core/src/traits.rs @@ -18,6 +18,7 @@ //! four primary module types that session/coordinator manages. //! - [`HookHandler`] participates in the hook dispatch pipeline. //! - [`ApprovalProvider`] provides UI-driven approval gates. +//! - [`DisplayService`] provides UI-driven message display. //! //! All data types referenced here are defined in [`crate::models`], //! 
[`crate::messages`], and [`crate::errors`]. @@ -354,6 +355,23 @@ pub trait HookHandler: Send + Sync { event: &str, data: Value, ) -> Pin> + Send + '_>>; + + /// Return the event subscriptions this hook wants to receive. + /// + /// `config` is the module's JSON configuration (from bundle YAML). The + /// return value is a list of `(event, priority, name)` tuples. + /// + /// # Default behaviour + /// + /// Returns a single wildcard subscription `[("*", 0, "hook")]` so that + /// existing implementors automatically receive every event without + /// needing to override this method. WASM hooks compiled against the + /// current WIT will override this to return their declared subscriptions; + /// old WASM modules without the `get-subscriptions` export fall back to + /// the same wildcard via [`crate::bridges::wasm_hook::WasmHookBridge`]. + fn get_subscriptions(&self, _config: &serde_json::Value) -> Vec<(String, i32, String)> { + vec![("*".to_string(), 0, "hook".to_string())] + } } // --------------------------------------------------------------------------- @@ -395,6 +413,39 @@ pub trait ApprovalProvider: Send + Sync { ) -> Pin> + Send + '_>>; } +// --------------------------------------------------------------------------- +// DisplayService +// --------------------------------------------------------------------------- + +/// Interface for UI components that display messages to the user. +/// +/// When modules or the kernel need to show status messages, warnings, or +/// informational text to the user, they call the registered `DisplayService`. +/// +/// # Object safety +/// +/// This trait is object-safe: `Arc` is the standard storage type. +pub trait DisplayService: Send + Sync { + /// Display a message to the user. + /// + /// # Arguments + /// + /// * `message` — The text to display. + /// * `level` — Severity or kind (e.g. `"info"`, `"warning"`, `"error"`). + /// * `source` — Origin of the message (e.g. module or component name). 
+ /// + /// # Returns + /// + /// `Ok(())` when the message has been presented. + /// `Err(AmplifierError)` on infrastructure failure. + fn show_message( + &self, + message: &str, + level: &str, + source: &str, + ) -> Pin> + Send + '_>>; +} + // --------------------------------------------------------------------------- // Tests // --------------------------------------------------------------------------- @@ -414,5 +465,6 @@ mod tests { fn _assert_context(_: Arc) {} fn _assert_hook(_: Arc) {} fn _assert_approval(_: Arc) {} + fn _assert_display(_: Arc) {} } } diff --git a/crates/amplifier-core/src/transport.rs b/crates/amplifier-core/src/transport.rs index a8da86c..deccb3e 100644 --- a/crates/amplifier-core/src/transport.rs +++ b/crates/amplifier-core/src/transport.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use crate::traits::{Orchestrator, Tool}; +use crate::traits::{ApprovalProvider, ContextManager, HookHandler, Orchestrator, Provider, Tool}; /// Supported transport types. #[derive(Debug, Clone, PartialEq)] @@ -27,6 +27,10 @@ impl Transport { } /// Load a tool module via gRPC transport. +/// +/// # Arguments +/// +/// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). pub async fn load_grpc_tool( endpoint: &str, ) -> Result, Box> { @@ -52,6 +56,111 @@ pub async fn load_grpc_orchestrator( Ok(Arc::new(bridge)) } +/// Load a provider module via gRPC transport. +/// +/// Connects to a remote `ProviderService` and returns an `Arc` +/// that is indistinguishable from a local provider. +/// +/// # Arguments +/// +/// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). 
+/// +/// # Examples +/// +/// ```rust,no_run +/// # async fn example() -> Result<(), Box> { +/// use amplifier_core::transport::load_grpc_provider; +/// +/// let provider = load_grpc_provider("http://localhost:50051").await?; +/// println!("Connected to provider: {}", provider.name()); +/// # Ok(()) +/// # } +/// ``` +pub async fn load_grpc_provider( + endpoint: &str, +) -> Result, Box> { + let bridge = crate::bridges::grpc_provider::GrpcProviderBridge::connect(endpoint).await?; + Ok(Arc::new(bridge)) +} + +/// Load a hook handler module via gRPC transport. +/// +/// Connects to a remote `HookService` and returns an `Arc` +/// that is indistinguishable from a local hook handler. +/// +/// # Arguments +/// +/// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). +/// +/// # Examples +/// +/// ```rust,no_run +/// # async fn example() -> Result<(), Box> { +/// use amplifier_core::transport::load_grpc_hook; +/// +/// let hook = load_grpc_hook("http://localhost:50051").await?; +/// # Ok(()) +/// # } +/// ``` +pub async fn load_grpc_hook( + endpoint: &str, +) -> Result, Box> { + let bridge = crate::bridges::grpc_hook::GrpcHookBridge::connect(endpoint).await?; + Ok(Arc::new(bridge)) +} + +/// Load a context manager module via gRPC transport. +/// +/// Connects to a remote `ContextService` and returns an `Arc` +/// that is indistinguishable from a local context manager. +/// +/// # Arguments +/// +/// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). 
+/// +/// # Examples +/// +/// ```rust,no_run +/// # async fn example() -> Result<(), Box> { +/// use amplifier_core::transport::load_grpc_context; +/// +/// let context = load_grpc_context("http://localhost:50051").await?; +/// # Ok(()) +/// # } +/// ``` +pub async fn load_grpc_context( + endpoint: &str, +) -> Result, Box> { + let bridge = crate::bridges::grpc_context::GrpcContextBridge::connect(endpoint).await?; + Ok(Arc::new(bridge)) +} + +/// Load an approval provider module via gRPC transport. +/// +/// Connects to a remote `ApprovalService` and returns an `Arc` +/// that is indistinguishable from a local approval provider. +/// +/// # Arguments +/// +/// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). +/// +/// # Examples +/// +/// ```rust,no_run +/// # async fn example() -> Result<(), Box> { +/// use amplifier_core::transport::load_grpc_approval; +/// +/// let approval = load_grpc_approval("http://localhost:50051").await?; +/// # Ok(()) +/// # } +/// ``` +pub async fn load_grpc_approval( + endpoint: &str, +) -> Result, Box> { + let bridge = crate::bridges::grpc_approval::GrpcApprovalBridge::connect(endpoint).await?; + Ok(Arc::new(bridge)) +} + /// Load a native Rust tool module (zero-overhead, no bridge). 
pub fn load_native_tool(tool: impl Tool + 'static) -> Arc { Arc::new(tool) @@ -72,7 +181,7 @@ pub fn load_wasm_tool( pub fn load_wasm_hook( wasm_bytes: &[u8], engine: Arc, -) -> Result, Box> { +) -> Result, Box> { let bridge = crate::bridges::wasm_hook::WasmHookBridge::from_bytes(wasm_bytes, engine)?; Ok(Arc::new(bridge)) } @@ -82,7 +191,7 @@ pub fn load_wasm_hook( pub fn load_wasm_context( wasm_bytes: &[u8], engine: Arc, -) -> Result, Box> { +) -> Result, Box> { let bridge = crate::bridges::wasm_context::WasmContextBridge::from_bytes(wasm_bytes, engine)?; Ok(Arc::new(bridge)) } @@ -92,7 +201,7 @@ pub fn load_wasm_context( pub fn load_wasm_approval( wasm_bytes: &[u8], engine: Arc, -) -> Result, Box> { +) -> Result, Box> { let bridge = crate::bridges::wasm_approval::WasmApprovalBridge::from_bytes(wasm_bytes, engine)?; Ok(Arc::new(bridge)) } @@ -102,7 +211,7 @@ pub fn load_wasm_approval( pub fn load_wasm_provider( wasm_bytes: &[u8], engine: Arc, -) -> Result, Box> { +) -> Result, Box> { let bridge = crate::bridges::wasm_provider::WasmProviderBridge::from_bytes(wasm_bytes, engine)?; Ok(Arc::new(bridge)) } @@ -202,4 +311,51 @@ mod tests { let orch = super::load_wasm_orchestrator(&wasm_bytes, engine.inner(), coordinator); assert!(orch.is_ok()); } + + // --------------------------------------------------------------- + // gRPC transport functions — compile-time + type verification + // --------------------------------------------------------------- + + /// Verify load_grpc_provider exists and returns the correct type. + /// Uses a non-listening endpoint so connect() will fail — we only + /// care that the function exists and has the right signature. 
+ #[tokio::test] + async fn load_grpc_provider_returns_result_arc_dyn_provider() { + let result = super::load_grpc_provider("http://[::1]:59001").await; + // Connection to non-listening port should fail + assert!( + result.is_err(), + "expected connection error to non-listening port" + ); + } + + /// Verify load_grpc_hook exists and returns the correct type. + #[tokio::test] + async fn load_grpc_hook_returns_result_arc_dyn_hook_handler() { + let result = super::load_grpc_hook("http://[::1]:59002").await; + assert!( + result.is_err(), + "expected connection error to non-listening port" + ); + } + + /// Verify load_grpc_context exists and returns the correct type. + #[tokio::test] + async fn load_grpc_context_returns_result_arc_dyn_context_manager() { + let result = super::load_grpc_context("http://[::1]:59003").await; + assert!( + result.is_err(), + "expected connection error to non-listening port" + ); + } + + /// Verify load_grpc_approval exists and returns the correct type. + #[tokio::test] + async fn load_grpc_approval_returns_result_arc_dyn_approval_provider() { + let result = super::load_grpc_approval("http://[::1]:59004").await; + assert!( + result.is_err(), + "expected connection error to non-listening port" + ); + } } diff --git a/crates/amplifier-guest/src/lib.rs b/crates/amplifier-guest/src/lib.rs index a89ac95..c738506 100644 --- a/crates/amplifier-guest/src/lib.rs +++ b/crates/amplifier-guest/src/lib.rs @@ -167,6 +167,12 @@ macro_rules! export_tool { pub trait HookHandler { /// Handles a lifecycle event, returning an action the host should take. fn handle(&self, event: &str, data: Value) -> Result; + + /// Returns the events this hook wants to receive, along with priority + /// and a human-readable name for each subscription. + /// + /// `config` is the module's JSON configuration (from bundle YAML). + fn get_subscriptions(&self, config: Value) -> Vec; } /// Exports a [`HookHandler`] implementation as WASM guest entry points. 
@@ -215,6 +221,20 @@ macro_rules! export_hook { $crate::__macro_support::serde_json::to_vec(&result) .map_err(|e| e.to_string()) } + + fn get_subscriptions(config: ::std::vec::Vec) -> ::std::vec::Vec { + let config_val: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&config) + .unwrap_or($crate::Value::Null); + let subs = <$hook_type as $crate::HookHandler>::get_subscriptions(get_hook(), config_val); + subs.into_iter() + .map(|s| bindings::exports::amplifier::modules::hook_handler::EventSubscription { + event: s.event, + priority: s.priority, + name: s.name, + }) + .collect() + } } #[cfg(target_arch = "wasm32")] @@ -804,6 +824,14 @@ mod hook_handler_tests { _ => Ok(HookResult::default()), } } + + fn get_subscriptions(&self, _config: Value) -> Vec { + vec![EventSubscription { + event: "before_tool".to_string(), + priority: 10, + name: "test-hook".to_string(), + }] + } } #[test] @@ -832,6 +860,36 @@ mod hook_handler_tests { let hr = result.unwrap(); assert_eq!(hr.action, HookAction::Continue); } + + #[test] + fn test_hook_handler_get_subscriptions() { + let hook = TestHook; + let config = json!({"enabled": true}); + let subs = hook.get_subscriptions(config); + assert_eq!(subs.len(), 1); + assert_eq!(subs[0].event, "before_tool"); + assert_eq!(subs[0].priority, 10); + assert_eq!(subs[0].name, "test-hook"); + } + + #[test] + fn test_hook_handler_get_subscriptions_empty() { + #[derive(Default)] + struct EmptySubsHook; + + impl HookHandler for EmptySubsHook { + fn handle(&self, _event: &str, _data: Value) -> Result { + Ok(HookResult::default()) + } + fn get_subscriptions(&self, _config: Value) -> Vec { + vec![] + } + } + + let hook = EmptySubsHook; + let subs = hook.get_subscriptions(json!({})); + assert!(subs.is_empty()); + } } #[cfg(test)] @@ -846,6 +904,10 @@ mod hook_macro_tests { fn handle(&self, _event: &str, _data: Value) -> Result { Ok(HookResult::default()) } + + fn get_subscriptions(&self, _config: Value) -> Vec { + vec![] + } } 
export_hook!(MacroTestHook); diff --git a/crates/amplifier-guest/src/types.rs b/crates/amplifier-guest/src/types.rs index 1f98560..ada17d4 100644 --- a/crates/amplifier-guest/src/types.rs +++ b/crates/amplifier-guest/src/types.rs @@ -36,6 +36,14 @@ impl Default for ToolResult { } } +/// A subscription declaring which event a hook wants to receive. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EventSubscription { + pub event: String, + pub priority: i32, + pub name: String, +} + /// Action a hook handler can take in response to a lifecycle event. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] @@ -636,6 +644,47 @@ mod tests { assert_eq!(deserialized, original); } + // --- EventSubscription tests --- + + #[test] + fn test_event_subscription_creation() { + let sub = EventSubscription { + event: "before_tool".to_string(), + priority: 10, + name: "my-hook".to_string(), + }; + assert_eq!(sub.event, "before_tool"); + assert_eq!(sub.priority, 10); + assert_eq!(sub.name, "my-hook"); + } + + #[test] + fn test_event_subscription_serde_roundtrip() { + let sub = EventSubscription { + event: "after_tool".to_string(), + priority: -5, + name: "cleanup-hook".to_string(), + }; + let json_str = serde_json::to_string(&sub).unwrap(); + let deserialized: EventSubscription = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized.event, "after_tool"); + assert_eq!(deserialized.priority, -5); + assert_eq!(deserialized.name, "cleanup-hook"); + } + + #[test] + fn test_event_subscription_clone() { + let sub = EventSubscription { + event: "before_completion".to_string(), + priority: 0, + name: "observer".to_string(), + }; + let cloned = sub.clone(); + assert_eq!(cloned.event, sub.event); + assert_eq!(cloned.priority, sub.priority); + assert_eq!(cloned.name, sub.name); + } + // --- Re-export test --- #[test] diff --git a/docs/plans/2026-03-09-session-init-polyglot-dispatch-design.md 
b/docs/plans/2026-03-09-session-init-polyglot-dispatch-design.md new file mode 100644 index 0000000..27a37fc --- /dev/null +++ b/docs/plans/2026-03-09-session-init-polyglot-dispatch-design.md @@ -0,0 +1,193 @@ +# Python Session Init Polyglot Dispatch + +> Make WASM and gRPC modules loadable from bundle config in the Python host, so bundle authors can declare polyglot modules alongside Python ones and session init auto-detects and loads them. + +**Status:** Approved +**Date:** 2026-03-09 +**Prerequisites:** Unified Module Storage & Arc Sharing Fix, Phase 3 (WASM loading), Phase 4 (module resolver) + +--- + +## 1. Goal + +Make WASM and gRPC modules loadable from bundle config in the Python host, so bundle authors can declare polyglot modules alongside Python ones and session init auto-detects and loads them. Also provide an explicit Python API for loading specific WASM modules into a running session (already works via `load_and_mount_wasm(coordinator, path)`). + +--- + +## 2. Background + +The Python host has all the pieces for polyglot module loading but they aren't wired together: + +- `_session_init.py` loads all modules via `loader.load()` at 5 call sites (orchestrator, context, providers, tools, hooks) — Python-only today +- `loader_dispatch.py` exists as a complete polyglot router but is orphaned — nothing calls it in production +- Rust `resolve_module()` (transport detection) and `load_and_mount_wasm()` (WASM loading + coordinator mounting) are already exposed to Python via PyO3 +- `session.py:AmplifierSession.initialize()` and `_session_init.py:initialize_session()` contain ~200 lines of near-identical module loading logic + +The previous attempt to wire `loader_dispatch.py` into `_session_init.py` was reverted (PR #39) due to bugs: dict `source_hint` TypeError crash, SESSION_FORK events silently dropped, untested Rust FFI on critical path. This design takes a fundamentally different approach. 
+ +`amplifier-core` always ships as compiled wheels with Rust extensions — no pure-Python install path exists. + +--- + +## 3. Architecture: Absorb Dispatch Into the Loader + +The key architectural insight: `loader_dispatch.py` was at the WRONG abstraction boundary. It sat between two interfaces that don't agree on types (`source_hint` opaque URI vs `source_path` resolved filesystem path). The right integration point is INSIDE `loader.py` at the exact moment where a `source_hint` has already been resolved to a filesystem path, but before Python importlib loading. + +``` +┌─────────────────────────────────────────────────────────┐ +│ _session_init.py / session.py │ ← Transport-unaware +│ loader.load(module_id, config, source_hint, coordinator)│ (unchanged API + coordinator param) +└──────────────────────────┬──────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────┐ +│ loader.py:ModuleLoader._resolve_source() │ ← URI → path (Python policy) +│ ModuleSourceResolver.resolve(source_hint) │ via mountable resolver module +└──────────────────────────┬──────────────────────────────┘ + │ filesystem path + ▼ +┌─────────────────────────────────────────────────────────┐ +│ Rust resolve_module(path) [via PyO3] │ ← path → transport (Rust mechanism) +│ Returns: {transport, module_type, artifact} │ single source of truth +└──────────┬───────────┬──────────┬───────────────────────┘ + │ │ │ + python wasm grpc + │ │ │ + ▼ ▼ ▼ +┌──────────────┐ ┌──────────┐ ┌──────────────┐ +│ _load_entry │ │ load_and │ │ load_grpc │ +│ _point() / │ │ _mount │ │ _module() │ +│ _filesystem()│ │ _wasm() │ │ │ +│ [Python] │ │ [Rust] │ │ [Python+Rust]│ +└──────────────┘ └──────────┘ └──────────────┘ +``` + +Transport is invisible to `_session_init.py` — it calls `loader.load()` the same way for all module types. The loader handles dispatch internally, after source resolution. + +--- + +## 4. 
Components + +### 4.1 Deduplicate Session Init + +`session.py:AmplifierSession.initialize()` delegates to `_session_init.initialize_session()`. Eliminates ~200 lines of duplicated loading logic. The pure-Python `AmplifierSession` and the Rust `PySession` both call the same function. + +```python +# session.py:AmplifierSession.initialize() +async def initialize(self) -> None: + if self._initialized: + return + from ._session_init import initialize_session + await initialize_session( + self.config, self.coordinator, self.session_id, self.parent_id + ) + self._initialized = True +``` + +**What gets deleted:** ~200 lines of duplicated loading logic in `session.py` (config parsing, load loops for all 5 module types, multi-instance provider remapping, SESSION_FORK emission, `_safe_exception_str` helper). + +**What stays:** `_session_init.initialize_session()` becomes the single implementation. Its signature and behavior are unchanged. + +### 4.2 Add Transport Dispatch Inside `loader.py` + +After `ModuleSourceResolver` resolves a `source_hint` to a filesystem path, but before the Python importlib loading, call Rust `resolve_module()` to detect transport and branch. + +The logic (~15 lines inside `loader.py:ModuleLoader.load()`): + +```python +# After: module_path = await resolver.resolve(...) 
+# Before: existing _load_entry_point / _load_filesystem + +from amplifier_core._engine import resolve_module + +manifest = resolve_module(str(module_path)) +transport = manifest.get("transport", "python") + +if transport == "wasm": + return self._make_wasm_mount(module_path, coordinator) +elif transport == "grpc": + return self._make_grpc_mount(module_path, config, coordinator) +# else: fall through to existing Python loading (unchanged) +``` + +**Key design decisions:** + +- **Rust `resolve_module()` is the single source of truth** for transport detection — no Python reimplementation +- **No `try/except ImportError` fallback** — Rust extensions always ship in wheels, there is no pure-Python install path +- **The existing Python loading path is the `else` branch** — zero changes to how Python modules load today +- **`loader.load()` gains `coordinator=None`** — backward compatible, existing callers that don't pass it work unchanged +- **`_session_init.py` passes `coordinator=coordinator`** at its 5 call sites — this is the only change to session init, which stays transport-unaware + +### 4.3 Delete `loader_dispatch.py` + +`loader_dispatch.py` (131 lines) gets deleted. It has three fundamental problems that can't be fixed incrementally: + +1. **Interface mismatch** — expects `source_path` (resolved filesystem path) but callers have `source_hint` (opaque URI). This caused the dict `source_hint` TypeError crash in PR #39. +2. **Duplicates Rust logic** — Python `_detect_transport()` and `_read_module_meta()` reimplement what Rust `resolve_module()` already does (with WASM introspection, security checks, SHA-256 verification). +3. **Violates CORE_DEVELOPMENT_PRINCIPLES §5** — "Don't duplicate logic across languages." + +Its transport routing logic moves into `_make_wasm_mount` and `_make_grpc_mount` helpers on `ModuleLoader`. 
Its tests (`test_loader_dispatch_wasm.py`) get refactored to test the new dispatch path inside `loader.load()` — same test logic, different entry point. + +`loader_grpc.py` stays — it contains the actual `GrpcToolBridge` implementation that speaks proto. + +### 4.4 Handle All 6 WASM Module Types + +`load_and_mount_wasm()` in the Rust PyO3 bindings currently auto-mounts tools only (wraps in `PyWasmTool`, puts into `mount_points["tools"]`). For all other module types, it returns `status: "loaded"` without mounting. + +Extend `load_and_mount_wasm()` to auto-mount all 6 module types. New `PyWasm*` wrappers, each implementing the corresponding Python Protocol: + +| Module Type | Rust Bridge | Python Wrapper | Mount Target | +|-------------|-------------|----------------|-------------| +| Tool | `Arc` | `PyWasmTool` (exists) | `mount_points["tools"]` | +| Hook | `Arc` | `PyWasmHook` (new) | `coordinator.hooks.register()` | +| Provider | `Arc` | `PyWasmProvider` (new) | `mount_points["providers"]` | +| Context | `Arc` | `PyWasmContext` (new) | `mount_points["context"]` | +| Orchestrator | `Arc` | `PyWasmOrchestrator` (new) | `mount_points["orchestrator"]` | +| Approval | `Arc` | `PyWasmApproval` (new) | Not stored in coordinator (Python-side concern) | + +Each `PyWasm*` wrapper follows the same pattern as `PyWasmTool`: holds the `Arc`, exposes the Python Protocol methods (sync or async via `pyo3-async-runtimes`), and mounts into the coordinator's `mount_points` dict. + +The `_make_wasm_mount` helper in `loader.py` then just calls `load_and_mount_wasm(coordinator, path)` and returns a cleanup function — Rust handles all the wrapping and mounting. + +### 4.5 Documentation Strategy — Docstrings as Source of Truth + +Same principle as the unified module storage design — no API usage examples in design docs that rot. 
+ +**What lives in code:** +- `/// # Examples` doc-tests on new Rust `PyWasm*` types +- Python docstrings on `loader.load()`'s new `coordinator` parameter +- Python docstrings on `_make_wasm_mount` and `_make_grpc_mount` helpers + +**What this design doc covers:** +- Why `loader_dispatch.py` was deleted (wrong layer, duplicated Rust logic) +- The transport dispatch architecture (Rust `resolve_module()` as single source of truth) +- The `session.py` → `_session_init.py` deduplication decision +- The 6 `PyWasm*` wrapper types and their Python Protocol conformance + +--- + +## 5. Python Backward Compatibility + +- `loader.load()` gains `coordinator=None` — existing callers that don't pass it work unchanged +- `_session_init.py` continues to call `loader.load()` — just passes `coordinator` as a new keyword arg +- The Python loading path is the default `else` branch — zero behavior changes for Python modules +- `session.py:initialize()` delegates to the same `_session_init.initialize_session()` it was already near-duplicating + +--- + +## 6. Rejected Alternative + +**Wiring `loader_dispatch.py` into `_session_init.py`** — This was the previous approach (reverted from PR #39). Rejected because: + +1. **Wrong abstraction boundary** — `_session_init` works with `source_hint` (opaque URI), `loader_dispatch` expects `source_path` (resolved filesystem path). Interface mismatch caused the dict `source_hint` TypeError crash. +2. **Transport leaks into session init** — violates CORE_DEVELOPMENT_PRINCIPLES §8: "Transport is invisible to developers." +3. **Duplicates Rust logic in Python** — `_detect_transport()` and `_read_module_meta()` reimplement `resolve_module()`. +4. **Two integration surfaces** — both `session.py` and `_session_init.py` would need wiring (vs. one change inside `loader.py`). + +--- + +## 7. Open Questions / Future Work + +1. **Non-tool WASM cleanup functions** — Do WASM hooks/providers/context/orchestrators need cleanup? `PyWasmTool` returns no cleanup fn. 
If WASM modules hold resources (gRPC connections, file handles), cleanup may be needed. +2. **WASM module hot-reload** — Future TODO #6. Not part of this design. +3. **gRPC adapter for Python-from-Rust-host** — Edge-layer project documented in the unified module storage design. Not part of this design. diff --git a/docs/plans/2026-03-09-unified-module-storage-design.md b/docs/plans/2026-03-09-unified-module-storage-design.md new file mode 100644 index 0000000..218c1d5 --- /dev/null +++ b/docs/plans/2026-03-09-unified-module-storage-design.md @@ -0,0 +1,214 @@ +# Unified Module Storage & Arc Sharing Fix + +> Make Rust the single source of truth for module storage across all non-Python transports, fix Arc sharing for binding layers, and establish the Python-from-Rust-host pattern. + +**Status:** Approved +**Date:** 2026-03-09 +**Prerequisites:** PR #39 (medium-priority fixes), Phase 2 (Napi-RS bindings), Phase 3 (WASM loading), Phase 4 (module resolver) + +--- + +## 1. Goal + +Make Rust the single source of truth for module storage across all non-Python transports, fix the Arc sharing problem for binding layers, and establish the architectural pattern for loading Python modules from non-Python hosts — all while preserving 100% backward compatibility for the existing Python ecosystem. + +--- + +## 2. Background + +The Rust `Coordinator` stores modules in typed fields (`tools: Mutex>>`, etc.) while the Python bindings maintain a parallel `mount_points: Py` for Python module dispatch. Two problems block non-Python hosts from using the kernel effectively: + +1. **Arc sharing is broken.** `HookRegistry` is owned by value inside `Coordinator`, so binding layers (Node, Go, etc.) can't obtain shared ownership. The Node bindings work around this by creating disconnected copies on each getter call — a fundamentally broken pattern. + +2. **gRPC transport is incomplete.** Only `load_grpc_tool()` and `load_grpc_orchestrator()` exist. 
The other four module types (provider, hook, context, approval) have no gRPC loading path, blocking polyglot module bundles. + +3. **Module resolver is locked behind `wasm` feature.** A Rust host wanting only gRPC + native modules must pull in the entire wasmtime dependency chain just to access `resolve_module()`. + +4. **No documented pattern for Python-from-Rust.** The gRPC bridges already solve this, but no architectural guidance exists for non-Python hosts encountering Python modules. + +--- + +## 3. Backward Compatibility Constraint + +**Python backward compat is sacred and non-negotiable.** The following contract is preserved unchanged: + +- `coordinator.mount_points` — mutable dict with 6 keys, has a setter for wholesale replacement +- `coordinator.mount_points["tools"]["name"] = obj` — direct dict mutation +- `coordinator.get("providers")` / `coordinator.get("tools")` — returns typed dicts +- `coordinator.mount(point, module, name=)` / `coordinator.unmount(point, name=)` — async mount/unmount +- `coordinator.hooks` property, `coordinator.hooks.register(event, handler)` +- All community module patterns (anthropic provider, shell-hook, approval hooks) + +**No backward compat needed for non-Python bindings** — nobody is using Node, Go, C#, or C++ bindings yet. Retcon freely to the correct final shape. + +--- + +## 4. Architecture: Two Clean Storage Paths + +The design explicitly maintains two independent, non-overlapping storage paths: + +``` +┌─────────────────────────────────────────────────────────┐ +│ Rust Typed Storage │ +│ HashMap> │ +│ HashMap> │ +│ ... │ +│ Serves: Rust-native, WASM, gRPC, future Go/C#/C++ │ +├─────────────────────────────────────────────────────────┤ +│ Python mount_points Dict │ +│ PyDict with 6 keys, dict protocol semantics │ +│ Serves: existing Python ecosystem (unchanged) │ +└─────────────────────────────────────────────────────────┘ +``` + +No module is mounted in both simultaneously in production. 
The Coordinator is transport-agnostic — `Arc` is `Arc` whether the module is native Rust, WASM, or gRPC. + +**Why not unify?** The Python `mount_points` dict is a deeply entrenched de facto public API with dict protocol semantics, direct mutation, identity guarantees, and wholesale replacement. Migrating it to Rust would break the Python ecosystem for no runtime benefit, and the "bridge sandwich" (Python→Rust→Python) for the Orchestrator trait is a showstopper. + +--- + +## 5. Components + +### 5.1 `Arc` in Coordinator + +**Change:** In `coordinator.rs`, change `hooks: HookRegistry` to `hooks: Arc`. + +**What changes:** +- Constructor wraps in `Arc::new(HookRegistry::new())` — 1 line +- New accessor: `hooks_shared(&self) -> Arc` — clones the Arc for shared ownership + +**What doesn't change:** +- Existing `hooks(&self) -> &HookRegistry` accessor works unchanged via `Arc::Deref` +- All ~16 existing call sites use `&HookRegistry` — zero source changes +- Python bindings unaffected (they create their own HookRegistry) +- HookRegistry internals already use `Arc>` — outer Arc is consistent + +### 5.2 Fix Node Bindings + +Delete factory methods, replace with getters that share the real instances. + +**The fix:** +- `JsAmplifierSession.coordinator` — getter returning `JsCoordinator` wrapping the Session's real `Arc` via `coordinator_shared()` +- `JsCoordinator.hooks` — getter returning `JsHookRegistry` wrapping the real `Arc` via `hooks_shared()` +- Delete `create_coordinator()` and `create_hook_registry()` factory methods +- Delete all "Future TODO #1" workaround comments and warning log messages +- Delete cached-config reconstruction logic + +**Pattern for all future bindings** (Go, C#, etc.): getters that share the real Arc, never factory methods that create disconnected copies. + +### 5.3 Complete gRPC Transport Symmetry + +**The gap:** `transport.rs` has `load_grpc_tool()` and `load_grpc_orchestrator()` but is missing four module types. 
+ +**Add:** +- `pub async fn load_grpc_provider(endpoint: &str) -> Result>` +- `pub async fn load_grpc_hook(endpoint: &str) -> Result>` +- `pub async fn load_grpc_context(endpoint: &str) -> Result>` +- `pub async fn load_grpc_approval(endpoint: &str) -> Result>` + +Each is ~3-5 lines delegating to the corresponding `GrpcXxxBridge::connect()`. Completes the transport surface so any host language can load any module type over gRPC. + +### 5.4 Decouple `LoadedModule` from `wasm` Feature Gate + +**Split into feature-gated tiers:** + +**Always available (no feature gate):** +- `resolve_module()` — detects transport type from path +- `ModuleManifest`, `ModuleArtifact`, `Transport` types +- `LoadedModule` variants for all module types +- gRPC and native loading paths + +**Behind `#[cfg(feature = "wasm")]` only:** +- WASM component metadata parsing +- `load_wasm_*` functions +- wasmtime `Engine` parameter on `load_module()` +- WASM-specific detection in `resolve_module()` (`.wasm` file scanning) + +**Result:** `cargo add amplifier-core` (no features) gives access to `resolve_module()` → `load_grpc_provider()` for polyglot loading. Add `features = ["wasm"]` only when WASM module loading is needed. + +### 5.5 Documentation Strategy — Docstrings Are the Source of Truth + +**Principle:** No API usage examples in design docs or prose markdown. Per Context Poisoning prevention principles, each concept is documented in exactly ONE place. + +For API usage, that place is **Rust `/// # Examples` doc-test blocks**: +- Compiled and tested by `cargo test` — drift caught as compile failures +- Surfaced by LSP hover via rust-analyzer +- Surfaced by `cargo doc` for browsable HTML +- Single source of truth — no separate markdown to keep in sync + +**This design doc covers:** Architectural decisions and rationale only. 
+ +**Implementation tasks will include:** Adding/updating doc-tests on `hooks_shared()`, the 4 new transport functions, `resolve_module()`, and `LoadedModule` dispatch patterns. + +**For binding layers** (Node, future Go/C#): Each binding's README gets a single quick-start example, but authoritative API docs are generated from the binding code itself (TypeScript `.d.ts` types, Go godoc, etc.). + +--- + +## 6. Python-from-Rust-Host Pattern + +When a non-Python host encounters a Python module: + +``` +resolve_module(path) Host Policy Rust Kernel + → Transport::Python → spawn gRPC adapter → load_grpc_provider() + + package name (host decides how) → Arc +``` + +**The dispatch rule:** The resolver returns `Transport::Python` with the package name. It does NOT spawn processes or manage adapters. The resolver detects; the host decides. + +**The adapter contract:** A future `amplifier-grpc-adapter` Python package (~200-400 lines) wraps any Python module as a gRPC service using the existing proto contracts. Not part of this design — documented as the intended edge-layer pattern. + +**Why gRPC, not embedded Python (PyO3):** +- Full Python isolation (own process, own GIL) +- All 6 gRPC bridges already exist and work +- No GIL contention across modules +- No interpreter lifecycle management in the kernel (violates "mechanism not policy") +- Works for ANY host language, not just Rust + +**Why NOT the kernel's responsibility:** Process spawning is policy. The kernel provides gRPC bridge mechanism; the host decides when/how to spawn adapters. Different deployments could use different strategies (containerized, Lambda, sidecar). + +--- + +## 7. Universal API Shape + +After this design, every language follows the same pattern: + +1. Create `AmplifierSession` from config +2. Get `coordinator` (shared via Arc, not copied) +3. Mount modules via typed methods +4. Get `hooks` from coordinator (shared via Arc, not copied) +5. 
Call `execute()` + +For polyglot bundles, the host dispatches on `Transport` from the module resolver: + +| Transport | Action | +|-----------|--------| +| `Native` | Direct `Arc` | +| `Wasm` | `load_wasm_*()` functions | +| `Grpc` | `load_grpc_*()` functions | +| `Python` (non-Python host) | Spawn gRPC adapter, then `load_grpc_*()` | +| `Python` (Python host) | Existing Python import path, unchanged | + +--- + +## 8. Rejected Alternatives + +1. **Full Rust storage unification** — Migrate Python `mount_points` to Rust. Rejected: `mount_points` is a deeply entrenched de facto public API. The bridge sandwich (Python→Rust→Python) for the Orchestrator trait is a showstopper, and the backward compat risk is critical. + +2. **Embedded Python from Rust host (PyO3)** — Rejected: puts interpreter lifecycle management (policy) in the kernel, creates GIL contention, requires 5 new complex bridge types when gRPC already works. + +3. **Python modules via WASM compilation** — Immediately disqualified. Python community modules use C extensions (httpx, aiohttp), asyncio, and filesystem access — none viable in WASM. + +4. **`Arc` trait abstraction** — YAGNI. Typed `HashMap` fields on Coordinator are simple, correct, and sufficient. + +--- + +## 9. Open Questions / Future Work + +1. **Python gRPC adapter** — `amplifier-grpc-adapter` Python package (~200-400 lines). Edge-layer project for a future sprint. + +2. **Unifying Python and Rust HookRegistries** — Currently Python creates its own HookRegistry independent of the Coordinator's. Could be unified so hooks registered from Rust are visible to Python and vice versa. Separate decision. + +3. **`process_hook_result` stays in Python** — Every branch routes to Python subsystems (context manager, approval system, display system). If Rust consumers need hook result processing, build a parallel Rust implementation. + +4. **Go/C#/C++ native bindings** — The Arc sharing fix and gRPC symmetry completion prepare the architecture. 
Binding design is future work. diff --git a/docs/plans/2026-03-10-app-layer-services-design.md b/docs/plans/2026-03-10-app-layer-services-design.md new file mode 100644 index 0000000..9c9d518 --- /dev/null +++ b/docs/plans/2026-03-10-app-layer-services-design.md @@ -0,0 +1,204 @@ +# App-Layer Services & Review Fixes + +> Wire `ApprovalProvider` and `DisplayService` as cross-language Rust traits on the Coordinator, fix WASM hook registration with `get-subscriptions`, and address remaining code review findings. + +**Status:** Approved +**Date:** 2026-03-10 +**Prerequisites:** Phase 3 (WASM loading), Phase 4 (module resolver), Session Init Polyglot Dispatch + +--- + +## 1. Goal + +Enable non-Python hosts to provide app-layer services (approval, display) through the Rust Coordinator while preserving 100% Python backward compatibility. Fix WASM hook registration and address code review findings from the polyglot dispatch work. + +--- + +## 2. Background + +amplifier-core has a Rust kernel with Python bindings (PyO3) and Node bindings (Napi-RS). Four app-layer services exist today but only as Python-side concerns: `ApprovalSystem`, `DisplaySystem`, `ModuleLoader`, and `ModuleSourceResolver`. The Rust Coordinator has no awareness of any of them — they're all `Py` blobs in the Python bindings. + +`ApprovalProvider` already exists as a Rust trait in `traits.rs` with gRPC and WASM bridges, but is not wired to the Coordinator struct. `DisplayService` has no Rust representation at all. + +Two independent reviewers confirmed: only `ApprovalProvider` and `DisplayService` belong as kernel traits. `ModuleLoader` and `SourceResolver` are foundation/app-layer concerns — inherently language-specific, involve file I/O, and violate kernel principles. 
+ +Additionally, WASM hook modules are silently dropped at mount time (no registration mechanism), engine errors are swallowed at debug level, `PyWasmOrchestrator` silently discards 5 of 6 parameters without documentation, and `_safe_exception_str` is duplicated across two files. + +--- + +## 3. Architecture Decision: What Goes Where + +The kernel defines traits for services the kernel **dispatches through** during its core coordination lifecycle: + +| Service | Kernel dispatches through it? | Verdict | +|---------|-------------------------------|---------| +| `ApprovalProvider` | Yes — hook pipeline calls `request_approval()` when hook returns `ask_user` | **Kernel trait** | +| `DisplayService` | Yes — hook pipeline calls `show_message()` when hook returns `user_message` | **Kernel trait** | +| `ModuleLoader` | No — app layer calls it during init | **Foundation/app layer** | +| `SourceResolver` | No — `ModuleLoader` calls it | **Foundation/app layer** | + +The load loop (`_session_init.py`) is pure policy — module ordering, error handling, multi-instance remapping are all decisions two teams could disagree on. It stays in foundation/app layer. + +Per-language SDKs are premature — wait for ≥2 non-Python apps to prove the need. + +--- + +## 4. Components + +### 4.1 Wire `ApprovalProvider` to Coordinator + +The `ApprovalProvider` Rust trait already exists in `traits.rs` with gRPC and WASM bridges. The Rust `Coordinator` struct has no field for it — only the Python `PyCoordinator` has `approval_system_obj: Py`. + +Add to the Rust `Coordinator`, following the same pattern as `orchestrator` and `context` (single-slot, Option): + +- Field: `approval_provider: Mutex>>` +- Accessor: `set_approval_provider(Arc)` +- Accessor: `approval_provider() -> Option>` + +Add `PyApprovalProviderBridge` in the Python bindings — wraps the Python `ApprovalSystem` object and implements the Rust `ApprovalProvider` trait, following the exact pattern of `PyHookHandlerBridge`. 
When the Python app provides an `approval_system`, the PyO3 layer wraps it and sets it on the Rust Coordinator. + +**Note on dual Python protocols:** Two Python approval protocols exist — `ApprovalSystem` (simple: prompt, options, timeout, default → string) in `approval.py` and `ApprovalProvider` (typed: ApprovalRequest → ApprovalResponse) in `interfaces.py`. The Rust trait matches `ApprovalProvider`. The PyO3 bridge wraps the simpler `ApprovalSystem` by adapting between the two interfaces. + +**Python backward compat:** The `coordinator.approval_system` property still works — PyO3 bridge wraps it to the Rust trait. + +### 4.2 Add `DisplayService` Trait + +`DisplayService` exists only as a Python Protocol (`display.py`) with one method: `show_message(message, level, source)`. The Rust Coordinator has no awareness of it. It's consumed by `process_hook_result` when a hook returns `action: "user_message"`. + +Add a `DisplayService` trait to `traits.rs`: + +```rust +pub trait DisplayService: Send + Sync { + fn show_message( + &self, + message: &str, + level: &str, // "info", "warning", "error" + source: &str, // e.g. "hook", "system" + ) -> Pin> + Send + '_>>; +} +``` + +Add to Coordinator: + +- Field: `display_service: Mutex>>` +- Accessor: `set_display_service(Arc)` +- Accessor: `display_service() -> Option>` + +Add `PyDisplayServiceBridge` in the Python bindings — wraps the Python `DisplaySystem` object and implements the Rust trait. ~30 lines, simplest possible bridge. Display is fire-and-forget with a fallback to `logger.info()` if the service isn't set. + +**What this enables:** A Rust or Node host can provide its own display implementation (WebSocketDisplay, StdoutDisplay, etc.) and hook results with `user_message` action reach it through the kernel's dispatch. + +**Python backward compat:** The `coordinator.display_system` property still works — same bridge pattern. 
+ +### 4.3 Fix C1 — WASM Hook Registration via `get-subscriptions` + +WASM hook modules loaded via `_make_wasm_mount` are silently dropped — the `PyWasmHook` wrapper is created but never registered with `coordinator.hooks`. There's no mechanism for a WASM hook to declare which events it handles. + +**Part A: Add `get-subscriptions` to the WIT hook interface:** + +```wit +interface hook-handler { + handle: func(event: list) -> result, string>; + get-subscriptions: func(config: list) -> list; +} + +record event-subscription { + event: string, + priority: s32, + name: string, +} +``` + +The guest SDK (`amplifier-guest`) gets a corresponding Rust trait method: + +```rust +pub trait HookHandler { + fn handle(&self, event: &str, data: Value) -> Result; + fn get_subscriptions(&self, config: Value) -> Vec; +} +``` + +**Part B: Host calls `get-subscriptions` at mount time:** + +In `load_and_mount_wasm()` for hook modules, after loading the WASM binary: + +1. Call `get-subscriptions(config)` on the guest +2. For each returned subscription, create a proxy `WasmHookBridge` handler and call `coordinator.hooks.register(event, handler, priority, name)` +3. Collect unregister functions, return cleanup + +The `_make_wasm_mount` closure in `loader.py` then handles hooks correctly — `load_and_mount_wasm()` returns `status: "mounted"` with the registrations done. + +**Same pattern for gRPC:** Add a `GetSubscriptions` RPC to the gRPC `HookService` so gRPC hooks can self-describe their subscriptions. + +**Future bidirectional path (comment only):** If hooks need to read coordinator state during registration (e.g., conditionally subscribe based on mounted providers), a `register-hook` function can be added to the `kernel-service` host import interface, enabling imperative registration matching the Python `coordinator.hooks.register()` pattern. Document this in the WIT file and host implementation as a comment. 
+ +### 4.4 Fix I1 — Promote Engine Errors to Warning + +In `loader.py`'s transport dispatch, `except Exception as engine_err` logs at `debug` level. A real `resolve_module()` failure (corrupt `amplifier.toml`, wrong permissions, Rust engine bug) is silently swallowed and the module falls through to the Python loader with a misleading error. + +Change from `logger.debug(...)` to `logger.warning(...)`: + +```python +except Exception as engine_err: + logger.warning( + f"resolve_module failed for '{module_id}': {engine_err}, " + "falling through to Python loader" + ) +``` + +One line change. The `ImportError` path stays at `debug` — Rust engine not installed is a valid defensive pattern even though wheels always include it. + +### 4.5 Fix I3 — `PyWasmOrchestrator` Documentation + +`PyWasmOrchestrator.execute()` accepts the full Python Orchestrator Protocol (6 parameters: `prompt`, `context`, `providers`, `tools`, `hooks`, `coordinator`) then silently discards 5 of them with `let _ = (context, providers, tools, hooks, coordinator)`. + +The current signature is **correct**. `_session_exec.run_orchestrator()` always passes all 6 kwargs to whatever orchestrator is mounted — there is one unified dispatch path. Changing the signature would cause a `TypeError` at runtime. + +The fix is documentation, not code change: + +1. **Add a `log::warn!` in `load_and_mount_wasm()`** when mounting a WASM orchestrator noting that context/providers/tools/hooks/coordinator are not forwarded to WASM guests in this version, and that the WASM guest accesses kernel services via host imports instead. +2. **Improve the doc comment on `PyWasmOrchestrator.execute()`** explaining why the params are accepted and discarded — protocol conformance with the WASM guest using host imports for kernel access. +3. 
**Add a comment pointing to the future path:** forwarding context/providers/tools to the WASM guest via kernel-service host imports, so WASM orchestrators that need session state can pull it on demand rather than receiving it as parameters. + +`NullContextManager` stays — the Rust `Orchestrator` trait requires a `context` parameter. When WASM orchestrators gain real context forwarding, it gets replaced with the actual context. + +### 4.6 Fix I4 — Deduplicate `_safe_exception_str` + +`_safe_exception_str` is defined identically in both `session.py` and `_session_init.py`. Since `session.py` now delegates to `_session_init.py`, the copy in `session.py` is redundant. + +Delete `_safe_exception_str` from `session.py`. If it's still called there, import from `_session_init`. If it's not used after deduplication, just delete it. + +--- + +## 5. Python Backward Compatibility + +- `coordinator.approval_system` property still works — PyO3 bridge wraps it to the Rust trait +- `coordinator.display_system` property still works — same bridge pattern +- `_session_init.py` calling `loader.load()` is unchanged +- `session.py:initialize()` delegates to `_session_init.initialize_session()` (already done) +- WASM hook modules now properly register instead of being silently dropped — bug fix, not behavior change + +--- + +## 6. Rejected Alternatives + +1. **ModuleLoader as kernel trait** — Module loading is inherently language-specific (Python uses importlib, Rust uses dylibs, Node uses require()). No meaningful cross-language abstraction. Would create an FFI trampoline (Rust → GIL → Python importlib → GIL → Rust coordinator) that adds indirection without value. Violates "no file I/O in kernel." + +2. **SourceResolver as kernel trait with Coordinator field** — Source resolution involves network I/O (git clone), caching, authentication — all policy. The kernel never calls it. Trait definition in `traits.rs` is acceptable for bridge generation, but not as a Coordinator field. + +3. 
**Session::initialize() in Rust kernel** — The load loop is pure policy: module ordering, error handling, multi-instance remapping, fork event emission. Two teams could disagree on every decision. The kernel provides mount primitives; foundation builds the loading machinery on top. + +4. **Per-language Foundation SDKs** — Premature. Wait for ≥2 non-Python apps to prove the need. When they arrive, standalone `amplifier-sdk-*` repos. + +5. **Bidirectional WASM hook registration** — Would let WASM guests call `coordinator.hooks.register()` via kernel-service host imports. More powerful but significantly more complex (requires hybrid WIT world importing kernel-service + exporting hook-handler). The self-describing `get-subscriptions` approach covers 95%+ of real hook use cases. Bidirectional can be added later without breaking changes. + +6. **Changing `PyWasmOrchestrator.execute()` to accept only `prompt`** — Would break at runtime. `_session_exec.run_orchestrator()` always passes all 6 kwargs via a unified dispatch path. The current full-signature approach is correct protocol conformance. + +--- + +## 7. Open Questions / Future Work + +1. **Consolidate Python approval protocols** — `ApprovalSystem` (simple) and `ApprovalProvider` (typed) are two competing interfaces. Consider converging them. +2. **WASM hook cleanup** — Do WASM hooks need cleanup functions? The `get-subscriptions` approach returns unregister closures from the host — cleanup is host-managed. But if the WASM guest holds resources, it may need a `cleanup` export in the WIT. +3. **Bidirectional WASM registration** — Add `register-hook` to kernel-service when a real use case requires reading coordinator state during hook registration. +4. **`SourceResolver` trait in `traits.rs`** — Acceptable for gRPC/WASM bridge generation even though the kernel doesn't dispatch through it. Can be added when a gRPC source resolver is needed. 
\ No newline at end of file diff --git a/proto/amplifier_module.proto b/proto/amplifier_module.proto index 56f7bba..7972440 100644 --- a/proto/amplifier_module.proto +++ b/proto/amplifier_module.proto @@ -482,6 +482,19 @@ message HookHandleRequest { string data_json = 2; } +// GetSubscriptions: allows a hook module to declare which events it handles. +message GetSubscriptionsRequest { + string config_json = 1; +} +message GetSubscriptionsResponse { + repeated EventSubscription subscriptions = 1; +} +message EventSubscription { + string event = 1; + int32 priority = 2; + string name = 3; +} + // --------------------------------------------------------------------------- // Module services // --------------------------------------------------------------------------- @@ -512,6 +525,12 @@ service ContextService { // Hook module contract — event interception. service HookService { rpc Handle(HookHandleRequest) returns (HookResult); + + // Return the event subscriptions this hook wants to receive. + // The host calls this at mount time and registers the subscriptions itself. + // A future RegisterHook RPC on KernelService will allow bidirectional + // registration where the module pushes subscriptions to the kernel. + rpc GetSubscriptions(GetSubscriptionsRequest) returns (GetSubscriptionsResponse); } // Approval module contract — human-in-the-loop approval. 
diff --git a/proto/amplifier_module_pb2.py b/proto/amplifier_module_pb2.py index 53cf4ac..9b99368 100644 --- a/proto/amplifier_module_pb2.py +++ b/proto/amplifier_module_pb2.py @@ -24,7 +24,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x61mplifier_module.proto\x12\x10\x61mplifier.module\"\x07\n\x05\x45mpty\"F\n\x08ToolSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x17\n\x0fparameters_json\x18\x03 \x01(\t\"9\n\x12ToolExecuteRequest\x12\r\n\x05input\x18\x01 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\"[\n\x13ToolExecuteResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0e\n\x06output\x18\x02 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"\xd6\x01\n\nModuleInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x31\n\x0bmodule_type\x18\x04 \x01(\x0e\x32\x1c.amplifier.module.ModuleType\x12\x13\n\x0bmount_point\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x1a\n\x12\x63onfig_schema_json\x18\x07 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x08 \x03(\t\x12\x0e\n\x06\x61uthor\x18\t \x01(\t\"\x8c\x01\n\x0cMountRequest\x12:\n\x06\x63onfig\x18\x01 \x03(\x0b\x32*.amplifier.module.MountRequest.ConfigEntry\x12\x11\n\tmodule_id\x18\x02 \x01(\t\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"_\n\rMountResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12.\n\x06status\x18\x03 \x01(\x0e\x32\x1e.amplifier.module.HealthStatus\"V\n\x13HealthCheckResponse\x12.\n\x06status\x18\x01 \x01(\x0e\x32\x1e.amplifier.module.HealthStatus\x12\x0f\n\x07message\x18\x02 \x01(\t\"\xca\x02\n\x0b\x43onfigField\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x35\n\nfield_type\x18\x03 \x01(\x0e\x32!.amplifier.module.ConfigFieldType\x12\x0e\n\x06prompt\x18\x04 
\x01(\t\x12\x0f\n\x07\x65nv_var\x18\x05 \x01(\t\x12\x0f\n\x07\x63hoices\x18\x06 \x03(\t\x12\x10\n\x08required\x18\x07 \x01(\x08\x12\x15\n\rdefault_value\x18\x08 \x01(\t\x12>\n\tshow_when\x18\t \x03(\x0b\x32+.amplifier.module.ConfigField.ShowWhenEntry\x12\x16\n\x0erequires_model\x18\n \x01(\x08\x1a/\n\rShowWhenEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\rProviderError\x12\x37\n\nerror_type\x18\x01 \x01(\x0e\x32#.amplifier.module.ProviderErrorType\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x15\n\rprovider_name\x18\x03 \x01(\t\x12\r\n\x05model\x18\x04 \x01(\t\x12\x13\n\x0bstatus_code\x18\x05 \x01(\x05\x12\x11\n\tretryable\x18\x06 \x01(\x08\x12\x13\n\x0bretry_after\x18\x07 \x01(\x01\"\x97\x01\n\tToolError\x12\x33\n\nerror_type\x18\x01 \x01(\x0e\x32\x1f.amplifier.module.ToolErrorType\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x11\n\ttool_name\x18\x03 \x01(\t\x12\x0e\n\x06stdout\x18\x04 \x01(\t\x12\x0e\n\x06stderr\x18\x05 \x01(\t\x12\x11\n\texit_code\x18\x06 \x01(\x05\"d\n\tHookError\x12\x33\n\nerror_type\x18\x01 \x01(\x0e\x32\x1f.amplifier.module.HookErrorType\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x11\n\thook_name\x18\x03 \x01(\t\"\xef\x01\n\x0e\x41mplifierError\x12\x39\n\x0eprovider_error\x18\x01 \x01(\x0b\x32\x1f.amplifier.module.ProviderErrorH\x00\x12\x31\n\ntool_error\x18\x02 \x01(\x0b\x32\x1b.amplifier.module.ToolErrorH\x00\x12\x31\n\nhook_error\x18\x03 \x01(\x0b\x32\x1b.amplifier.module.HookErrorH\x00\x12\x17\n\rgeneric_error\x18\x04 \x01(\tH\x00\x12\x1a\n\x10validation_error\x18\x05 \x01(\tH\x00\x42\x07\n\x05\x65rror\"\x19\n\tTextBlock\x12\x0c\n\x04text\x18\x01 \x01(\t\"E\n\rThinkingBlock\x12\x10\n\x08thinking\x18\x01 \x01(\t\x12\x11\n\tsignature\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\"%\n\x15RedactedThinkingBlock\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\t\"=\n\rToolCallBlock\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\ninput_json\x18\x03 
\x01(\t\"<\n\x0fToolResultBlock\x12\x14\n\x0ctool_call_id\x18\x01 \x01(\t\x12\x13\n\x0boutput_json\x18\x02 \x01(\t\"C\n\nImageBlock\x12\x12\n\nmedia_type\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x13\n\x0bsource_json\x18\x03 \x01(\t\"2\n\x0eReasoningBlock\x12\x0f\n\x07\x63ontent\x18\x01 \x03(\t\x12\x0f\n\x07summary\x18\x02 \x03(\t\"\xf1\x03\n\x0c\x43ontentBlock\x12\x31\n\ntext_block\x18\x01 \x01(\x0b\x32\x1b.amplifier.module.TextBlockH\x00\x12\x39\n\x0ethinking_block\x18\x02 \x01(\x0b\x32\x1f.amplifier.module.ThinkingBlockH\x00\x12J\n\x17redacted_thinking_block\x18\x03 \x01(\x0b\x32\'.amplifier.module.RedactedThinkingBlockH\x00\x12:\n\x0ftool_call_block\x18\x04 \x01(\x0b\x32\x1f.amplifier.module.ToolCallBlockH\x00\x12>\n\x11tool_result_block\x18\x05 \x01(\x0b\x32!.amplifier.module.ToolResultBlockH\x00\x12\x33\n\x0bimage_block\x18\x06 \x01(\x0b\x32\x1c.amplifier.module.ImageBlockH\x00\x12;\n\x0freasoning_block\x18\x07 \x01(\x0b\x32 .amplifier.module.ReasoningBlockH\x00\x12\x30\n\nvisibility\x18\x08 \x01(\x0e\x32\x1c.amplifier.module.VisibilityB\x07\n\x05\x62lock\"B\n\x10\x43ontentBlockList\x12.\n\x06\x62locks\x18\x01 \x03(\x0b\x32\x1e.amplifier.module.ContentBlock\"\xca\x01\n\x07Message\x12$\n\x04role\x18\x01 \x01(\x0e\x32\x16.amplifier.module.Role\x12\x16\n\x0ctext_content\x18\x02 \x01(\tH\x00\x12;\n\rblock_content\x18\x03 \x01(\x0b\x32\".amplifier.module.ContentBlockListH\x00\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x14\n\x0ctool_call_id\x18\x05 \x01(\t\x12\x15\n\rmetadata_json\x18\x06 \x01(\tB\t\n\x07\x63ontent\"C\n\x0fToolCallMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x16\n\x0e\x61rguments_json\x18\x03 \x01(\t\"K\n\rToolSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x17\n\x0fparameters_json\x18\x03 \x01(\t\"7\n\x10JsonSchemaFormat\x12\x13\n\x0bschema_json\x18\x01 \x01(\t\x12\x0e\n\x06strict\x18\x02 \x01(\x08\"u\n\x0eResponseFormat\x12\x0e\n\x04text\x18\x01 
\x01(\x08H\x00\x12\x0e\n\x04json\x18\x02 \x01(\x08H\x00\x12\x39\n\x0bjson_schema\x18\x03 \x01(\x0b\x32\".amplifier.module.JsonSchemaFormatH\x00\x42\x08\n\x06\x66ormat\"\xa3\x01\n\x05Usage\x12\x15\n\rprompt_tokens\x18\x01 \x01(\x05\x12\x19\n\x11\x63ompletion_tokens\x18\x02 \x01(\x05\x12\x14\n\x0ctotal_tokens\x18\x03 \x01(\x05\x12\x18\n\x10reasoning_tokens\x18\x04 \x01(\x05\x12\x19\n\x11\x63\x61\x63he_read_tokens\x18\x05 \x01(\x05\x12\x1d\n\x15\x63\x61\x63he_creation_tokens\x18\x06 \x01(\x05\"@\n\x0b\x44\x65gradation\x12\x11\n\trequested\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tual\x18\x02 \x01(\t\x12\x0e\n\x06reason\x18\x03 \x01(\t\"\x81\x03\n\x0b\x43hatRequest\x12+\n\x08messages\x18\x01 \x03(\x0b\x32\x19.amplifier.module.Message\x12.\n\x05tools\x18\x02 \x03(\x0b\x32\x1f.amplifier.module.ToolSpecProto\x12\x39\n\x0fresponse_format\x18\x03 \x01(\x0b\x32 .amplifier.module.ResponseFormat\x12\x13\n\x0btemperature\x18\x04 \x01(\x01\x12\r\n\x05top_p\x18\x05 \x01(\x01\x12\x19\n\x11max_output_tokens\x18\x06 \x01(\x05\x12\x17\n\x0f\x63onversation_id\x18\x07 \x01(\t\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x15\n\rmetadata_json\x18\t \x01(\t\x12\r\n\x05model\x18\n \x01(\t\x12\x13\n\x0btool_choice\x18\x0b \x01(\t\x12\x0c\n\x04stop\x18\x0c \x03(\t\x12\x18\n\x10reasoning_effort\x18\r \x01(\t\x12\x0f\n\x07timeout\x18\x0e \x01(\x01\"\xe0\x01\n\x0c\x43hatResponse\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x35\n\ntool_calls\x18\x02 \x03(\x0b\x32!.amplifier.module.ToolCallMessage\x12&\n\x05usage\x18\x03 \x01(\x0b\x32\x17.amplifier.module.Usage\x12\x32\n\x0b\x64\x65gradation\x18\x04 \x01(\x0b\x32\x1d.amplifier.module.Degradation\x12\x15\n\rfinish_reason\x18\x05 \x01(\t\x12\x15\n\rmetadata_json\x18\x06 \x01(\t\"F\n\nToolResult\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x13\n\x0boutput_json\x18\x02 \x01(\t\x12\x12\n\nerror_json\x18\x03 \x01(\t\"\x8d\x04\n\nHookResult\x12,\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x1c.amplifier.module.HookAction\x12\x11\n\tdata_json\x18\x02 
\x01(\t\x12\x0e\n\x06reason\x18\x03 \x01(\t\x12\x19\n\x11\x63ontext_injection\x18\x04 \x01(\t\x12\x46\n\x16\x63ontext_injection_role\x18\x05 \x01(\x0e\x32&.amplifier.module.ContextInjectionRole\x12\x11\n\tephemeral\x18\x06 \x01(\x08\x12\x17\n\x0f\x61pproval_prompt\x18\x07 \x01(\t\x12\x18\n\x10\x61pproval_options\x18\x08 \x03(\t\x12\x18\n\x10\x61pproval_timeout\x18\t \x01(\x01\x12;\n\x10\x61pproval_default\x18\n \x01(\x0e\x32!.amplifier.module.ApprovalDefault\x12\x17\n\x0fsuppress_output\x18\x0b \x01(\x08\x12\x14\n\x0cuser_message\x18\x0c \x01(\t\x12>\n\x12user_message_level\x18\r \x01(\x0e\x32\".amplifier.module.UserMessageLevel\x12\x1b\n\x13user_message_source\x18\x0e \x01(\t\x12\"\n\x1a\x61ppend_to_last_tool_result\x18\x0f \x01(\x08\"\x8d\x01\n\tModelInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0e\x63ontext_window\x18\x03 \x01(\x05\x12\x19\n\x11max_output_tokens\x18\x04 \x01(\x05\x12\x14\n\x0c\x63\x61pabilities\x18\x05 \x03(\t\x12\x15\n\rdefaults_json\x18\x06 \x01(\t\"\xb0\x01\n\x0cProviderInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x1b\n\x13\x63redential_env_vars\x18\x03 \x03(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\x12\x15\n\rdefaults_json\x18\x05 \x01(\t\x12\x34\n\rconfig_fields\x18\x06 \x03(\x0b\x32\x1d.amplifier.module.ConfigField\"o\n\x0f\x41pprovalRequest\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65tails_json\x18\x03 \x01(\t\x12\x12\n\nrisk_level\x18\x04 \x01(\t\x12\x0f\n\x07timeout\x18\x05 \x01(\x01\"F\n\x10\x41pprovalResponse\x12\x10\n\x08\x61pproved\x18\x01 \x01(\x08\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x10\n\x08remember\x18\x03 \x01(\x08\"A\n\x12ListModelsResponse\x12+\n\x06models\x18\x01 \x03(\x0b\x32\x1b.amplifier.module.ModelInfo\"O\n\x16ParseToolCallsResponse\x12\x35\n\ntool_calls\x18\x01 
\x03(\x0b\x32!.amplifier.module.ToolCallMessage\"@\n\x1aOrchestratorExecuteRequest\x12\x0e\n\x06prompt\x18\x01 \x01(\t\x12\x12\n\nsession_id\x18\x02 \x01(\t\">\n\x1bOrchestratorExecuteResponse\x12\x10\n\x08response\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\"?\n\x11\x41\x64\x64MessageRequest\x12*\n\x07message\x18\x01 \x01(\x0b\x32\x19.amplifier.module.Message\"B\n\x13GetMessagesResponse\x12+\n\x08messages\x18\x01 \x03(\x0b\x32\x19.amplifier.module.Message\"J\n\x1bGetMessagesForRequestParams\x12\x14\n\x0ctoken_budget\x18\x01 \x01(\x05\x12\x15\n\rprovider_name\x18\x02 \x01(\t\"A\n\x12SetMessagesRequest\x12+\n\x08messages\x18\x01 \x03(\x0b\x32\x19.amplifier.module.Message\"5\n\x11HookHandleRequest\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x11\n\tdata_json\x18\x02 \x01(\t\"d\n\x1b\x43ompleteWithProviderRequest\x12\x15\n\rprovider_name\x18\x01 \x01(\t\x12.\n\x07request\x18\x02 \x01(\x0b\x32\x1d.amplifier.module.ChatRequest\";\n\x12\x45xecuteToolRequest\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x12\n\ninput_json\x18\x02 \x01(\t\"3\n\x0f\x45mitHookRequest\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x11\n\tdata_json\x18\x02 \x01(\t\"V\n\x19\x45mitHookAndCollectRequest\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x11\n\tdata_json\x18\x02 \x01(\t\x12\x17\n\x0ftimeout_seconds\x18\x03 \x01(\x01\"4\n\x1a\x45mitHookAndCollectResponse\x12\x16\n\x0eresponses_json\x18\x01 \x03(\t\"(\n\x12GetMessagesRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\"Y\n\x17KernelAddMessageRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12*\n\x07message\x18\x02 \x01(\x0b\x32\x19.amplifier.module.Message\"a\n\x17GetMountedModuleRequest\x12\x13\n\x0bmodule_name\x18\x01 \x01(\t\x12\x31\n\x0bmodule_type\x18\x02 \x01(\x0e\x32\x1c.amplifier.module.ModuleType\"U\n\x18GetMountedModuleResponse\x12\r\n\x05\x66ound\x18\x01 \x01(\x08\x12*\n\x04info\x18\x02 \x01(\x0b\x32\x1c.amplifier.module.ModuleInfo\"=\n\x19RegisterCapabilityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nvalue_json\x18\x02 
\x01(\t\"$\n\x14GetCapabilityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\":\n\x15GetCapabilityResponse\x12\r\n\x05\x66ound\x18\x01 \x01(\x08\x12\x12\n\nvalue_json\x18\x02 \x01(\t*\xbc\x01\n\nModuleType\x12\x1b\n\x17MODULE_TYPE_UNSPECIFIED\x10\x00\x12\x18\n\x14MODULE_TYPE_PROVIDER\x10\x01\x12\x14\n\x10MODULE_TYPE_TOOL\x10\x02\x12\x14\n\x10MODULE_TYPE_HOOK\x10\x03\x12\x16\n\x12MODULE_TYPE_MEMORY\x10\x04\x12\x19\n\x15MODULE_TYPE_GUARDRAIL\x10\x05\x12\x18\n\x14MODULE_TYPE_APPROVAL\x10\x06*\x82\x01\n\x0cHealthStatus\x12\x1d\n\x19HEALTH_STATUS_UNSPECIFIED\x10\x00\x12\x19\n\x15HEALTH_STATUS_SERVING\x10\x01\x12\x1d\n\x19HEALTH_STATUS_NOT_SERVING\x10\x02\x12\x19\n\x15HEALTH_STATUS_UNKNOWN\x10\x03*\xad\x01\n\x0f\x43onfigFieldType\x12!\n\x1d\x43ONFIG_FIELD_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18\x43ONFIG_FIELD_TYPE_STRING\x10\x01\x12\x1c\n\x18\x43ONFIG_FIELD_TYPE_NUMBER\x10\x02\x12\x1d\n\x19\x43ONFIG_FIELD_TYPE_BOOLEAN\x10\x03\x12\x1c\n\x18\x43ONFIG_FIELD_TYPE_SECRET\x10\x04*\xd8\x02\n\x11ProviderErrorType\x12#\n\x1fPROVIDER_ERROR_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18PROVIDER_ERROR_TYPE_AUTH\x10\x01\x12\"\n\x1ePROVIDER_ERROR_TYPE_RATE_LIMIT\x10\x02\x12&\n\"PROVIDER_ERROR_TYPE_CONTEXT_LENGTH\x10\x03\x12\'\n#PROVIDER_ERROR_TYPE_INVALID_REQUEST\x10\x04\x12&\n\"PROVIDER_ERROR_TYPE_CONTENT_FILTER\x10\x05\x12#\n\x1fPROVIDER_ERROR_TYPE_UNAVAILABLE\x10\x06\x12\x1f\n\x1bPROVIDER_ERROR_TYPE_TIMEOUT\x10\x07\x12\x1d\n\x19PROVIDER_ERROR_TYPE_OTHER\x10\x08*\x8c\x01\n\rToolErrorType\x12\x1f\n\x1bTOOL_ERROR_TYPE_UNSPECIFIED\x10\x00\x12\x1d\n\x19TOOL_ERROR_TYPE_EXECUTION\x10\x01\x12\x1e\n\x1aTOOL_ERROR_TYPE_VALIDATION\x10\x02\x12\x1b\n\x17TOOL_ERROR_TYPE_TIMEOUT\x10\x03*\x8c\x01\n\rHookErrorType\x12\x1f\n\x1bHOOK_ERROR_TYPE_UNSPECIFIED\x10\x00\x12\x1d\n\x19HOOK_ERROR_TYPE_EXECUTION\x10\x01\x12\x1e\n\x1aHOOK_ERROR_TYPE_VALIDATION\x10\x02\x12\x1b\n\x17HOOK_ERROR_TYPE_TIMEOUT\x10\x03*\x86\x01\n\x04Role\x12\x14\n\x10ROLE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bROLE_SYSTEM\x10\x01\x12\r\n\tROLE_USER\x10\x02
\x12\x12\n\x0eROLE_ASSISTANT\x10\x03\x12\r\n\tROLE_TOOL\x10\x04\x12\x11\n\rROLE_FUNCTION\x10\x05\x12\x12\n\x0eROLE_DEVELOPER\x10\x06*o\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x12\n\x0eVISIBILITY_ALL\x10\x01\x12\x17\n\x13VISIBILITY_LLM_ONLY\x10\x02\x12\x18\n\x14VISIBILITY_USER_ONLY\x10\x03*\xab\x01\n\nHookAction\x12\x1b\n\x17HOOK_ACTION_UNSPECIFIED\x10\x00\x12\x18\n\x14HOOK_ACTION_CONTINUE\x10\x01\x12\x16\n\x12HOOK_ACTION_MODIFY\x10\x02\x12\x14\n\x10HOOK_ACTION_DENY\x10\x03\x12\x1e\n\x1aHOOK_ACTION_INJECT_CONTEXT\x10\x04\x12\x18\n\x14HOOK_ACTION_ASK_USER\x10\x05*\xa8\x01\n\x14\x43ontextInjectionRole\x12&\n\"CONTEXT_INJECTION_ROLE_UNSPECIFIED\x10\x00\x12!\n\x1d\x43ONTEXT_INJECTION_ROLE_SYSTEM\x10\x01\x12\x1f\n\x1b\x43ONTEXT_INJECTION_ROLE_USER\x10\x02\x12$\n CONTEXT_INJECTION_ROLE_ASSISTANT\x10\x03*l\n\x0f\x41pprovalDefault\x12 \n\x1c\x41PPROVAL_DEFAULT_UNSPECIFIED\x10\x00\x12\x1c\n\x18\x41PPROVAL_DEFAULT_APPROVE\x10\x01\x12\x19\n\x15\x41PPROVAL_DEFAULT_DENY\x10\x02*\x91\x01\n\x10UserMessageLevel\x12\"\n\x1eUSER_MESSAGE_LEVEL_UNSPECIFIED\x10\x00\x12\x1b\n\x17USER_MESSAGE_LEVEL_INFO\x10\x01\x12\x1e\n\x1aUSER_MESSAGE_LEVEL_WARNING\x10\x02\x12\x1c\n\x18USER_MESSAGE_LEVEL_ERROR\x10\x03\x32\xa5\x01\n\x0bToolService\x12>\n\x07GetSpec\x12\x17.amplifier.module.Empty\x1a\x1a.amplifier.module.ToolSpec\x12V\n\x07\x45xecute\x12$.amplifier.module.ToolExecuteRequest\x1a%.amplifier.module.ToolExecuteResponse2\x9f\x03\n\x0fProviderService\x12\x42\n\x07GetInfo\x12\x17.amplifier.module.Empty\x1a\x1e.amplifier.module.ProviderInfo\x12K\n\nListModels\x12\x17.amplifier.module.Empty\x1a$.amplifier.module.ListModelsResponse\x12I\n\x08\x43omplete\x12\x1d.amplifier.module.ChatRequest\x1a\x1e.amplifier.module.ChatResponse\x12T\n\x11\x43ompleteStreaming\x12\x1d.amplifier.module.ChatRequest\x1a\x1e.amplifier.module.ChatResponse0\x01\x12Z\n\x0eParseToolCalls\x12\x1e.amplifier.module.ChatResponse\x1a(.amplifier.module.ParseToolCallsResponse2}\n\x13OrchestratorService\x12\x66\n
\x07\x45xecute\x12,.amplifier.module.OrchestratorExecuteRequest\x1a-.amplifier.module.OrchestratorExecuteResponse2\xa3\x03\n\x0e\x43ontextService\x12J\n\nAddMessage\x12#.amplifier.module.AddMessageRequest\x1a\x17.amplifier.module.Empty\x12M\n\x0bGetMessages\x12\x17.amplifier.module.Empty\x1a%.amplifier.module.GetMessagesResponse\x12m\n\x15GetMessagesForRequest\x12-.amplifier.module.GetMessagesForRequestParams\x1a%.amplifier.module.GetMessagesResponse\x12L\n\x0bSetMessages\x12$.amplifier.module.SetMessagesRequest\x1a\x17.amplifier.module.Empty\x12\x39\n\x05\x43lear\x12\x17.amplifier.module.Empty\x1a\x17.amplifier.module.Empty2Z\n\x0bHookService\x12K\n\x06Handle\x12#.amplifier.module.HookHandleRequest\x1a\x1c.amplifier.module.HookResult2k\n\x0f\x41pprovalService\x12X\n\x0fRequestApproval\x12!.amplifier.module.ApprovalRequest\x1a\".amplifier.module.ApprovalResponse2\xd0\x07\n\rKernelService\x12\x65\n\x14\x43ompleteWithProvider\x12-.amplifier.module.CompleteWithProviderRequest\x1a\x1e.amplifier.module.ChatResponse\x12p\n\x1d\x43ompleteWithProviderStreaming\x12-.amplifier.module.CompleteWithProviderRequest\x1a\x1e.amplifier.module.ChatResponse0\x01\x12Q\n\x0b\x45xecuteTool\x12$.amplifier.module.ExecuteToolRequest\x1a\x1c.amplifier.module.ToolResult\x12K\n\x08\x45mitHook\x12!.amplifier.module.EmitHookRequest\x1a\x1c.amplifier.module.HookResult\x12o\n\x12\x45mitHookAndCollect\x12+.amplifier.module.EmitHookAndCollectRequest\x1a,.amplifier.module.EmitHookAndCollectResponse\x12Z\n\x0bGetMessages\x12$.amplifier.module.GetMessagesRequest\x1a%.amplifier.module.GetMessagesResponse\x12P\n\nAddMessage\x12).amplifier.module.KernelAddMessageRequest\x1a\x17.amplifier.module.Empty\x12i\n\x10GetMountedModule\x12).amplifier.module.GetMountedModuleRequest\x1a*.amplifier.module.GetMountedModuleResponse\x12Z\n\x12RegisterCapability\x12+.amplifier.module.RegisterCapabilityRequest\x1a\x17.amplifier.module.Empty\x12`\n\rGetCapability\x12&.amplifier.module.GetCapabilityRequest\x1a\'.amplifier.m
odule.GetCapabilityResponse2\xaf\x02\n\x0fModuleLifecycle\x12H\n\x05Mount\x12\x1e.amplifier.module.MountRequest\x1a\x1f.amplifier.module.MountResponse\x12;\n\x07\x43leanup\x12\x17.amplifier.module.Empty\x1a\x17.amplifier.module.Empty\x12M\n\x0bHealthCheck\x12\x17.amplifier.module.Empty\x1a%.amplifier.module.HealthCheckResponse\x12\x46\n\rGetModuleInfo\x12\x17.amplifier.module.Empty\x1a\x1c.amplifier.module.ModuleInfob\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x61mplifier_module.proto\x12\x10\x61mplifier.module\"\x07\n\x05\x45mpty\"F\n\x08ToolSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x17\n\x0fparameters_json\x18\x03 \x01(\t\"9\n\x12ToolExecuteRequest\x12\r\n\x05input\x18\x01 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\t\"[\n\x13ToolExecuteResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0e\n\x06output\x18\x02 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"\xd6\x01\n\nModuleInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x31\n\x0bmodule_type\x18\x04 \x01(\x0e\x32\x1c.amplifier.module.ModuleType\x12\x13\n\x0bmount_point\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x1a\n\x12\x63onfig_schema_json\x18\x07 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x08 \x03(\t\x12\x0e\n\x06\x61uthor\x18\t \x01(\t\"\x8c\x01\n\x0cMountRequest\x12:\n\x06\x63onfig\x18\x01 \x03(\x0b\x32*.amplifier.module.MountRequest.ConfigEntry\x12\x11\n\tmodule_id\x18\x02 \x01(\t\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"_\n\rMountResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12.\n\x06status\x18\x03 \x01(\x0e\x32\x1e.amplifier.module.HealthStatus\"V\n\x13HealthCheckResponse\x12.\n\x06status\x18\x01 \x01(\x0e\x32\x1e.amplifier.module.HealthStatus\x12\x0f\n\x07message\x18\x02 
\x01(\t\"\xca\x02\n\x0b\x43onfigField\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x35\n\nfield_type\x18\x03 \x01(\x0e\x32!.amplifier.module.ConfigFieldType\x12\x0e\n\x06prompt\x18\x04 \x01(\t\x12\x0f\n\x07\x65nv_var\x18\x05 \x01(\t\x12\x0f\n\x07\x63hoices\x18\x06 \x03(\t\x12\x10\n\x08required\x18\x07 \x01(\x08\x12\x15\n\rdefault_value\x18\x08 \x01(\t\x12>\n\tshow_when\x18\t \x03(\x0b\x32+.amplifier.module.ConfigField.ShowWhenEntry\x12\x16\n\x0erequires_model\x18\n \x01(\x08\x1a/\n\rShowWhenEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\rProviderError\x12\x37\n\nerror_type\x18\x01 \x01(\x0e\x32#.amplifier.module.ProviderErrorType\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x15\n\rprovider_name\x18\x03 \x01(\t\x12\r\n\x05model\x18\x04 \x01(\t\x12\x13\n\x0bstatus_code\x18\x05 \x01(\x05\x12\x11\n\tretryable\x18\x06 \x01(\x08\x12\x13\n\x0bretry_after\x18\x07 \x01(\x01\"\x97\x01\n\tToolError\x12\x33\n\nerror_type\x18\x01 \x01(\x0e\x32\x1f.amplifier.module.ToolErrorType\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x11\n\ttool_name\x18\x03 \x01(\t\x12\x0e\n\x06stdout\x18\x04 \x01(\t\x12\x0e\n\x06stderr\x18\x05 \x01(\t\x12\x11\n\texit_code\x18\x06 \x01(\x05\"d\n\tHookError\x12\x33\n\nerror_type\x18\x01 \x01(\x0e\x32\x1f.amplifier.module.HookErrorType\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x11\n\thook_name\x18\x03 \x01(\t\"\xef\x01\n\x0e\x41mplifierError\x12\x39\n\x0eprovider_error\x18\x01 \x01(\x0b\x32\x1f.amplifier.module.ProviderErrorH\x00\x12\x31\n\ntool_error\x18\x02 \x01(\x0b\x32\x1b.amplifier.module.ToolErrorH\x00\x12\x31\n\nhook_error\x18\x03 \x01(\x0b\x32\x1b.amplifier.module.HookErrorH\x00\x12\x17\n\rgeneric_error\x18\x04 \x01(\tH\x00\x12\x1a\n\x10validation_error\x18\x05 \x01(\tH\x00\x42\x07\n\x05\x65rror\"\x19\n\tTextBlock\x12\x0c\n\x04text\x18\x01 \x01(\t\"E\n\rThinkingBlock\x12\x10\n\x08thinking\x18\x01 \x01(\t\x12\x11\n\tsignature\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x03 
\x01(\t\"%\n\x15RedactedThinkingBlock\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\t\"=\n\rToolCallBlock\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\ninput_json\x18\x03 \x01(\t\"<\n\x0fToolResultBlock\x12\x14\n\x0ctool_call_id\x18\x01 \x01(\t\x12\x13\n\x0boutput_json\x18\x02 \x01(\t\"C\n\nImageBlock\x12\x12\n\nmedia_type\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x13\n\x0bsource_json\x18\x03 \x01(\t\"2\n\x0eReasoningBlock\x12\x0f\n\x07\x63ontent\x18\x01 \x03(\t\x12\x0f\n\x07summary\x18\x02 \x03(\t\"\xf1\x03\n\x0c\x43ontentBlock\x12\x31\n\ntext_block\x18\x01 \x01(\x0b\x32\x1b.amplifier.module.TextBlockH\x00\x12\x39\n\x0ethinking_block\x18\x02 \x01(\x0b\x32\x1f.amplifier.module.ThinkingBlockH\x00\x12J\n\x17redacted_thinking_block\x18\x03 \x01(\x0b\x32\'.amplifier.module.RedactedThinkingBlockH\x00\x12:\n\x0ftool_call_block\x18\x04 \x01(\x0b\x32\x1f.amplifier.module.ToolCallBlockH\x00\x12>\n\x11tool_result_block\x18\x05 \x01(\x0b\x32!.amplifier.module.ToolResultBlockH\x00\x12\x33\n\x0bimage_block\x18\x06 \x01(\x0b\x32\x1c.amplifier.module.ImageBlockH\x00\x12;\n\x0freasoning_block\x18\x07 \x01(\x0b\x32 .amplifier.module.ReasoningBlockH\x00\x12\x30\n\nvisibility\x18\x08 \x01(\x0e\x32\x1c.amplifier.module.VisibilityB\x07\n\x05\x62lock\"B\n\x10\x43ontentBlockList\x12.\n\x06\x62locks\x18\x01 \x03(\x0b\x32\x1e.amplifier.module.ContentBlock\"\xca\x01\n\x07Message\x12$\n\x04role\x18\x01 \x01(\x0e\x32\x16.amplifier.module.Role\x12\x16\n\x0ctext_content\x18\x02 \x01(\tH\x00\x12;\n\rblock_content\x18\x03 \x01(\x0b\x32\".amplifier.module.ContentBlockListH\x00\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x14\n\x0ctool_call_id\x18\x05 \x01(\t\x12\x15\n\rmetadata_json\x18\x06 \x01(\tB\t\n\x07\x63ontent\"C\n\x0fToolCallMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x16\n\x0e\x61rguments_json\x18\x03 \x01(\t\"K\n\rToolSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 
\x01(\t\x12\x17\n\x0fparameters_json\x18\x03 \x01(\t\"7\n\x10JsonSchemaFormat\x12\x13\n\x0bschema_json\x18\x01 \x01(\t\x12\x0e\n\x06strict\x18\x02 \x01(\x08\"u\n\x0eResponseFormat\x12\x0e\n\x04text\x18\x01 \x01(\x08H\x00\x12\x0e\n\x04json\x18\x02 \x01(\x08H\x00\x12\x39\n\x0bjson_schema\x18\x03 \x01(\x0b\x32\".amplifier.module.JsonSchemaFormatH\x00\x42\x08\n\x06\x66ormat\"\xf7\x01\n\x05Usage\x12\x15\n\rprompt_tokens\x18\x01 \x01(\x05\x12\x19\n\x11\x63ompletion_tokens\x18\x02 \x01(\x05\x12\x14\n\x0ctotal_tokens\x18\x03 \x01(\x05\x12\x1d\n\x10reasoning_tokens\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x1e\n\x11\x63\x61\x63he_read_tokens\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\"\n\x15\x63\x61\x63he_creation_tokens\x18\x06 \x01(\x05H\x02\x88\x01\x01\x42\x13\n\x11_reasoning_tokensB\x14\n\x12_cache_read_tokensB\x18\n\x16_cache_creation_tokens\"@\n\x0b\x44\x65gradation\x12\x11\n\trequested\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tual\x18\x02 \x01(\t\x12\x0e\n\x06reason\x18\x03 \x01(\t\"\x81\x03\n\x0b\x43hatRequest\x12+\n\x08messages\x18\x01 \x03(\x0b\x32\x19.amplifier.module.Message\x12.\n\x05tools\x18\x02 \x03(\x0b\x32\x1f.amplifier.module.ToolSpecProto\x12\x39\n\x0fresponse_format\x18\x03 \x01(\x0b\x32 .amplifier.module.ResponseFormat\x12\x13\n\x0btemperature\x18\x04 \x01(\x01\x12\r\n\x05top_p\x18\x05 \x01(\x01\x12\x19\n\x11max_output_tokens\x18\x06 \x01(\x05\x12\x17\n\x0f\x63onversation_id\x18\x07 \x01(\t\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x15\n\rmetadata_json\x18\t \x01(\t\x12\r\n\x05model\x18\n \x01(\t\x12\x13\n\x0btool_choice\x18\x0b \x01(\t\x12\x0c\n\x04stop\x18\x0c \x03(\t\x12\x18\n\x10reasoning_effort\x18\r \x01(\t\x12\x0f\n\x07timeout\x18\x0e \x01(\x01\"\xe0\x01\n\x0c\x43hatResponse\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x35\n\ntool_calls\x18\x02 \x03(\x0b\x32!.amplifier.module.ToolCallMessage\x12&\n\x05usage\x18\x03 \x01(\x0b\x32\x17.amplifier.module.Usage\x12\x32\n\x0b\x64\x65gradation\x18\x04 
\x01(\x0b\x32\x1d.amplifier.module.Degradation\x12\x15\n\rfinish_reason\x18\x05 \x01(\t\x12\x15\n\rmetadata_json\x18\x06 \x01(\t\"F\n\nToolResult\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x13\n\x0boutput_json\x18\x02 \x01(\t\x12\x12\n\nerror_json\x18\x03 \x01(\t\"\xa7\x04\n\nHookResult\x12,\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x1c.amplifier.module.HookAction\x12\x11\n\tdata_json\x18\x02 \x01(\t\x12\x0e\n\x06reason\x18\x03 \x01(\t\x12\x19\n\x11\x63ontext_injection\x18\x04 \x01(\t\x12\x46\n\x16\x63ontext_injection_role\x18\x05 \x01(\x0e\x32&.amplifier.module.ContextInjectionRole\x12\x11\n\tephemeral\x18\x06 \x01(\x08\x12\x17\n\x0f\x61pproval_prompt\x18\x07 \x01(\t\x12\x18\n\x10\x61pproval_options\x18\x08 \x03(\t\x12\x1d\n\x10\x61pproval_timeout\x18\t \x01(\x01H\x00\x88\x01\x01\x12;\n\x10\x61pproval_default\x18\n \x01(\x0e\x32!.amplifier.module.ApprovalDefault\x12\x17\n\x0fsuppress_output\x18\x0b \x01(\x08\x12\x14\n\x0cuser_message\x18\x0c \x01(\t\x12>\n\x12user_message_level\x18\r \x01(\x0e\x32\".amplifier.module.UserMessageLevel\x12\x1b\n\x13user_message_source\x18\x0e \x01(\t\x12\"\n\x1a\x61ppend_to_last_tool_result\x18\x0f \x01(\x08\x42\x13\n\x11_approval_timeout\"\x8d\x01\n\tModelInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0e\x63ontext_window\x18\x03 \x01(\x05\x12\x19\n\x11max_output_tokens\x18\x04 \x01(\x05\x12\x14\n\x0c\x63\x61pabilities\x18\x05 \x03(\t\x12\x15\n\rdefaults_json\x18\x06 \x01(\t\"\xb0\x01\n\x0cProviderInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x1b\n\x13\x63redential_env_vars\x18\x03 \x03(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\x12\x15\n\rdefaults_json\x18\x05 \x01(\t\x12\x34\n\rconfig_fields\x18\x06 \x03(\x0b\x32\x1d.amplifier.module.ConfigField\"\x80\x01\n\x0f\x41pprovalRequest\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65tails_json\x18\x03 \x01(\t\x12\x12\n\nrisk_level\x18\x04 
\x01(\t\x12\x14\n\x07timeout\x18\x05 \x01(\x01H\x00\x88\x01\x01\x42\n\n\x08_timeout\"F\n\x10\x41pprovalResponse\x12\x10\n\x08\x61pproved\x18\x01 \x01(\x08\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x10\n\x08remember\x18\x03 \x01(\x08\"A\n\x12ListModelsResponse\x12+\n\x06models\x18\x01 \x03(\x0b\x32\x1b.amplifier.module.ModelInfo\"O\n\x16ParseToolCallsResponse\x12\x35\n\ntool_calls\x18\x01 \x03(\x0b\x32!.amplifier.module.ToolCallMessage\"@\n\x1aOrchestratorExecuteRequest\x12\x0e\n\x06prompt\x18\x01 \x01(\t\x12\x12\n\nsession_id\x18\x02 \x01(\t\">\n\x1bOrchestratorExecuteResponse\x12\x10\n\x08response\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\"?\n\x11\x41\x64\x64MessageRequest\x12*\n\x07message\x18\x01 \x01(\x0b\x32\x19.amplifier.module.Message\"B\n\x13GetMessagesResponse\x12+\n\x08messages\x18\x01 \x03(\x0b\x32\x19.amplifier.module.Message\"J\n\x1bGetMessagesForRequestParams\x12\x14\n\x0ctoken_budget\x18\x01 \x01(\x05\x12\x15\n\rprovider_name\x18\x02 \x01(\t\"A\n\x12SetMessagesRequest\x12+\n\x08messages\x18\x01 \x03(\x0b\x32\x19.amplifier.module.Message\"5\n\x11HookHandleRequest\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x11\n\tdata_json\x18\x02 \x01(\t\".\n\x17GetSubscriptionsRequest\x12\x13\n\x0b\x63onfig_json\x18\x01 \x01(\t\"V\n\x18GetSubscriptionsResponse\x12:\n\rsubscriptions\x18\x01 \x03(\x0b\x32#.amplifier.module.EventSubscription\"B\n\x11\x45ventSubscription\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x10\n\x08priority\x18\x02 \x01(\x05\x12\x0c\n\x04name\x18\x03 \x01(\t\"d\n\x1b\x43ompleteWithProviderRequest\x12\x15\n\rprovider_name\x18\x01 \x01(\t\x12.\n\x07request\x18\x02 \x01(\x0b\x32\x1d.amplifier.module.ChatRequest\";\n\x12\x45xecuteToolRequest\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x12\n\ninput_json\x18\x02 \x01(\t\"3\n\x0f\x45mitHookRequest\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x11\n\tdata_json\x18\x02 \x01(\t\"V\n\x19\x45mitHookAndCollectRequest\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x11\n\tdata_json\x18\x02 
\x01(\t\x12\x17\n\x0ftimeout_seconds\x18\x03 \x01(\x01\"4\n\x1a\x45mitHookAndCollectResponse\x12\x16\n\x0eresponses_json\x18\x01 \x03(\t\"(\n\x12GetMessagesRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\"Y\n\x17KernelAddMessageRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12*\n\x07message\x18\x02 \x01(\x0b\x32\x19.amplifier.module.Message\"a\n\x17GetMountedModuleRequest\x12\x13\n\x0bmodule_name\x18\x01 \x01(\t\x12\x31\n\x0bmodule_type\x18\x02 \x01(\x0e\x32\x1c.amplifier.module.ModuleType\"U\n\x18GetMountedModuleResponse\x12\r\n\x05\x66ound\x18\x01 \x01(\x08\x12*\n\x04info\x18\x02 \x01(\x0b\x32\x1c.amplifier.module.ModuleInfo\"=\n\x19RegisterCapabilityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nvalue_json\x18\x02 \x01(\t\"$\n\x14GetCapabilityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\":\n\x15GetCapabilityResponse\x12\r\n\x05\x66ound\x18\x01 \x01(\x08\x12\x12\n\nvalue_json\x18\x02 \x01(\t*\xbc\x01\n\nModuleType\x12\x1b\n\x17MODULE_TYPE_UNSPECIFIED\x10\x00\x12\x18\n\x14MODULE_TYPE_PROVIDER\x10\x01\x12\x14\n\x10MODULE_TYPE_TOOL\x10\x02\x12\x14\n\x10MODULE_TYPE_HOOK\x10\x03\x12\x16\n\x12MODULE_TYPE_MEMORY\x10\x04\x12\x19\n\x15MODULE_TYPE_GUARDRAIL\x10\x05\x12\x18\n\x14MODULE_TYPE_APPROVAL\x10\x06*\x82\x01\n\x0cHealthStatus\x12\x1d\n\x19HEALTH_STATUS_UNSPECIFIED\x10\x00\x12\x19\n\x15HEALTH_STATUS_SERVING\x10\x01\x12\x1d\n\x19HEALTH_STATUS_NOT_SERVING\x10\x02\x12\x19\n\x15HEALTH_STATUS_UNKNOWN\x10\x03*\xad\x01\n\x0f\x43onfigFieldType\x12!\n\x1d\x43ONFIG_FIELD_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18\x43ONFIG_FIELD_TYPE_STRING\x10\x01\x12\x1c\n\x18\x43ONFIG_FIELD_TYPE_NUMBER\x10\x02\x12\x1d\n\x19\x43ONFIG_FIELD_TYPE_BOOLEAN\x10\x03\x12\x1c\n\x18\x43ONFIG_FIELD_TYPE_SECRET\x10\x04*\xd8\x02\n\x11ProviderErrorType\x12#\n\x1fPROVIDER_ERROR_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18PROVIDER_ERROR_TYPE_AUTH\x10\x01\x12\"\n\x1ePROVIDER_ERROR_TYPE_RATE_LIMIT\x10\x02\x12&\n\"PROVIDER_ERROR_TYPE_CONTEXT_LENGTH\x10\x03\x12\'\n#PROVIDER_ERROR_TYPE_INVALID_REQUEST\x10\x04\x12&\n\"PROV
IDER_ERROR_TYPE_CONTENT_FILTER\x10\x05\x12#\n\x1fPROVIDER_ERROR_TYPE_UNAVAILABLE\x10\x06\x12\x1f\n\x1bPROVIDER_ERROR_TYPE_TIMEOUT\x10\x07\x12\x1d\n\x19PROVIDER_ERROR_TYPE_OTHER\x10\x08*\x8c\x01\n\rToolErrorType\x12\x1f\n\x1bTOOL_ERROR_TYPE_UNSPECIFIED\x10\x00\x12\x1d\n\x19TOOL_ERROR_TYPE_EXECUTION\x10\x01\x12\x1e\n\x1aTOOL_ERROR_TYPE_VALIDATION\x10\x02\x12\x1b\n\x17TOOL_ERROR_TYPE_TIMEOUT\x10\x03*\x8c\x01\n\rHookErrorType\x12\x1f\n\x1bHOOK_ERROR_TYPE_UNSPECIFIED\x10\x00\x12\x1d\n\x19HOOK_ERROR_TYPE_EXECUTION\x10\x01\x12\x1e\n\x1aHOOK_ERROR_TYPE_VALIDATION\x10\x02\x12\x1b\n\x17HOOK_ERROR_TYPE_TIMEOUT\x10\x03*\x86\x01\n\x04Role\x12\x14\n\x10ROLE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bROLE_SYSTEM\x10\x01\x12\r\n\tROLE_USER\x10\x02\x12\x12\n\x0eROLE_ASSISTANT\x10\x03\x12\r\n\tROLE_TOOL\x10\x04\x12\x11\n\rROLE_FUNCTION\x10\x05\x12\x12\n\x0eROLE_DEVELOPER\x10\x06*o\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x12\n\x0eVISIBILITY_ALL\x10\x01\x12\x17\n\x13VISIBILITY_LLM_ONLY\x10\x02\x12\x18\n\x14VISIBILITY_USER_ONLY\x10\x03*\xab\x01\n\nHookAction\x12\x1b\n\x17HOOK_ACTION_UNSPECIFIED\x10\x00\x12\x18\n\x14HOOK_ACTION_CONTINUE\x10\x01\x12\x16\n\x12HOOK_ACTION_MODIFY\x10\x02\x12\x14\n\x10HOOK_ACTION_DENY\x10\x03\x12\x1e\n\x1aHOOK_ACTION_INJECT_CONTEXT\x10\x04\x12\x18\n\x14HOOK_ACTION_ASK_USER\x10\x05*\xa8\x01\n\x14\x43ontextInjectionRole\x12&\n\"CONTEXT_INJECTION_ROLE_UNSPECIFIED\x10\x00\x12!\n\x1d\x43ONTEXT_INJECTION_ROLE_SYSTEM\x10\x01\x12\x1f\n\x1b\x43ONTEXT_INJECTION_ROLE_USER\x10\x02\x12$\n CONTEXT_INJECTION_ROLE_ASSISTANT\x10\x03*l\n\x0f\x41pprovalDefault\x12 
\n\x1c\x41PPROVAL_DEFAULT_UNSPECIFIED\x10\x00\x12\x1c\n\x18\x41PPROVAL_DEFAULT_APPROVE\x10\x01\x12\x19\n\x15\x41PPROVAL_DEFAULT_DENY\x10\x02*\x91\x01\n\x10UserMessageLevel\x12\"\n\x1eUSER_MESSAGE_LEVEL_UNSPECIFIED\x10\x00\x12\x1b\n\x17USER_MESSAGE_LEVEL_INFO\x10\x01\x12\x1e\n\x1aUSER_MESSAGE_LEVEL_WARNING\x10\x02\x12\x1c\n\x18USER_MESSAGE_LEVEL_ERROR\x10\x03\x32\xa5\x01\n\x0bToolService\x12>\n\x07GetSpec\x12\x17.amplifier.module.Empty\x1a\x1a.amplifier.module.ToolSpec\x12V\n\x07\x45xecute\x12$.amplifier.module.ToolExecuteRequest\x1a%.amplifier.module.ToolExecuteResponse2\x9f\x03\n\x0fProviderService\x12\x42\n\x07GetInfo\x12\x17.amplifier.module.Empty\x1a\x1e.amplifier.module.ProviderInfo\x12K\n\nListModels\x12\x17.amplifier.module.Empty\x1a$.amplifier.module.ListModelsResponse\x12I\n\x08\x43omplete\x12\x1d.amplifier.module.ChatRequest\x1a\x1e.amplifier.module.ChatResponse\x12T\n\x11\x43ompleteStreaming\x12\x1d.amplifier.module.ChatRequest\x1a\x1e.amplifier.module.ChatResponse0\x01\x12Z\n\x0eParseToolCalls\x12\x1e.amplifier.module.ChatResponse\x1a(.amplifier.module.ParseToolCallsResponse2}\n\x13OrchestratorService\x12\x66\n\x07\x45xecute\x12,.amplifier.module.OrchestratorExecuteRequest\x1a-.amplifier.module.OrchestratorExecuteResponse2\xa3\x03\n\x0e\x43ontextService\x12J\n\nAddMessage\x12#.amplifier.module.AddMessageRequest\x1a\x17.amplifier.module.Empty\x12M\n\x0bGetMessages\x12\x17.amplifier.module.Empty\x1a%.amplifier.module.GetMessagesResponse\x12m\n\x15GetMessagesForRequest\x12-.amplifier.module.GetMessagesForRequestParams\x1a%.amplifier.module.GetMessagesResponse\x12L\n\x0bSetMessages\x12$.amplifier.module.SetMessagesRequest\x1a\x17.amplifier.module.Empty\x12\x39\n\x05\x43lear\x12\x17.amplifier.module.Empty\x1a\x17.amplifier.module.Empty2\xc5\x01\n\x0bHookService\x12K\n\x06Handle\x12#.amplifier.module.HookHandleRequest\x1a\x1c.amplifier.module.HookResult\x12i\n\x10GetSubscriptions\x12).amplifier.module.GetSubscriptionsRequest\x1a*.amplifier.module.GetSubscripti
onsResponse2k\n\x0f\x41pprovalService\x12X\n\x0fRequestApproval\x12!.amplifier.module.ApprovalRequest\x1a\".amplifier.module.ApprovalResponse2\xd0\x07\n\rKernelService\x12\x65\n\x14\x43ompleteWithProvider\x12-.amplifier.module.CompleteWithProviderRequest\x1a\x1e.amplifier.module.ChatResponse\x12p\n\x1d\x43ompleteWithProviderStreaming\x12-.amplifier.module.CompleteWithProviderRequest\x1a\x1e.amplifier.module.ChatResponse0\x01\x12Q\n\x0b\x45xecuteTool\x12$.amplifier.module.ExecuteToolRequest\x1a\x1c.amplifier.module.ToolResult\x12K\n\x08\x45mitHook\x12!.amplifier.module.EmitHookRequest\x1a\x1c.amplifier.module.HookResult\x12o\n\x12\x45mitHookAndCollect\x12+.amplifier.module.EmitHookAndCollectRequest\x1a,.amplifier.module.EmitHookAndCollectResponse\x12Z\n\x0bGetMessages\x12$.amplifier.module.GetMessagesRequest\x1a%.amplifier.module.GetMessagesResponse\x12P\n\nAddMessage\x12).amplifier.module.KernelAddMessageRequest\x1a\x17.amplifier.module.Empty\x12i\n\x10GetMountedModule\x12).amplifier.module.GetMountedModuleRequest\x1a*.amplifier.module.GetMountedModuleResponse\x12Z\n\x12RegisterCapability\x12+.amplifier.module.RegisterCapabilityRequest\x1a\x17.amplifier.module.Empty\x12`\n\rGetCapability\x12&.amplifier.module.GetCapabilityRequest\x1a\'.amplifier.module.GetCapabilityResponse2\xaf\x02\n\x0fModuleLifecycle\x12H\n\x05Mount\x12\x1e.amplifier.module.MountRequest\x1a\x1f.amplifier.module.MountResponse\x12;\n\x07\x43leanup\x12\x17.amplifier.module.Empty\x1a\x17.amplifier.module.Empty\x12M\n\x0bHealthCheck\x12\x17.amplifier.module.Empty\x1a%.amplifier.module.HealthCheckResponse\x12\x46\n\rGetModuleInfo\x12\x17.amplifier.module.Empty\x1a\x1c.amplifier.module.ModuleInfob\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -35,30 +35,30 @@ _globals['_MOUNTREQUEST_CONFIGENTRY']._serialized_options = b'8\001' _globals['_CONFIGFIELD_SHOWWHENENTRY']._loaded_options = None _globals['_CONFIGFIELD_SHOWWHENENTRY']._serialized_options = 
b'8\001' - _globals['_MODULETYPE']._serialized_start=6725 - _globals['_MODULETYPE']._serialized_end=6913 - _globals['_HEALTHSTATUS']._serialized_start=6916 - _globals['_HEALTHSTATUS']._serialized_end=7046 - _globals['_CONFIGFIELDTYPE']._serialized_start=7049 - _globals['_CONFIGFIELDTYPE']._serialized_end=7222 - _globals['_PROVIDERERRORTYPE']._serialized_start=7225 - _globals['_PROVIDERERRORTYPE']._serialized_end=7569 - _globals['_TOOLERRORTYPE']._serialized_start=7572 - _globals['_TOOLERRORTYPE']._serialized_end=7712 - _globals['_HOOKERRORTYPE']._serialized_start=7715 - _globals['_HOOKERRORTYPE']._serialized_end=7855 - _globals['_ROLE']._serialized_start=7858 - _globals['_ROLE']._serialized_end=7992 - _globals['_VISIBILITY']._serialized_start=7994 - _globals['_VISIBILITY']._serialized_end=8105 - _globals['_HOOKACTION']._serialized_start=8108 - _globals['_HOOKACTION']._serialized_end=8279 - _globals['_CONTEXTINJECTIONROLE']._serialized_start=8282 - _globals['_CONTEXTINJECTIONROLE']._serialized_end=8450 - _globals['_APPROVALDEFAULT']._serialized_start=8452 - _globals['_APPROVALDEFAULT']._serialized_end=8560 - _globals['_USERMESSAGELEVEL']._serialized_start=8563 - _globals['_USERMESSAGELEVEL']._serialized_end=8708 + _globals['_MODULETYPE']._serialized_start=7057 + _globals['_MODULETYPE']._serialized_end=7245 + _globals['_HEALTHSTATUS']._serialized_start=7248 + _globals['_HEALTHSTATUS']._serialized_end=7378 + _globals['_CONFIGFIELDTYPE']._serialized_start=7381 + _globals['_CONFIGFIELDTYPE']._serialized_end=7554 + _globals['_PROVIDERERRORTYPE']._serialized_start=7557 + _globals['_PROVIDERERRORTYPE']._serialized_end=7901 + _globals['_TOOLERRORTYPE']._serialized_start=7904 + _globals['_TOOLERRORTYPE']._serialized_end=8044 + _globals['_HOOKERRORTYPE']._serialized_start=8047 + _globals['_HOOKERRORTYPE']._serialized_end=8187 + _globals['_ROLE']._serialized_start=8190 + _globals['_ROLE']._serialized_end=8324 + _globals['_VISIBILITY']._serialized_start=8326 + 
_globals['_VISIBILITY']._serialized_end=8437 + _globals['_HOOKACTION']._serialized_start=8440 + _globals['_HOOKACTION']._serialized_end=8611 + _globals['_CONTEXTINJECTIONROLE']._serialized_start=8614 + _globals['_CONTEXTINJECTIONROLE']._serialized_end=8782 + _globals['_APPROVALDEFAULT']._serialized_start=8784 + _globals['_APPROVALDEFAULT']._serialized_end=8892 + _globals['_USERMESSAGELEVEL']._serialized_start=8895 + _globals['_USERMESSAGELEVEL']._serialized_end=9040 _globals['_EMPTY']._serialized_start=44 _globals['_EMPTY']._serialized_end=51 _globals['_TOOLSPEC']._serialized_start=53 @@ -118,81 +118,87 @@ _globals['_RESPONSEFORMAT']._serialized_start=3203 _globals['_RESPONSEFORMAT']._serialized_end=3320 _globals['_USAGE']._serialized_start=3323 - _globals['_USAGE']._serialized_end=3486 - _globals['_DEGRADATION']._serialized_start=3488 - _globals['_DEGRADATION']._serialized_end=3552 - _globals['_CHATREQUEST']._serialized_start=3555 - _globals['_CHATREQUEST']._serialized_end=3940 - _globals['_CHATRESPONSE']._serialized_start=3943 - _globals['_CHATRESPONSE']._serialized_end=4167 - _globals['_TOOLRESULT']._serialized_start=4169 - _globals['_TOOLRESULT']._serialized_end=4239 - _globals['_HOOKRESULT']._serialized_start=4242 - _globals['_HOOKRESULT']._serialized_end=4767 - _globals['_MODELINFO']._serialized_start=4770 - _globals['_MODELINFO']._serialized_end=4911 - _globals['_PROVIDERINFO']._serialized_start=4914 - _globals['_PROVIDERINFO']._serialized_end=5090 - _globals['_APPROVALREQUEST']._serialized_start=5092 - _globals['_APPROVALREQUEST']._serialized_end=5203 - _globals['_APPROVALRESPONSE']._serialized_start=5205 - _globals['_APPROVALRESPONSE']._serialized_end=5275 - _globals['_LISTMODELSRESPONSE']._serialized_start=5277 - _globals['_LISTMODELSRESPONSE']._serialized_end=5342 - _globals['_PARSETOOLCALLSRESPONSE']._serialized_start=5344 - _globals['_PARSETOOLCALLSRESPONSE']._serialized_end=5423 - _globals['_ORCHESTRATOREXECUTEREQUEST']._serialized_start=5425 - 
_globals['_ORCHESTRATOREXECUTEREQUEST']._serialized_end=5489 - _globals['_ORCHESTRATOREXECUTERESPONSE']._serialized_start=5491 - _globals['_ORCHESTRATOREXECUTERESPONSE']._serialized_end=5553 - _globals['_ADDMESSAGEREQUEST']._serialized_start=5555 - _globals['_ADDMESSAGEREQUEST']._serialized_end=5618 - _globals['_GETMESSAGESRESPONSE']._serialized_start=5620 - _globals['_GETMESSAGESRESPONSE']._serialized_end=5686 - _globals['_GETMESSAGESFORREQUESTPARAMS']._serialized_start=5688 - _globals['_GETMESSAGESFORREQUESTPARAMS']._serialized_end=5762 - _globals['_SETMESSAGESREQUEST']._serialized_start=5764 - _globals['_SETMESSAGESREQUEST']._serialized_end=5829 - _globals['_HOOKHANDLEREQUEST']._serialized_start=5831 - _globals['_HOOKHANDLEREQUEST']._serialized_end=5884 - _globals['_COMPLETEWITHPROVIDERREQUEST']._serialized_start=5886 - _globals['_COMPLETEWITHPROVIDERREQUEST']._serialized_end=5986 - _globals['_EXECUTETOOLREQUEST']._serialized_start=5988 - _globals['_EXECUTETOOLREQUEST']._serialized_end=6047 - _globals['_EMITHOOKREQUEST']._serialized_start=6049 - _globals['_EMITHOOKREQUEST']._serialized_end=6100 - _globals['_EMITHOOKANDCOLLECTREQUEST']._serialized_start=6102 - _globals['_EMITHOOKANDCOLLECTREQUEST']._serialized_end=6188 - _globals['_EMITHOOKANDCOLLECTRESPONSE']._serialized_start=6190 - _globals['_EMITHOOKANDCOLLECTRESPONSE']._serialized_end=6242 - _globals['_GETMESSAGESREQUEST']._serialized_start=6244 - _globals['_GETMESSAGESREQUEST']._serialized_end=6284 - _globals['_KERNELADDMESSAGEREQUEST']._serialized_start=6286 - _globals['_KERNELADDMESSAGEREQUEST']._serialized_end=6375 - _globals['_GETMOUNTEDMODULEREQUEST']._serialized_start=6377 - _globals['_GETMOUNTEDMODULEREQUEST']._serialized_end=6474 - _globals['_GETMOUNTEDMODULERESPONSE']._serialized_start=6476 - _globals['_GETMOUNTEDMODULERESPONSE']._serialized_end=6561 - _globals['_REGISTERCAPABILITYREQUEST']._serialized_start=6563 - _globals['_REGISTERCAPABILITYREQUEST']._serialized_end=6624 - 
_globals['_GETCAPABILITYREQUEST']._serialized_start=6626 - _globals['_GETCAPABILITYREQUEST']._serialized_end=6662 - _globals['_GETCAPABILITYRESPONSE']._serialized_start=6664 - _globals['_GETCAPABILITYRESPONSE']._serialized_end=6722 - _globals['_TOOLSERVICE']._serialized_start=8711 - _globals['_TOOLSERVICE']._serialized_end=8876 - _globals['_PROVIDERSERVICE']._serialized_start=8879 - _globals['_PROVIDERSERVICE']._serialized_end=9294 - _globals['_ORCHESTRATORSERVICE']._serialized_start=9296 - _globals['_ORCHESTRATORSERVICE']._serialized_end=9421 - _globals['_CONTEXTSERVICE']._serialized_start=9424 - _globals['_CONTEXTSERVICE']._serialized_end=9843 - _globals['_HOOKSERVICE']._serialized_start=9845 - _globals['_HOOKSERVICE']._serialized_end=9935 - _globals['_APPROVALSERVICE']._serialized_start=9937 - _globals['_APPROVALSERVICE']._serialized_end=10044 - _globals['_KERNELSERVICE']._serialized_start=10047 - _globals['_KERNELSERVICE']._serialized_end=11023 - _globals['_MODULELIFECYCLE']._serialized_start=11026 - _globals['_MODULELIFECYCLE']._serialized_end=11329 + _globals['_USAGE']._serialized_end=3570 + _globals['_DEGRADATION']._serialized_start=3572 + _globals['_DEGRADATION']._serialized_end=3636 + _globals['_CHATREQUEST']._serialized_start=3639 + _globals['_CHATREQUEST']._serialized_end=4024 + _globals['_CHATRESPONSE']._serialized_start=4027 + _globals['_CHATRESPONSE']._serialized_end=4251 + _globals['_TOOLRESULT']._serialized_start=4253 + _globals['_TOOLRESULT']._serialized_end=4323 + _globals['_HOOKRESULT']._serialized_start=4326 + _globals['_HOOKRESULT']._serialized_end=4877 + _globals['_MODELINFO']._serialized_start=4880 + _globals['_MODELINFO']._serialized_end=5021 + _globals['_PROVIDERINFO']._serialized_start=5024 + _globals['_PROVIDERINFO']._serialized_end=5200 + _globals['_APPROVALREQUEST']._serialized_start=5203 + _globals['_APPROVALREQUEST']._serialized_end=5331 + _globals['_APPROVALRESPONSE']._serialized_start=5333 + 
_globals['_APPROVALRESPONSE']._serialized_end=5403 + _globals['_LISTMODELSRESPONSE']._serialized_start=5405 + _globals['_LISTMODELSRESPONSE']._serialized_end=5470 + _globals['_PARSETOOLCALLSRESPONSE']._serialized_start=5472 + _globals['_PARSETOOLCALLSRESPONSE']._serialized_end=5551 + _globals['_ORCHESTRATOREXECUTEREQUEST']._serialized_start=5553 + _globals['_ORCHESTRATOREXECUTEREQUEST']._serialized_end=5617 + _globals['_ORCHESTRATOREXECUTERESPONSE']._serialized_start=5619 + _globals['_ORCHESTRATOREXECUTERESPONSE']._serialized_end=5681 + _globals['_ADDMESSAGEREQUEST']._serialized_start=5683 + _globals['_ADDMESSAGEREQUEST']._serialized_end=5746 + _globals['_GETMESSAGESRESPONSE']._serialized_start=5748 + _globals['_GETMESSAGESRESPONSE']._serialized_end=5814 + _globals['_GETMESSAGESFORREQUESTPARAMS']._serialized_start=5816 + _globals['_GETMESSAGESFORREQUESTPARAMS']._serialized_end=5890 + _globals['_SETMESSAGESREQUEST']._serialized_start=5892 + _globals['_SETMESSAGESREQUEST']._serialized_end=5957 + _globals['_HOOKHANDLEREQUEST']._serialized_start=5959 + _globals['_HOOKHANDLEREQUEST']._serialized_end=6012 + _globals['_GETSUBSCRIPTIONSREQUEST']._serialized_start=6014 + _globals['_GETSUBSCRIPTIONSREQUEST']._serialized_end=6060 + _globals['_GETSUBSCRIPTIONSRESPONSE']._serialized_start=6062 + _globals['_GETSUBSCRIPTIONSRESPONSE']._serialized_end=6148 + _globals['_EVENTSUBSCRIPTION']._serialized_start=6150 + _globals['_EVENTSUBSCRIPTION']._serialized_end=6216 + _globals['_COMPLETEWITHPROVIDERREQUEST']._serialized_start=6218 + _globals['_COMPLETEWITHPROVIDERREQUEST']._serialized_end=6318 + _globals['_EXECUTETOOLREQUEST']._serialized_start=6320 + _globals['_EXECUTETOOLREQUEST']._serialized_end=6379 + _globals['_EMITHOOKREQUEST']._serialized_start=6381 + _globals['_EMITHOOKREQUEST']._serialized_end=6432 + _globals['_EMITHOOKANDCOLLECTREQUEST']._serialized_start=6434 + _globals['_EMITHOOKANDCOLLECTREQUEST']._serialized_end=6520 + 
_globals['_EMITHOOKANDCOLLECTRESPONSE']._serialized_start=6522 + _globals['_EMITHOOKANDCOLLECTRESPONSE']._serialized_end=6574 + _globals['_GETMESSAGESREQUEST']._serialized_start=6576 + _globals['_GETMESSAGESREQUEST']._serialized_end=6616 + _globals['_KERNELADDMESSAGEREQUEST']._serialized_start=6618 + _globals['_KERNELADDMESSAGEREQUEST']._serialized_end=6707 + _globals['_GETMOUNTEDMODULEREQUEST']._serialized_start=6709 + _globals['_GETMOUNTEDMODULEREQUEST']._serialized_end=6806 + _globals['_GETMOUNTEDMODULERESPONSE']._serialized_start=6808 + _globals['_GETMOUNTEDMODULERESPONSE']._serialized_end=6893 + _globals['_REGISTERCAPABILITYREQUEST']._serialized_start=6895 + _globals['_REGISTERCAPABILITYREQUEST']._serialized_end=6956 + _globals['_GETCAPABILITYREQUEST']._serialized_start=6958 + _globals['_GETCAPABILITYREQUEST']._serialized_end=6994 + _globals['_GETCAPABILITYRESPONSE']._serialized_start=6996 + _globals['_GETCAPABILITYRESPONSE']._serialized_end=7054 + _globals['_TOOLSERVICE']._serialized_start=9043 + _globals['_TOOLSERVICE']._serialized_end=9208 + _globals['_PROVIDERSERVICE']._serialized_start=9211 + _globals['_PROVIDERSERVICE']._serialized_end=9626 + _globals['_ORCHESTRATORSERVICE']._serialized_start=9628 + _globals['_ORCHESTRATORSERVICE']._serialized_end=9753 + _globals['_CONTEXTSERVICE']._serialized_start=9756 + _globals['_CONTEXTSERVICE']._serialized_end=10175 + _globals['_HOOKSERVICE']._serialized_start=10178 + _globals['_HOOKSERVICE']._serialized_end=10375 + _globals['_APPROVALSERVICE']._serialized_start=10377 + _globals['_APPROVALSERVICE']._serialized_end=10484 + _globals['_KERNELSERVICE']._serialized_start=10487 + _globals['_KERNELSERVICE']._serialized_end=11463 + _globals['_MODULELIFECYCLE']._serialized_start=11466 + _globals['_MODULELIFECYCLE']._serialized_end=11769 # @@protoc_insertion_point(module_scope) diff --git a/proto/amplifier_module_pb2_grpc.py b/proto/amplifier_module_pb2_grpc.py index 1c5c8cd..e6b7668 100644 --- 
a/proto/amplifier_module_pb2_grpc.py +++ b/proto/amplifier_module_pb2_grpc.py @@ -744,6 +744,11 @@ def __init__(self, channel): request_serializer=amplifier__module__pb2.HookHandleRequest.SerializeToString, response_deserializer=amplifier__module__pb2.HookResult.FromString, _registered_method=True) + self.GetSubscriptions = channel.unary_unary( + '/amplifier.module.HookService/GetSubscriptions', + request_serializer=amplifier__module__pb2.GetSubscriptionsRequest.SerializeToString, + response_deserializer=amplifier__module__pb2.GetSubscriptionsResponse.FromString, + _registered_method=True) class HookServiceServicer(object): @@ -756,6 +761,16 @@ def Handle(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def GetSubscriptions(self, request, context): + """Return the event subscriptions this hook wants to receive. + The host calls this at mount time and registers the subscriptions itself. + A future RegisterHook RPC on KernelService will allow bidirectional + registration where the module pushes subscriptions to the kernel. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_HookServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -764,6 +779,11 @@ def add_HookServiceServicer_to_server(servicer, server): request_deserializer=amplifier__module__pb2.HookHandleRequest.FromString, response_serializer=amplifier__module__pb2.HookResult.SerializeToString, ), + 'GetSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscriptions, + request_deserializer=amplifier__module__pb2.GetSubscriptionsRequest.FromString, + response_serializer=amplifier__module__pb2.GetSubscriptionsResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'amplifier.module.HookService', rpc_method_handlers) @@ -803,6 +823,33 @@ def Handle(request, metadata, _registered_method=True) + @staticmethod + def GetSubscriptions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/amplifier.module.HookService/GetSubscriptions', + amplifier__module__pb2.GetSubscriptionsRequest.SerializeToString, + amplifier__module__pb2.GetSubscriptionsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + class ApprovalServiceStub(object): """Approval module contract — human-in-the-loop approval. 
diff --git a/pyproject.toml b/pyproject.toml index 96556fd..b60db61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,3 +69,6 @@ dev = [ testpaths = ["tests", "bindings/python/tests"] addopts = "--import-mode=importlib" asyncio_mode = "strict" +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')", +] diff --git a/python/amplifier_core/__init__.py b/python/amplifier_core/__init__.py index 751b57c..e7370e5 100644 --- a/python/amplifier_core/__init__.py +++ b/python/amplifier_core/__init__.py @@ -84,7 +84,7 @@ from .testing import MockContextManager from .testing import MockTool from .testing import ScriptedOrchestrator -from .testing import TestCoordinator +from .testing import MockCoordinator from .testing import create_test_coordinator from .testing import wait_for from .utils.retry import classify_error_message @@ -168,7 +168,7 @@ "ToolCallContent", "ToolResultContent", # Testing utilities - "TestCoordinator", + "MockCoordinator", "MockTool", "MockContextManager", "EventRecorder", diff --git a/python/amplifier_core/_session_init.py b/python/amplifier_core/_session_init.py index eb48c5e..9731515 100644 --- a/python/amplifier_core/_session_init.py +++ b/python/amplifier_core/_session_init.py @@ -61,6 +61,7 @@ async def initialize_session( orchestrator_id, orchestrator_config, source_hint=orchestrator_source, + coordinator=coordinator, ) cleanup = await orchestrator_mount(coordinator) if cleanup: @@ -84,7 +85,10 @@ async def initialize_session( logger.info(f"Loading context manager: {context_id}") try: context_mount = await loader.load( - context_id, context_config, source_hint=context_source + context_id, + context_config, + source_hint=context_source, + coordinator=coordinator, ) cleanup = await context_mount(coordinator) if cleanup: @@ -142,6 +146,7 @@ async def initialize_session( module_id, provider_config.get("config", {}), source_hint=provider_config.get("source"), + coordinator=coordinator, ) cleanup = await 
provider_mount(coordinator) if cleanup: @@ -188,6 +193,7 @@ async def initialize_session( module_id, tool_config.get("config", {}), source_hint=tool_config.get("source"), + coordinator=coordinator, ) cleanup = await tool_mount(coordinator) if cleanup: @@ -209,6 +215,7 @@ async def initialize_session( module_id, hook_config.get("config", {}), source_hint=hook_config.get("source"), + coordinator=coordinator, ) cleanup = await hook_mount(coordinator) if cleanup: diff --git a/python/amplifier_core/loader.py b/python/amplifier_core/loader.py index 3e7cab7..9934e75 100644 --- a/python/amplifier_core/loader.py +++ b/python/amplifier_core/loader.py @@ -178,6 +178,7 @@ async def load( module_id: str, config: dict[str, Any] | None = None, source_hint: str | dict | None = None, + coordinator: ModuleCoordinator | None = None, ) -> Callable[[ModuleCoordinator], Awaitable[Callable | None]]: """ Load a specific module using source resolution. @@ -186,6 +187,11 @@ async def load( module_id: Module identifier config: Optional module configuration source_hint: Optional source URI/object from bundle config + coordinator: Optional coordinator for polyglot dispatch. + When provided and the resolved module is non-Python, + dispatch routes to the appropriate polyglot loader + (WASM or gRPC). When None, all modules load via the + Python path (backward compatible). Returns: Mount function for the module @@ -253,8 +259,38 @@ async def mount_with_config_cached( f"Added '{path_str}' to sys.path for module '{module_id}'" ) - # Validate module before loading + # --- Transport dispatch (polyglot) --- + # Check transport BEFORE validation: non-Python modules + # (WASM, gRPC) don't have Python packages to validate. 
+ if coordinator is not None: + try: + from amplifier_core._engine import resolve_module + + manifest = resolve_module(str(module_path)) + transport = manifest.get("transport", "python") + + if transport == "wasm": + return self._make_wasm_mount(module_path, coordinator) + + if transport == "grpc": + return await self._make_grpc_mount( + module_path, module_id, config, coordinator + ) + + # transport == "python" or unknown → fall through + except ImportError: + logger.debug( + "Rust engine not available, falling through to Python loader" + ) + except Exception as engine_err: + logger.warning( + f"resolve_module failed for '{module_id}': {engine_err}, " + "falling through to Python loader" + ) + + # Validate module before loading (Python modules only at this point) await self._validate_module(module_id, module_path, config=config) + except Exception as resolve_error: # Import here to avoid circular dependency from .module_sources import ModuleNotFoundError as SourceNotFoundError @@ -588,6 +624,78 @@ def _find_package_dir(self, module_id: str, module_path: Path) -> Path | None: return None + def _make_wasm_mount( + self, module_path: Path, coordinator: ModuleCoordinator + ) -> Callable[[ModuleCoordinator], Awaitable[Callable | None]]: + """Return a mount function that loads a WASM module via Rust ``load_and_mount_wasm()``. + + Calls the Rust ``load_and_mount_wasm()`` binding which resolves the + module manifest, instantiates a WASM engine, and mounts the loaded + module directly into the coordinator's ``mount_points`` dict (e.g. + ``mount_points["tools"]`` for tool modules). + + Args: + module_path: Path to the .wasm file or directory containing it. + coordinator: Reserved for future WASM lifecycle management. + Currently unused — the inner closure receives its own + ``coord`` argument at mount time. Kept for signature + parity with ``_make_grpc_mount``. + + Returns: + Async mount function that loads and mounts the WASM module. 
+ """ + # Re-import from _engine: the dispatch block already proved the module + # exists (resolve_module succeeded), but load_and_mount_wasm could be + # absent in a version-mismatch scenario. That ImportError propagates + # to the caller's outer try/except, which is intentional. + from amplifier_core._engine import load_and_mount_wasm + + async def wasm_mount(coord: ModuleCoordinator) -> Callable | None: + result = load_and_mount_wasm(coord, str(module_path)) + logger.info(f"[module:mount] WASM mounted: {result}") + return None # No cleanup function for WASM modules + + return wasm_mount + + async def _make_grpc_mount( + self, + module_path: Path, + module_id: str, + config: dict[str, Any] | None, + coordinator: ModuleCoordinator, + ) -> Callable[[ModuleCoordinator], Awaitable[Callable | None]]: + """Return a mount function that loads a gRPC module via the gRPC loader bridge. + + Reads ``amplifier.toml`` from the module directory for endpoint and + service configuration, then delegates to the gRPC loader bridge + (``loader_grpc.load_grpc_module``) which handles channel setup, + protobuf negotiation, and adapter wrapping. + + Args: + module_path: Path to the module directory containing amplifier.toml. + module_id: Module identifier. + config: Optional module configuration. + coordinator: The coordinator instance. + + Returns: + Async mount function from the gRPC loader bridge. 
+ """ + from .loader_grpc import load_grpc_module + + # Read amplifier.toml for gRPC config + try: + import tomli + except ImportError: + import tomllib as tomli # type: ignore[no-redef] + + toml_path = module_path / "amplifier.toml" + meta: dict[str, Any] = {} + if toml_path.exists(): + with open(toml_path, "rb") as f: + meta = tomli.load(f) + + return await load_grpc_module(module_id, config, meta, coordinator) + async def initialize( self, module: Any, coordinator: ModuleCoordinator ) -> Callable[[], Awaitable[None]] | None: diff --git a/python/amplifier_core/loader_dispatch.py b/python/amplifier_core/loader_dispatch.py deleted file mode 100644 index 991c2ce..0000000 --- a/python/amplifier_core/loader_dispatch.py +++ /dev/null @@ -1,131 +0,0 @@ -"""Polyglot module loader dispatch. - -Routes module loading to the appropriate loader based on amplifier.toml. -If no amplifier.toml exists, falls back to the existing Python loader -for 100% backward compatibility. - -Integration point: _session_init.py calls load_module() instead of -directly calling loader.load(). -""" - -import logging -import os -from typing import Any - -logger = logging.getLogger(__name__) - - -def _read_module_meta(source_path: str) -> dict[str, Any]: - """Read amplifier.toml from a module's source directory. - - Returns: - Parsed TOML as a dict, or empty dict if file doesn't exist. - """ - toml_path = os.path.join(source_path, "amplifier.toml") - if not os.path.exists(toml_path): - return {} - - try: - import tomli - except ImportError: - try: - import tomllib as tomli # Python 3.11+ - except ImportError: - logger.warning( - "Neither tomli nor tomllib available, cannot read amplifier.toml" - ) - return {} - - with open(toml_path, "rb") as f: - return tomli.load(f) - - -def _detect_transport(source_path: str) -> str: - """Detect the transport type from amplifier.toml. - - Returns: - Transport string: "python" (default), "grpc", "native", or "wasm". 
- """ - meta = _read_module_meta(source_path) - if not meta: - return "python" - return meta.get("module", {}).get("transport", "python") - - -async def load_module( - module_id: str, - config: dict[str, Any] | None, - source_path: str | None, - coordinator: Any, -) -> Any: - """Load a module from a resolved source path. - - Uses the Rust module resolver to auto-detect transport type. - Falls back to Python loader for backward compatibility. - - Args: - module_id: Module identifier (e.g., "tool-database") - config: Optional module configuration dict - source_path: Resolved filesystem path to the module (or None) - coordinator: The coordinator instance (RustCoordinator or ModuleCoordinator) - - Returns: - Mount function for the module - - Raises: - NotImplementedError: For transport types not yet supported - ValueError: If module cannot be loaded - """ - # No source path means we can't detect transport — fall through to Python loader - if source_path is None: - from .loader import ModuleLoader - - loader = coordinator.loader or ModuleLoader(coordinator=coordinator) - return await loader.load(module_id, config, source_hint=None) - - try: - from amplifier_core._engine import resolve_module as rust_resolve - - manifest = rust_resolve(source_path) - transport = manifest.get("transport", "python") - except ImportError: - logger.debug("Rust engine not available, using Python-only transport detection") - transport = _detect_transport(source_path) - except Exception as e: - logger.debug( - f"Rust resolver failed for '{module_id}': {e}, falling back to Python detection" - ) - transport = _detect_transport(source_path) - - if transport == "grpc": - from .loader_grpc import load_grpc_module - - meta = _read_module_meta(source_path) - return await load_grpc_module(module_id, config, meta, coordinator) - - if transport == "wasm": - try: - from amplifier_core._engine import load_and_mount_wasm - - async def _wasm_mount(coord: Any) -> None: - result = load_and_mount_wasm(coord, 
source_path) - logger.info(f"[module:mount] {module_id} mounted via WASM: {result}") - - return _wasm_mount - except ImportError: - raise NotImplementedError( - f"WASM module loading for '{module_id}' requires the Rust engine. " - "Install amplifier-core with Rust extensions enabled." - ) - - if transport == "native": - raise NotImplementedError( - f"Native Rust module loading not yet implemented for '{module_id}'. " - "Use transport = 'grpc' to load Rust modules as gRPC services." - ) - - # Default: existing Python loader (backward compatible) - from .loader import ModuleLoader - - loader = coordinator.loader or ModuleLoader(coordinator=coordinator) - return await loader.load(module_id, config, source_hint=source_path) diff --git a/python/amplifier_core/pytest_plugin.py b/python/amplifier_core/pytest_plugin.py index 7a7b7d7..05279f9 100644 --- a/python/amplifier_core/pytest_plugin.py +++ b/python/amplifier_core/pytest_plugin.py @@ -8,7 +8,7 @@ In a module repo, tests automatically get: - `module_path` fixture: Path to the module's Python package - `module_type` fixture: Detected type (provider, tool, hook, etc.) 
- - `coordinator` fixture: TestCoordinator for mounting modules + - `coordinator` fixture: MockCoordinator for mounting modules - `provider_module`, `tool_module`, etc.: Mounted module instances Modules can inherit from base test classes: @@ -394,9 +394,9 @@ async def _load_module( @pytest.fixture def coordinator() -> Any: """Create a fresh test coordinator for module testing.""" - from amplifier_core.testing import TestCoordinator + from amplifier_core.testing import MockCoordinator - return TestCoordinator() + return MockCoordinator() @pytest.fixture diff --git a/python/amplifier_core/session.py b/python/amplifier_core/session.py index a105c8f..557d728 100644 --- a/python/amplifier_core/session.py +++ b/python/amplifier_core/session.py @@ -8,6 +8,7 @@ from typing import TYPE_CHECKING from typing import Any +from ._session_init import _safe_exception_str, initialize_session from .coordinator import ModuleCoordinator from .loader import ModuleLoader from .models import SessionStatus @@ -20,18 +21,6 @@ logger = logging.getLogger(__name__) -def _safe_exception_str(e: BaseException) -> str: - """ - CRITICAL: Explicitly handle exception string conversion for Windows cp1252 compatibility. - Default encoding can fail on non-cp1252 characters, causing a crash during error handling. - We fall back to repr() which is safer as it escapes problematic characters. - """ - try: - return str(e) - except UnicodeDecodeError: - return repr(e) - - class AmplifierSession: """ A single Amplifier session tying everything together. @@ -119,258 +108,17 @@ def _merge_configs( return result async def initialize(self) -> None: - """ - Load and mount all configured modules. - The orchestrator module determines behavior. 
- """ + """Delegates to _session_init.initialize_session() — the single + implementation shared by both AmplifierSession and RustSession.""" if self._initialized: return - - # Note: Module source resolver should be mounted by app layer before initialization - # The loader will use entry point fallback if no resolver is mounted - - try: - # Load orchestrator (required) - # Handle both dict (ModuleConfig) and string formats - orchestrator_spec = self.config.get("session", {}).get( - "orchestrator", "loop-basic" - ) - if isinstance(orchestrator_spec, dict): - orchestrator_id = orchestrator_spec.get("module", "loop-basic") - orchestrator_source = orchestrator_spec.get("source") - orchestrator_config = orchestrator_spec.get("config", {}) - else: - orchestrator_id = orchestrator_spec - orchestrator_source = self.config.get("session", {}).get( - "orchestrator_source" - ) - orchestrator_config = self.config.get("orchestrator", {}).get( - "config", {} - ) - - logger.info(f"Loading orchestrator: {orchestrator_id}") - - try: - orchestrator_mount = await self.loader.load( - orchestrator_id, - orchestrator_config, - source_hint=orchestrator_source, - ) - # Note: config is already embedded in orchestrator_mount by the loader - cleanup = await orchestrator_mount(self.coordinator) - if cleanup: - self.coordinator.register_cleanup(cleanup) - except Exception as e: - logger.error( - f"Failed to load orchestrator '{orchestrator_id}': {_safe_exception_str(e)}" - ) - raise RuntimeError( - f"Cannot initialize without orchestrator: {_safe_exception_str(e)}" - ) - - # Load context manager (required) - # Handle both dict (ModuleConfig) and string formats - context_spec = self.config.get("session", {}).get( - "context", "context-simple" - ) - if isinstance(context_spec, dict): - context_id = context_spec.get("module", "context-simple") - context_source = context_spec.get("source") - context_config = context_spec.get("config", {}) - else: - context_id = context_spec - context_source = 
self.config.get("session", {}).get("context_source") - context_config = self.config.get("context", {}).get("config", {}) - - logger.info(f"Loading context manager: {context_id}") - - try: - context_mount = await self.loader.load( - context_id, context_config, source_hint=context_source - ) - cleanup = await context_mount(self.coordinator) - if cleanup: - self.coordinator.register_cleanup(cleanup) - except Exception as e: - logger.error( - f"Failed to load context manager '{context_id}': {_safe_exception_str(e)}" - ) - raise RuntimeError( - f"Cannot initialize without context manager: {_safe_exception_str(e)}" - ) - - # Validate multi-instance providers: at most ONE entry per module may omit - # instance_id. That one entry is the "default" instance that keeps the - # provider's default mount name. All additional entries need explicit instance_id. - _provider_module_counts: dict[str, int] = {} - _provider_no_id_counts: dict[str, int] = {} - for _pc in self.config.get("providers", []): - _mid = _pc.get("module", "") - if _mid: - _provider_module_counts[_mid] = ( - _provider_module_counts.get(_mid, 0) + 1 - ) - if not _pc.get("instance_id"): - _provider_no_id_counts[_mid] = ( - _provider_no_id_counts.get(_mid, 0) + 1 - ) - - for _mid, _no_id_count in _provider_no_id_counts.items(): - if _provider_module_counts.get(_mid, 0) > 1 and _no_id_count > 1: - raise ValueError( - f"Multi-instance providers require explicit 'instance_id' on each " - f"additional entry. Found {_no_id_count} entries for module " - f"'{_mid}' without instance_id (at most 1 allowed as the default " - f"instance)." 
- ) - - # Load providers - for provider_config in self.config.get("providers", []): - module_id = provider_config.get("module") - if not module_id: - continue - instance_id = provider_config.get( - "instance_id" - ) # multi-instance support - - try: - logger.info( - f"Loading provider: {module_id}" - + (f" (instance: {instance_id})" if instance_id else "") - ) - - # Snapshot: save any existing provider at the default mount name - # before loading. The new provider will self-mount there and may - # overwrite a previously-loaded default instance. - existing_at_default: object | None = None - if instance_id: - _default_name = ( - module_id.removeprefix("provider-") - if module_id.startswith("provider-") - else module_id - ) - _snap_dict = self.coordinator.get("providers") or {} - existing_at_default = _snap_dict.get(_default_name) - - provider_mount = await self.loader.load( - module_id, - provider_config.get("config", {}), - source_hint=provider_config.get("source"), - ) - cleanup = await provider_mount(self.coordinator) - if cleanup: - self.coordinator.register_cleanup(cleanup) - - # Multi-instance remapping: if instance_id specified, remap mount name - if instance_id: - default_name = ( - module_id.removeprefix("provider-") - if module_id.startswith("provider-") - else module_id - ) - providers_dict = self.coordinator.get("providers") or {} - if ( - default_name in providers_dict - and default_name != instance_id - ): - new_instance = providers_dict[default_name] - await self.coordinator.mount( - "providers", new_instance, name=instance_id - ) - # Restore the previous occupant if the self-mount overwrote it - if ( - existing_at_default is not None - and existing_at_default is not new_instance - ): - await self.coordinator.mount( - "providers", - existing_at_default, - name=default_name, - ) - else: - await self.coordinator.unmount( - "providers", name=default_name - ) - logger.info( - f"Remapped provider '{default_name}' -> '{instance_id}'" - ) - except Exception 
as e: - logger.warning( - f"Failed to load provider '{module_id}': {_safe_exception_str(e)}", - exc_info=True, - ) - - # Load tools - for tool_config in self.config.get("tools", []): - module_id = tool_config.get("module") - if not module_id: - continue - - try: - logger.info(f"Loading tool: {module_id}") - tool_mount = await self.loader.load( - module_id, - tool_config.get("config", {}), - source_hint=tool_config.get("source"), - ) - cleanup = await tool_mount(self.coordinator) - if cleanup: - self.coordinator.register_cleanup(cleanup) - except Exception as e: - logger.warning( - f"Failed to load tool '{module_id}': {_safe_exception_str(e)}", - exc_info=True, - ) - - # Note: agents section is app-layer data (config overlays), not modules to mount - # The kernel passes agents through in the mount plan without interpretation - - # Load hooks - for hook_config in self.config.get("hooks", []): - module_id = hook_config.get("module") - if not module_id: - continue - - try: - logger.info(f"Loading hook: {module_id}") - hook_mount = await self.loader.load( - module_id, - hook_config.get("config", {}), - source_hint=hook_config.get("source"), - ) - cleanup = await hook_mount(self.coordinator) - if cleanup: - self.coordinator.register_cleanup(cleanup) - except Exception as e: - logger.warning( - f"Failed to load hook '{module_id}': {_safe_exception_str(e)}", - exc_info=True, - ) - - self._initialized = True - - # Emit session:fork event if this is a child session - if self.parent_id: - from .events import SESSION_FORK - - session_config = self.config.get("session", {}) - session_metadata = session_config.get("metadata", {}) - raw = session_config.get("raw", False) - - payload: dict = { - "parent": self.parent_id, - "session_id": self.session_id, - **({"metadata": session_metadata} if session_metadata else {}), - } - if raw: - payload["raw"] = redact_secrets(self.config) - await self.coordinator.hooks.emit(SESSION_FORK, payload) - - logger.info(f"Session {self.session_id} 
initialized successfully") - - except Exception as e: - logger.error(f"Session initialization failed: {_safe_exception_str(e)}") - raise + # Propagate session's loader to coordinator so initialize_session() + # uses it (RustSession sets coordinator.loader directly instead). + self.coordinator.loader = self.loader + await initialize_session( + self.config, self.coordinator, self.session_id, self.parent_id + ) + self._initialized = True async def execute(self, prompt: str) -> str: """ diff --git a/python/amplifier_core/testing.py b/python/amplifier_core/testing.py index da37593..4c1db73 100644 --- a/python/amplifier_core/testing.py +++ b/python/amplifier_core/testing.py @@ -14,7 +14,7 @@ from amplifier_core import ToolResult -class TestCoordinator(ModuleCoordinator): +class MockCoordinator(ModuleCoordinator): """Test coordinator with additional debugging capabilities. Subclasses the Rust-backed ModuleCoordinator (via _rust_wrappers). @@ -159,9 +159,9 @@ async def execute(self, prompt: str, context, providers, tools, hooks) -> str: return response -def create_test_coordinator() -> TestCoordinator: +def create_test_coordinator() -> MockCoordinator: """Create a test coordinator with basic setup.""" - coordinator = TestCoordinator() + coordinator = MockCoordinator() # Add mock tools coordinator.mount_points["tools"]["echo"] = MockTool("echo", "Echo response") diff --git a/python/amplifier_core/validation/context.py b/python/amplifier_core/validation/context.py index 7c1823b..ddc147d 100644 --- a/python/amplifier_core/validation/context.py +++ b/python/amplifier_core/validation/context.py @@ -217,9 +217,9 @@ async def _check_protocol_compliance( config: Optional module configuration (uses empty dict if not provided) """ # Create coordinator and track mount_result outside try block so finally can access them - from ..testing import TestCoordinator + from ..testing import MockCoordinator - coordinator = TestCoordinator() + coordinator = MockCoordinator() mount_result = None 
# Track returned cleanup function try: # Use provided config or empty dict as fallback diff --git a/python/amplifier_core/validation/hook.py b/python/amplifier_core/validation/hook.py index 0f6fd31..d315d16 100644 --- a/python/amplifier_core/validation/hook.py +++ b/python/amplifier_core/validation/hook.py @@ -217,9 +217,9 @@ async def _check_protocol_compliance( config: Optional module configuration (uses empty dict if not provided) """ # Create coordinator and track mount_result outside try block so finally can access them - from ..testing import TestCoordinator + from ..testing import MockCoordinator - coordinator = TestCoordinator() + coordinator = MockCoordinator() mount_result = None # Track returned cleanup function try: # Use provided config or empty dict as fallback diff --git a/python/amplifier_core/validation/orchestrator.py b/python/amplifier_core/validation/orchestrator.py index de8f9ef..c78d102 100644 --- a/python/amplifier_core/validation/orchestrator.py +++ b/python/amplifier_core/validation/orchestrator.py @@ -219,9 +219,9 @@ async def _check_protocol_compliance( config: Optional module configuration (uses empty dict if not provided) """ # Create coordinator and track mount_result outside try block so finally can access them - from ..testing import TestCoordinator + from ..testing import MockCoordinator - coordinator = TestCoordinator() + coordinator = MockCoordinator() mount_result = None # Track returned cleanup function try: # Use provided config or empty dict as fallback diff --git a/python/amplifier_core/validation/provider.py b/python/amplifier_core/validation/provider.py index 9b02cb7..73a4535 100644 --- a/python/amplifier_core/validation/provider.py +++ b/python/amplifier_core/validation/provider.py @@ -218,9 +218,9 @@ async def _check_protocol_compliance( config: Optional module configuration (uses empty dict if not provided) """ # Create coordinator and track mount_result outside try block so finally can access them - from ..testing 
import TestCoordinator + from ..testing import MockCoordinator - coordinator = TestCoordinator() + coordinator = MockCoordinator() mount_result = None # Track returned cleanup function try: # Use provided config or empty dict as fallback diff --git a/python/amplifier_core/validation/tool.py b/python/amplifier_core/validation/tool.py index bb662f8..c459925 100644 --- a/python/amplifier_core/validation/tool.py +++ b/python/amplifier_core/validation/tool.py @@ -217,9 +217,9 @@ async def _check_protocol_compliance( config: Optional module configuration (uses empty dict if not provided) """ # Create coordinator and track mount_result outside try block so finally can access them - from ..testing import TestCoordinator + from ..testing import MockCoordinator - coordinator = TestCoordinator() + coordinator = MockCoordinator() mount_result = None # Track returned cleanup function try: # Use provided config or empty dict as fallback diff --git a/python/tests/test_loader_dispatch_wasm.py b/python/tests/test_loader_dispatch_wasm.py index 611fc3e..460819e 100644 --- a/python/tests/test_loader_dispatch_wasm.py +++ b/python/tests/test_loader_dispatch_wasm.py @@ -1,106 +1,119 @@ -"""Tests for WASM module mounting via loader_dispatch. +"""Tests for WASM module mounting via loader.load() dispatch. -Verifies that WASM modules loaded through loader_dispatch are actually +Verifies that WASM modules loaded through loader.load() are actually mounted into the coordinator's mount_points, not just loaded and discarded. Uses mocks to avoid slow WASM compilation on ARM64 while still verifying -the critical behavior: _noop_mount is replaced with a real bridge that -calls load_and_mount_wasm. +the critical behavior: the mount closure returned by loader.load() calls +load_and_mount_wasm at mount time. 
""" -import os import sys -import tempfile -from unittest.mock import MagicMock, patch +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch import pytest +from amplifier_core.loader import ModuleLoader + +MODULE_ID = "echo-tool" + @pytest.fixture -def fixture_dir(): - """Create a temp directory referencing the echo-tool fixture location.""" - # Use the real fixture path for documentation clarity, but the mock - # means we won't actually read WASM files during the test. - fixture_base = os.path.join( - os.path.dirname(__file__), - "..", - "..", - "tests", - "fixtures", - "wasm", +def wasm_fixture_path(): + """Path to the echo-tool.wasm fixture file. Skips if missing.""" + path = ( + Path(__file__).parent + / ".." + / ".." + / "tests" + / "fixtures" + / "wasm" + / f"{MODULE_ID}.wasm" ) - wasm_path = os.path.join(fixture_base, "echo-tool.wasm") - if not os.path.exists(wasm_path): - pytest.skip(f"WASM fixture not found: {wasm_path}") + if not path.exists(): + pytest.skip(f"WASM fixture not found: {path}") + return path + - with tempfile.TemporaryDirectory() as tmpdir: - # Write an amplifier.toml so Python fallback detects wasm transport - toml_path = os.path.join(tmpdir, "amplifier.toml") - with open(toml_path, "w") as f: - f.write('[module]\ntransport = "wasm"\ntype = "tool"\n') - yield tmpdir +@pytest.fixture +def mock_coordinator(): + """MagicMock coordinator with real mount_points structure.""" + coord = MagicMock() + coord.mount_points = { + "orchestrator": None, + "providers": {}, + "tools": {}, + "context": None, + "hooks": MagicMock(), + "module-source-resolver": None, + } + return coord @pytest.mark.asyncio -async def test_wasm_tool_mounts_into_coordinator(fixture_dir): - """WASM tool loaded via loader_dispatch is actually registered in coordinator.mount_points['tools']. 
+async def test_wasm_tool_mounts_into_coordinator(wasm_fixture_path, mock_coordinator): + """WASM tool loaded via loader.load() is actually registered in coordinator.mount_points['tools']. With the old _noop_mount, the mount function did nothing and the tool was never registered. With the real bridge, load_and_mount_wasm is called at mount time and the tool appears in mount_points['tools']. """ - from amplifier_core.loader_dispatch import load_module + # -- Mock source resolution ----------------------------------------------- + fake_source = MagicMock() + fake_source.resolve.return_value = wasm_fixture_path - # Mock coordinator with real mount_points dict structure - coordinator = MagicMock() - coordinator.loader = None - coordinator.mount_points = { - "orchestrator": None, - "providers": {}, - "tools": {}, - "context": None, - "hooks": MagicMock(), - "module-source-resolver": None, - } + mock_resolver = MagicMock() + mock_resolver.async_resolve = AsyncMock(return_value=fake_source) - # Mock the Rust _engine module + # Wire resolver into coordinator + mock_coordinator.get.return_value = mock_resolver + + # -- Mock Rust engine ----------------------------------------------------- fake_engine = MagicMock() fake_engine.resolve_module.return_value = { "transport": "wasm", - "name": "echo-tool", + "name": MODULE_ID, } # Simulate what load_and_mount_wasm does: mount tool into coordinator def fake_load_and_mount(coord, path): tool_mock = MagicMock() - tool_mock.name = "echo-tool" - coord.mount_points["tools"]["echo-tool"] = tool_mock - return {"status": "mounted", "module_type": "tool", "name": "echo-tool"} + tool_mock.name = MODULE_ID + coord.mount_points["tools"][MODULE_ID] = tool_mock + return {"status": "mounted", "module_type": "tool", "name": MODULE_ID} fake_engine.load_and_mount_wasm = MagicMock(side_effect=fake_load_and_mount) - # Also provide load_wasm_from_path for backward compat (old code path) - fake_engine.load_wasm_from_path.return_value = { - "status": 
"loaded", - "module_type": "tool", - } - with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): - mount_fn = await load_module("echo-tool", {}, fixture_dir, coordinator) + # -- Execute -------------------------------------------------------------- + loader = ModuleLoader(coordinator=mock_coordinator) - # mount_fn must be callable + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + mount_fn = await loader.load( + MODULE_ID, + {}, + source_hint="/fake/path", + coordinator=mock_coordinator, # type: ignore[call-arg] + ) + + # -- Verify --------------------------------------------------------------- + # 1. mount_fn must be callable assert callable(mount_fn) - # Before calling mount: tools should still be empty - assert "echo-tool" not in coordinator.mount_points["tools"] + # 2. echo-tool NOT in mount_points before calling mount + assert MODULE_ID not in mock_coordinator.mount_points["tools"] - # Call the mount function — this is where the tool gets registered - await mount_fn(coordinator) # type: ignore[misc] + # 3. Call the mount function — this is where the tool gets registered + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + await mount_fn(mock_coordinator) # type: ignore[misc] - # The tool must now be in the coordinator's mount_points - tools = coordinator.mount_points["tools"] - assert "echo-tool" in tools, ( - f"'echo-tool' not found in mount_points['tools']. Keys: {list(tools.keys())}" + # 4. echo-tool IS in mount_points after calling mount + tools = mock_coordinator.mount_points["tools"] + assert MODULE_ID in tools, ( + f"'{MODULE_ID}' not found in mount_points['tools']. Keys: {list(tools.keys())}" ) - # Verify load_and_mount_wasm was called with the coordinator and path - fake_engine.load_and_mount_wasm.assert_called_once_with(coordinator, fixture_dir) + # 5. 
load_and_mount_wasm was called with correct args + fake_engine.load_and_mount_wasm.assert_called_once_with( + mock_coordinator, str(wasm_fixture_path) + ) diff --git a/tests/fixtures/wasm/deny-hook.wasm b/tests/fixtures/wasm/deny-hook.wasm index 8bf24df..d1d27e9 100644 Binary files a/tests/fixtures/wasm/deny-hook.wasm and b/tests/fixtures/wasm/deny-hook.wasm differ diff --git a/tests/fixtures/wasm/src/deny-hook/Cargo.lock b/tests/fixtures/wasm/src/deny-hook/Cargo.lock new file mode 100644 index 0000000..aa62deb --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/Cargo.lock @@ -0,0 +1,861 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "amplifier-guest" +version = "0.1.0" +dependencies = [ + "prost", + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "deny-hook" +version = "0.1.0" +dependencies = [ + "amplifier-guest", + "serde_json", + "wit-bindgen-rt", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "itertools" +version = 
"0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "syn" +version = "2.0.117" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "wasm-encoder" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version 
= "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = 
[ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/tests/fixtures/wasm/src/deny-hook/src/bindings.rs b/tests/fixtures/wasm/src/deny-hook/src/bindings.rs index 90fac86..d852837 100644 --- a/tests/fixtures/wasm/src/deny-hook/src/bindings.rs +++ b/tests/fixtures/wasm/src/deny-hook/src/bindings.rs @@ -13,6 +13,24 @@ pub mod exports { #[doc(hidden)] static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; use super::super::super::super::_rt; + #[derive(Clone)] + pub struct EventSubscription { + pub event: _rt::String, + pub priority: i32, + pub name: _rt::String, + } + impl ::core::fmt::Debug for EventSubscription { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result 
{ + f.debug_struct("EventSubscription") + .field("event", &self.event) + .field("priority", &self.priority) + .field("name", &self.name) + .finish() + } + } #[doc(hidden)] #[allow(non_snake_case)] pub unsafe fn _export_handle_cabi( @@ -82,10 +100,115 @@ pub mod exports { } } } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_subscriptions_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::get_subscriptions( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + let vec6 = result1; + let len6 = vec6.len(); + let layout6 = _rt::alloc::Layout::from_size_align_unchecked( + vec6.len() * (5 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let result6 = if layout6.size() != 0 { + let ptr = _rt::alloc::alloc(layout6).cast::(); + if ptr.is_null() { + _rt::alloc::handle_alloc_error(layout6); + } + ptr + } else { + ::core::ptr::null_mut() + }; + for (i, e) in vec6.into_iter().enumerate() { + let base = result6 + .add(i * (5 * ::core::mem::size_of::<*const u8>())); + { + let EventSubscription { + event: event3, + priority: priority3, + name: name3, + } = e; + let vec4 = (event3.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *base + .add(::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *base.add(0).cast::<*mut u8>() = ptr4.cast_mut(); + *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = _rt::as_i32(priority3); + let vec5 = (name3.into_bytes()).into_boxed_slice(); + let ptr5 = vec5.as_ptr().cast::(); + let len5 = vec5.len(); + ::core::mem::forget(vec5); + *base + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::() = len5; + *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr5.cast_mut(); + } + } + 
*ptr2.add(::core::mem::size_of::<*const u8>()).cast::() = len6; + *ptr2.add(0).cast::<*mut u8>() = result6; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_subscriptions(arg0: *mut u8) { + let l0 = *arg0.add(0).cast::<*mut u8>(); + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let base6 = l0; + let len6 = l1; + for i in 0..len6 { + let base = base6 + .add(i * (5 * ::core::mem::size_of::<*const u8>())); + { + let l2 = *base.add(0).cast::<*mut u8>(); + let l3 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l2, l3, 1); + let l4 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *base + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + _rt::cabi_dealloc( + base6, + len6 * (5 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + } pub trait Guest { /// Handle a lifecycle event (HookHandleRequest proto, serialized). /// Returns proto-serialized HookResult on success. fn handle(event: _rt::Vec) -> Result<_rt::Vec, _rt::String>; + /// Return the events this hook wants to receive, along with priority + /// and a human-readable name for each subscription. + /// + /// `config` is the module's JSON configuration blob (from bundle YAML), + /// serialized as bytes so the hook can adjust its subscriptions at + /// load time. + fn get_subscriptions( + config: _rt::Vec, + ) -> _rt::Vec; } #[doc(hidden)] macro_rules! 
__export_amplifier_modules_hook_handler_1_0_0_cabi { @@ -97,7 +220,17 @@ pub mod exports { arg1) } } #[unsafe (export_name = "cabi_post_amplifier:modules/hook-handler@1.0.0#handle")] unsafe extern "C" fn _post_return_handle(arg0 : * mut u8,) { unsafe { - $($path_to_types)*:: __post_return_handle::<$ty > (arg0) } } }; + $($path_to_types)*:: __post_return_handle::<$ty > (arg0) } } + #[unsafe (export_name = + "amplifier:modules/hook-handler@1.0.0#get-subscriptions")] unsafe + extern "C" fn export_get_subscriptions(arg0 : * mut u8, arg1 : + usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_get_subscriptions_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = + "cabi_post_amplifier:modules/hook-handler@1.0.0#get-subscriptions")] + unsafe extern "C" fn _post_return_get_subscriptions(arg0 : * mut + u8,) { unsafe { $($path_to_types)*:: + __post_return_get_subscriptions::<$ty > (arg0) } } }; }; } #[doc(hidden)] @@ -120,6 +253,7 @@ pub mod exports { #[rustfmt::skip] mod _rt { #![allow(dead_code, clippy::all)] + pub use alloc_crate::string::String; #[cfg(target_arch = "wasm32")] pub fn run_ctors_once() { wit_bindgen_rt::run_ctors_once(); @@ -132,9 +266,67 @@ mod _rt { let layout = alloc::Layout::from_size_align_unchecked(size, align); alloc::dealloc(ptr, layout); } - pub use alloc_crate::string::String; - extern crate alloc as alloc_crate; + pub fn as_i32(t: T) -> i32 { + t.as_i32() + } + pub trait AsI32 { + fn as_i32(self) -> i32; + } + impl<'a, T: Copy + AsI32> AsI32 for &'a T { + fn as_i32(self) -> i32 { + (*self).as_i32() + } + } + impl AsI32 for i32 { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 for u32 { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 for i16 { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 for u16 { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 for i8 { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 
for u8 { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 for char { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } + impl AsI32 for usize { + #[inline] + fn as_i32(self) -> i32 { + self as i32 + } + } pub use alloc_crate::alloc; + extern crate alloc as alloc_crate; } /// Generates `#[unsafe(no_mangle)]` functions to export the specified type as /// the root implementation of all generated traits. @@ -173,9 +365,11 @@ pub(crate) use __export_hook_module_impl as export; )] #[doc(hidden)] #[allow(clippy::octal_escapes)] -pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 251] = *b"\ -\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07z\x01A\x02\x01A\x02\x01\ -B\x04\x01p}\x01j\x01\0\x01s\x01@\x01\x05event\0\0\x01\x04\0\x06handle\x01\x02\x04\ +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 340] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\xd2\x01\x01A\x02\x01\ +A\x02\x01B\x09\x01r\x03\x05events\x08priorityz\x04names\x04\0\x12event-subscript\ +ion\x03\0\0\x01p}\x01j\x01\x02\x01s\x01@\x01\x05event\x02\0\x03\x04\0\x06handle\x01\ +\x04\x01p\x01\x01@\x01\x06config\x02\0\x05\x04\0\x11get-subscriptions\x01\x06\x04\ \0$amplifier:modules/hook-handler@1.0.0\x05\0\x04\0#amplifier:modules/hook-modul\ e@1.0.0\x04\0\x0b\x11\x01\0\x0bhook-module\x03\0\0\0G\x09producers\x01\x0cproces\ sed-by\x02\x0dwit-component\x070.227.1\x10wit-bindgen-rust\x060.41.0"; diff --git a/tests/fixtures/wasm/src/deny-hook/src/lib.rs b/tests/fixtures/wasm/src/deny-hook/src/lib.rs index e733e8f..e060357 100644 --- a/tests/fixtures/wasm/src/deny-hook/src/lib.rs +++ b/tests/fixtures/wasm/src/deny-hook/src/lib.rs @@ -1,7 +1,7 @@ #[allow(warnings)] mod bindings; -use amplifier_guest::{HookAction, HookHandler, HookResult, Value}; +use amplifier_guest::{EventSubscription, HookAction, HookHandler, HookResult, Value}; #[derive(Default)] struct DenyHook; @@ -14,6 +14,14 @@ impl HookHandler for DenyHook { ..Default::default() }) } + + fn 
get_subscriptions(&self, _config: Value) -> Vec { + vec![EventSubscription { + event: "tool:pre".to_string(), + priority: 0, + name: "deny-all".to_string(), + }] + } } amplifier_guest::export_hook!(DenyHook); diff --git a/tests/fixtures/wasm/src/deny-hook/wit/amplifier-modules.wit b/tests/fixtures/wasm/src/deny-hook/wit/amplifier-modules.wit new file mode 100644 index 0000000..1e477f9 --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/wit/amplifier-modules.wit @@ -0,0 +1,31 @@ +// Minimal WIT for hook-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// interfaces that are only needed by other module worlds. + +package amplifier:modules@1.0.0; + +/// Hook handler interface — responds to lifecycle events. +interface hook-handler { + /// Handle a lifecycle event (HookHandleRequest proto, serialized). + /// Returns proto-serialized HookResult on success. + handle: func(event: list) -> result, string>; + + /// Return the events this hook wants to receive, along with priority + /// and a human-readable name for each subscription. + /// + /// `config` is the module's JSON configuration blob (from bundle YAML), + /// serialized as bytes so the hook can adjust its subscriptions at + /// load time. + get-subscriptions: func(config: list) -> list; + + record event-subscription { + event: string, + priority: s32, + name: string, + } +} + +/// Tier 1: Pure-compute hook handler module. +world hook-module { + export hook-handler; +} diff --git a/tests/fixtures/wasm/src/deny-hook/wit/hook.wit b/tests/fixtures/wasm/src/deny-hook/wit/hook.wit deleted file mode 100644 index 2a82d70..0000000 --- a/tests/fixtures/wasm/src/deny-hook/wit/hook.wit +++ /dev/null @@ -1,17 +0,0 @@ -// Minimal WIT for hook-module world. -// Extracted from the main amplifier-modules.wit to avoid pulling in -// WASI HTTP dependencies that are only needed by the provider-module world. 
- -package amplifier:modules@1.0.0; - -/// Hook handler interface — responds to lifecycle events. -interface hook-handler { - /// Handle a lifecycle event (HookHandleRequest proto, serialized). - /// Returns proto-serialized HookResult on success. - handle: func(event: list) -> result, string>; -} - -/// Tier 1: Pure-compute hook handler module. -world hook-module { - export hook-handler; -} diff --git a/tests/test_approval_provider_bridge.py b/tests/test_approval_provider_bridge.py new file mode 100644 index 0000000..ba4cd23 --- /dev/null +++ b/tests/test_approval_provider_bridge.py @@ -0,0 +1,53 @@ +"""Test that setting approval_system on the coordinator sets has_approval_provider.""" + +import pytest + + +def test_approval_system_sets_has_approval_provider(): + """Setting coordinator.approval_system should set has_approval_provider in to_dict.""" + try: + from amplifier_core._engine import RustCoordinator + except ImportError: + pytest.skip("Rust engine not available") + + coord = RustCoordinator() + + # Initially no approval provider + d = coord.to_dict() + assert ( + d.get("has_approval_provider") is False + or d.get("has_approval_provider") is None + ) + + # Set a simple approval system + class FakeApproval: + def request_approval(self, prompt, options, timeout, default): + return "approve" + + coord.approval_system = FakeApproval() + d = coord.to_dict() + assert d.get("has_approval_provider") is True + + +def test_clearing_approval_system_with_none(): + """Setting coordinator.approval_system = None should clear the provider.""" + try: + from amplifier_core._engine import RustCoordinator + except ImportError: + pytest.skip("Rust engine not available") + + coord = RustCoordinator() + + # Set a provider first + class FakeApproval: + def request_approval(self, prompt, options, timeout, default): + return "approve" + + coord.approval_system = FakeApproval() + d = coord.to_dict() + assert d.get("has_approval_provider") is True + + # Clear by setting to None + 
coord.approval_system = None + d = coord.to_dict() + assert d.get("has_approval_provider") is False diff --git a/tests/test_display_service_bridge.py b/tests/test_display_service_bridge.py new file mode 100644 index 0000000..9dd5be8 --- /dev/null +++ b/tests/test_display_service_bridge.py @@ -0,0 +1,25 @@ +"""Tests for PyDisplayServiceBridge — verifies that setting coordinator.display_system +creates a Rust-side DisplayService bridge and is reflected in to_dict().""" + +import pytest + + +def test_display_system_sets_has_display_service(): + try: + from amplifier_core._engine import RustCoordinator + except ImportError: + pytest.skip("Rust engine not available") + coord = RustCoordinator() + d = coord.to_dict() + assert d.get("has_display_service") is False or d.get("has_display_service") is None + + class FakeDisplay: + def __init__(self): + self.messages = [] + + def show_message(self, message, level, source): + self.messages.append((message, level, source)) + + coord.display_system = FakeDisplay() + d = coord.to_dict() + assert d.get("has_display_service") is True diff --git a/tests/test_loader_transport_dispatch.py b/tests/test_loader_transport_dispatch.py new file mode 100644 index 0000000..aa60a15 --- /dev/null +++ b/tests/test_loader_transport_dispatch.py @@ -0,0 +1,177 @@ +"""Tests for transport dispatch through ModuleLoader.load(). + +Verifies that loader.load() can dispatch to different transports (WASM, gRPC) +when the Rust engine resolves a module accordingly. +""" + +import sys +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from amplifier_core.loader import ModuleLoader + +MODULE_ID = "echo-tool" + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture +def wasm_fixture_path(): + """Path to the echo-tool.wasm fixture file. 
Skips if missing.""" + path = Path(__file__).parent / "fixtures" / "wasm" / f"{MODULE_ID}.wasm" + if not path.exists(): + pytest.skip(f"WASM fixture not found: {path}") + return path + + +@pytest.fixture +def mock_coordinator(): + """MagicMock coordinator with real mount_points structure.""" + coord = MagicMock() + coord.mount_points = { + "orchestrator": None, + "providers": {}, + "tools": {}, + "context": None, + "hooks": MagicMock(), + "module-source-resolver": None, + } + return coord + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_wasm_dispatch_returns_mount_function( + wasm_fixture_path, mock_coordinator +): + """loader.load() with a WASM path returns a callable mount function. + + The Rust engine resolves the module as WASM transport. loader.load() + should dispatch to the WASM loading path and return a mount function + that, when called with a coordinator, mounts the tool into + mount_points['tools']. 
+ """ + # -- Mock source resolution ----------------------------------------------- + # fake_source.resolve returns the wasm fixture path + fake_source = MagicMock() + fake_source.resolve.return_value = wasm_fixture_path + + # mock_resolver.async_resolve returns fake_source + mock_resolver = MagicMock() + mock_resolver.async_resolve = AsyncMock(return_value=fake_source) + + # Wire resolver into coordinator so the loader finds it at + # coordinator.get("module-source-resolver") + mock_coordinator.get.return_value = mock_resolver + + # -- Mock Rust engine ----------------------------------------------------- + fake_engine = MagicMock() + fake_engine.resolve_module.return_value = { + "transport": "wasm", + "module_type": "tool", + "artifact_type": "wasm", + "artifact_path": wasm_fixture_path, + } + + # Simulate what load_and_mount_wasm does: mount tool into coordinator + def fake_load_and_mount(coord, path): + tool_mock = MagicMock() + tool_mock.name = MODULE_ID + coord.mount_points["tools"][MODULE_ID] = tool_mock + return {"status": "mounted", "module_type": "tool", "name": MODULE_ID} + + fake_engine.load_and_mount_wasm = MagicMock(side_effect=fake_load_and_mount) + + # -- Execute -------------------------------------------------------------- + loader = ModuleLoader(coordinator=mock_coordinator) + mount_points = mock_coordinator.mount_points + + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + mount_fn = await loader.load( + MODULE_ID, {}, source_hint="/fake/path", coordinator=mock_coordinator + ) + + # -- Verify --------------------------------------------------------------- + # mount_fn must be callable + assert callable(mount_fn) + + # Call mount function and verify the tool is registered + await mount_fn(mock_coordinator) + assert MODULE_ID in mount_points["tools"] + + +@pytest.mark.asyncio +async def test_grpc_dispatch_routes_to_grpc_loader(mock_coordinator): + """loader.load() with gRPC transport dispatches to gRPC loading path. 
+ + When the Rust engine resolves a module as gRPC transport, loader.load() + should attempt to establish a gRPC channel. Since grpcio is not installed + (or connection fails), we expect an error whose message contains + gRPC-related keywords, confirming the loader routed to the gRPC path + rather than the Python entry-point path. + """ + # -- Create temp module dir with amplifier.toml -------------------------- + with tempfile.TemporaryDirectory() as tmpdir: + toml_path = Path(tmpdir) / "amplifier.toml" + toml_path.write_text( + "[module]\n" + "name = 'my-tool'\n" + "type = 'tool'\n" + "transport = 'grpc'\n" + "\n" + "[grpc]\n" + "endpoint = 'localhost:99999'\n" + ) + + # -- Mock source resolution ------------------------------------------ + fake_source = MagicMock() + fake_source.resolve.return_value = Path(tmpdir) + + mock_resolver = MagicMock() + mock_resolver.async_resolve = AsyncMock(return_value=fake_source) + mock_coordinator.get.return_value = mock_resolver + + # -- Mock Rust engine ------------------------------------------------ + fake_engine = MagicMock() + fake_engine.resolve_module.return_value = { + "transport": "grpc", + "module_type": "tool", + "artifact_type": "grpc", + "endpoint": "localhost:99999", + } + + # -- Execute --------------------------------------------------------- + loader = ModuleLoader(coordinator=mock_coordinator) + + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + with pytest.raises((ImportError, OSError, Exception)) as exc_info: + await loader.load( + "my-grpc-tool", + {}, + source_hint="/fake/path", + coordinator=mock_coordinator, + ) + + # -- Verify ---------------------------------------------------------- + # The error message must contain gRPC-related keywords, confirming + # the loader dispatched to the gRPC path (not the Python path). + # NOTE: This assertion relies on upstream error-message content + # (e.g. from grpcio or ImportError text). 
It's a pragmatic + # tradeoff — installing grpcio just for this test would add a + # heavy dependency. If the assertion breaks after a library + # upgrade, update ``grpc_keywords`` to match the new wording. + error_msg = str(exc_info.value).lower() + grpc_keywords = ("grpc", "grpcio", "connect", "channel") + assert any(kw in error_msg for kw in grpc_keywords), ( + f"Expected gRPC-related error but got: {exc_info.value}" + ) diff --git a/tests/test_loader_warning.py b/tests/test_loader_warning.py new file mode 100644 index 0000000..4fb55bb --- /dev/null +++ b/tests/test_loader_warning.py @@ -0,0 +1,75 @@ +"""Tests that resolve_module failures are logged at WARNING level. + +When the Rust engine's resolve_module raises an unexpected exception, +the loader falls through to the Python loader. This fallback should be +logged at WARNING (not DEBUG) so operators notice manifest corruption +or other engine failures in normal log output. +""" + +import logging +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from amplifier_core.loader import ModuleLoader + + +@pytest.mark.asyncio +async def test_resolve_module_failure_logs_warning(caplog, tmp_path): + """resolve_module raising RuntimeError must produce a WARNING log record.""" + + # -- Mock coordinator with source resolver --------------------------------- + fake_source = MagicMock() + fake_source.resolve.return_value = tmp_path # any valid Path + + mock_resolver = MagicMock() + mock_resolver.async_resolve = AsyncMock(return_value=fake_source) + + mock_coordinator = MagicMock() + mock_coordinator.get.return_value = mock_resolver + mock_coordinator.mount_points = { + "orchestrator": None, + "providers": {}, + "tools": {}, + "context": None, + "hooks": MagicMock(), + "module-source-resolver": mock_resolver, + } + + loader = ModuleLoader() + loader._coordinator = mock_coordinator + + # -- Build a mock engine where resolve_module raises ----------------------- + mock_engine = MagicMock() + 
mock_engine.resolve_module.side_effect = RuntimeError("corrupt manifest") + + # Patch _load_entry_point and _load_filesystem to return None so we + # reach the ValueError at the end (after the engine fallthrough path). + # Patch _validate_module to be a no-op (we don't care about validation here). + with ( + patch.object(loader, "_load_entry_point", return_value=None), + patch.object(loader, "_load_filesystem", return_value=None), + patch.object( + loader, "_validate_module", new_callable=AsyncMock + ), # no-op: we only need the engine fallback path + patch("amplifier_core._engine", mock_engine, create=True), + patch.dict("sys.modules", {"amplifier_core._engine": mock_engine}), + caplog.at_level(logging.WARNING, logger="amplifier_core.loader"), + ): + with pytest.raises(ValueError, match="failed to load"): + await loader.load( + module_id="test-mod", + config={}, + coordinator=mock_coordinator, + ) + + # -- Assert that the warning was emitted ----------------------------------- + warning_records = [ + r + for r in caplog.records + if r.levelno >= logging.WARNING and "resolve_module failed" in r.message + ] + assert len(warning_records) >= 1, ( + f"Expected at least one WARNING with 'resolve_module failed', " + f"got records: {[(r.levelname, r.message) for r in caplog.records]}" + ) diff --git a/tests/test_multi_instance.py b/tests/test_multi_instance.py index 75ba5e2..42c1337 100644 --- a/tests/test_multi_instance.py +++ b/tests/test_multi_instance.py @@ -11,7 +11,7 @@ from amplifier_core.session import AmplifierSession as PyAmplifierSession from amplifier_core._session_init import initialize_session -from amplifier_core.testing import TestCoordinator +from amplifier_core.testing import MockCoordinator # --------------------------------------------------------------------------- @@ -35,7 +35,7 @@ def _make_loader(module_to_mount_fn: dict): """Return a mock loader whose load() returns the configured mount function.""" loader = AsyncMock() - async def 
_load(module_id, config=None, source_hint=None): + async def _load(module_id, config=None, source_hint=None, coordinator=None): return module_to_mount_fn[module_id] loader.load.side_effect = _load @@ -72,7 +72,7 @@ async def test_single_instance_no_remapping(): } ) - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader await initialize_session(config, coordinator, session_id="test", parent_id=None) @@ -113,7 +113,7 @@ async def test_instance_id_remapping_removes_default_key(): } ) - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader await initialize_session(config, coordinator, session_id="test", parent_id=None) @@ -155,7 +155,7 @@ async def mount_fn_b(coordinator): call_count = {"n": 0} - async def load_side_effect(module_id, config=None, source_hint=None): + async def load_side_effect(module_id, config=None, source_hint=None, coordinator=None): if module_id == "loop-basic": return AsyncMock(return_value=None) if module_id == "context-simple": @@ -176,7 +176,7 @@ async def load_side_effect(module_id, config=None, source_hint=None): ], } - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader await initialize_session(config, coordinator, session_id="test", parent_id=None) @@ -225,7 +225,7 @@ async def test_session_py_instance_id_remapping(): # Replace the session's coordinator with our tracking one so we can inspect # mount/unmount history after initialization. 
- tracking_coordinator = TestCoordinator() + tracking_coordinator = MockCoordinator() tracking_coordinator.loader = loader session.coordinator = tracking_coordinator @@ -267,7 +267,7 @@ async def test_duplicate_module_without_instance_id_raises(): } ) - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader with pytest.raises(ValueError, match="instance_id"): @@ -289,7 +289,7 @@ async def mount_fn_b(coord): await coord.mount("providers", provider_b, name="mock") return None - async def load_side_effect(module_id, config=None, source_hint=None): + async def load_side_effect(module_id, config=None, source_hint=None, coordinator=None): if module_id == "loop-basic": return AsyncMock(return_value=None) if module_id == "context-simple": @@ -310,7 +310,7 @@ async def load_side_effect(module_id, config=None, source_hint=None): ], } - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader # Should not raise @@ -339,7 +339,7 @@ async def test_single_module_no_instance_id_ok(): } ) - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader # Should not raise @@ -367,7 +367,7 @@ async def mount_fn_named(coord): await coord.mount("providers", named_instance, name="mock") return None - async def load_side_effect(module_id, config=None, source_hint=None): + async def load_side_effect(module_id, config=None, source_hint=None, coordinator=None): if module_id == "loop-basic": return AsyncMock(return_value=None) if module_id == "context-simple": @@ -388,7 +388,7 @@ async def load_side_effect(module_id, config=None, source_hint=None): ], } - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader # Should NOT raise — one default entry is allowed @@ -429,7 +429,7 @@ async def mount_fn_second(coordinator): call_count = {"n": 0} - async def load_side_effect(module_id, config=None, source_hint=None): + async def 
load_side_effect(module_id, config=None, source_hint=None, coordinator=None): if module_id == "loop-basic": return AsyncMock(return_value=None) if module_id == "context-simple": @@ -453,7 +453,7 @@ async def load_side_effect(module_id, config=None, source_hint=None): ], } - coordinator = TestCoordinator() + coordinator = MockCoordinator() coordinator.loader = loader await initialize_session(config, coordinator, session_id="test", parent_id=None) @@ -498,7 +498,7 @@ async def test_session_py_no_instance_id_no_remap(): session = PyAmplifierSession(config, loader=loader) - tracking_coordinator = TestCoordinator() + tracking_coordinator = MockCoordinator() tracking_coordinator.loader = loader session.coordinator = tracking_coordinator diff --git a/tests/test_safe_exception_str_dedup.py b/tests/test_safe_exception_str_dedup.py new file mode 100644 index 0000000..95ff587 --- /dev/null +++ b/tests/test_safe_exception_str_dedup.py @@ -0,0 +1,40 @@ +""" +Tests that _safe_exception_str is not duplicated across session modules. + +session.py should import _safe_exception_str from _session_init, +not define its own copy. +""" + +import ast +from pathlib import Path + + +def test_safe_exception_str_not_defined_in_session_module(): + """session.py must not define _safe_exception_str locally.""" + session_path = ( + Path(__file__).parent.parent / "python" / "amplifier_core" / "session.py" + ) + tree = ast.parse(session_path.read_text()) + local_defs = [ + node.name + for node in ast.walk(tree) + if isinstance(node, ast.FunctionDef) and node.name == "_safe_exception_str" + ] + assert local_defs == [], ( + f"_safe_exception_str should be imported from _session_init, " + f"not defined locally in session.py. Found {len(local_defs)} local definition(s)." 
+ ) + + +def test_session_uses_safe_exception_str_from_session_init(): + """The _safe_exception_str used in session.py must be the one from _session_init.""" + from amplifier_core import _session_init + from amplifier_core import session + + assert hasattr(session, "_safe_exception_str"), ( + "session module must have _safe_exception_str available (via import)" + ) + assert session._safe_exception_str is _session_init._safe_exception_str, ( + "_safe_exception_str in session.py must be the exact same object " + "as in _session_init.py (imported, not duplicated)" + ) diff --git a/tests/test_session_init_delegation.py b/tests/test_session_init_delegation.py new file mode 100644 index 0000000..edbcd0a --- /dev/null +++ b/tests/test_session_init_delegation.py @@ -0,0 +1,80 @@ +"""Tests verifying AmplifierSession.initialize() delegates to _session_init.initialize_session().""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from amplifier_core.session import AmplifierSession as PyAmplifierSession + + +@pytest.fixture +def minimal_config(): + """Minimal valid configuration for delegation tests.""" + return { + "session": { + "orchestrator": "loop-basic", + "context": "context-simple", + }, + "providers": [], + "tools": [], + "hooks": [], + } + + +@pytest.mark.asyncio +async def test_initialize_delegates_to_session_init(minimal_config): + """initialize() calls _session_init.initialize_session() with correct args.""" + session = PyAmplifierSession(minimal_config) + + with patch( + "amplifier_core.session.initialize_session", new_callable=AsyncMock + ) as mock_init: + await session.initialize() + + mock_init.assert_called_once_with( + minimal_config, + session.coordinator, + session.session_id, + session.parent_id, + ) + + +@pytest.mark.asyncio +async def test_initialize_is_idempotent(minimal_config): + """Calling initialize() twice only delegates once.""" + session = PyAmplifierSession(minimal_config) + + with patch( + 
"amplifier_core.session.initialize_session", new_callable=AsyncMock + ) as mock_init: + await session.initialize() + await session.initialize() + + mock_init.assert_called_once() + + +@pytest.mark.asyncio +async def test_initialize_sets_initialized_flag(minimal_config): + """After successful delegation _initialized is True.""" + session = PyAmplifierSession(minimal_config) + + with patch("amplifier_core.session.initialize_session", new_callable=AsyncMock): + assert not session._initialized + await session.initialize() + assert session._initialized + + +@pytest.mark.asyncio +async def test_initialize_propagates_errors(minimal_config): + """If _session_init raises, error propagates and _initialized stays False.""" + session = PyAmplifierSession(minimal_config) + + with patch( + "amplifier_core.session.initialize_session", + new_callable=AsyncMock, + side_effect=RuntimeError("init failed"), + ): + with pytest.raises(RuntimeError, match="init failed"): + await session.initialize() + + assert not session._initialized diff --git a/tests/test_session_init_integration.py b/tests/test_session_init_integration.py new file mode 100644 index 0000000..c7f528e --- /dev/null +++ b/tests/test_session_init_integration.py @@ -0,0 +1,203 @@ +""" +Integration test: real session init loading pipeline. + +Exercises the real ModuleLoader.load() → source resolution → filesystem +discovery → mount path WITHOUT mocking the loader. This verifies that +``initialize_session`` actually wires up modules into the coordinator end-to-end. 
+""" + +import importlib +import os +import shutil +import sys +import tempfile + +import pytest + +from amplifier_core._session_init import initialize_session +from amplifier_core.loader import ModuleLoader +from amplifier_core.testing import MockCoordinator + + +# --------------------------------------------------------------------------- +# Fixture helpers +# --------------------------------------------------------------------------- + +ORCH_MODULE_NAME = "amplifier_module_test_orch" +CTX_MODULE_NAME = "amplifier_module_test_ctx" + +ORCH_INIT_PY = '''\ +__amplifier_module_type__ = "orchestrator" + + +async def mount(coordinator, config=None): + """Mount a fake orchestrator that echoes the prompt.""" + + class FakeOrch: + async def execute(self, prompt, context, providers, tools, hooks, **kwargs): + return f"echo: {prompt}" + + await coordinator.mount("orchestrator", FakeOrch()) + return None # no cleanup +''' + +CTX_INIT_PY = '''\ +__amplifier_module_type__ = "context" + + +async def mount(coordinator, config=None): + """Mount a fake context manager.""" + + class FakeCtx: + async def add_message(self, msg): + pass + + async def get_messages(self): + return [] + + async def get_messages_for_request(self, request=None): + return [] + + async def set_messages(self, msgs): + pass + + async def clear(self): + pass + + await coordinator.mount("context", FakeCtx()) + return None # no cleanup +''' + + +@pytest.fixture +def fixture_dir(): + """Create a temp directory with minimal orchestrator and context modules.""" + tmp = tempfile.mkdtemp(prefix="amp_integ_test_") + + # Create orchestrator package + orch_pkg = os.path.join(tmp, ORCH_MODULE_NAME) + os.makedirs(orch_pkg) + with open(os.path.join(orch_pkg, "__init__.py"), "w") as fh: + fh.write(ORCH_INIT_PY) + + # Create context package + ctx_pkg = os.path.join(tmp, CTX_MODULE_NAME) + os.makedirs(ctx_pkg) + with open(os.path.join(ctx_pkg, "__init__.py"), "w") as fh: + fh.write(CTX_INIT_PY) + + # Make modules importable + 
sys.path.insert(0, tmp) + importlib.invalidate_caches() + + yield tmp + + # Teardown: restore sys.path and evict cached modules + try: + sys.path.remove(tmp) + except ValueError: + pass + for name in [ORCH_MODULE_NAME, CTX_MODULE_NAME]: + sys.modules.pop(name, None) + + shutil.rmtree(tmp, ignore_errors=True) + + +# --------------------------------------------------------------------------- +# Integration tests +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_real_loader_mounts_orchestrator_and_context(fixture_dir): + """initialize_session() with a real ModuleLoader loads and mounts both + the orchestrator and context modules from the filesystem fixture.""" + config = { + "session": { + "orchestrator": "test-orch", + "context": "test-ctx", + }, + "providers": [], + "tools": [], + "hooks": [], + } + + coordinator = MockCoordinator() + loader = ModuleLoader(coordinator=coordinator) + coordinator.loader = loader + + await initialize_session( + config, coordinator, session_id="integ-test", parent_id=None + ) + + # Both modules must be mounted + orchestrator = coordinator.get("orchestrator") + context = coordinator.get("context") + + assert orchestrator is not None, "Orchestrator was not mounted by real loader" + assert context is not None, "Context manager was not mounted by real loader" + + +@pytest.mark.asyncio +async def test_real_loader_orchestrator_execute_works(fixture_dir): + """The mounted orchestrator's execute() method is callable and returns + the expected echo response, proving a real object (not a mock) was wired up.""" + config = { + "session": { + "orchestrator": "test-orch", + "context": "test-ctx", + }, + "providers": [], + "tools": [], + "hooks": [], + } + + coordinator = MockCoordinator() + loader = ModuleLoader(coordinator=coordinator) + coordinator.loader = loader + + await initialize_session( + config, coordinator, session_id="integ-test-2", parent_id=None + ) + + orchestrator 
= coordinator.get("orchestrator") + assert orchestrator is not None + + result = await orchestrator.execute( + "hello", + context=None, + providers=None, + tools=None, + hooks=None, + ) + assert result == "echo: hello", f"Unexpected orchestrator response: {result!r}" + + +@pytest.mark.asyncio +async def test_real_loader_session_init_creates_loader_if_none(fixture_dir): + """initialize_session() auto-creates a ModuleLoader when coordinator.loader + is None, and the pipeline still succeeds.""" + config = { + "session": { + "orchestrator": "test-orch", + "context": "test-ctx", + }, + "providers": [], + "tools": [], + "hooks": [], + } + + coordinator = MockCoordinator() + # Deliberately do NOT set coordinator.loader — let initialize_session create it + assert coordinator.loader is None, "Expected coordinator.loader to start as None" + + await initialize_session( + config, coordinator, session_id="integ-test-3", parent_id=None + ) + + assert coordinator.get("orchestrator") is not None, ( + "Orchestrator not mounted when loader was auto-created" + ) + assert coordinator.get("context") is not None, ( + "Context not mounted when loader was auto-created" + ) diff --git a/tests/test_wasm_integration.py b/tests/test_wasm_integration.py new file mode 100644 index 0000000..808552e --- /dev/null +++ b/tests/test_wasm_integration.py @@ -0,0 +1,167 @@ +"""Integration tests — load WASM fixtures through load_and_mount_wasm for all 6 module types. + +Uses real Rust _engine module (no mocks). May be slow on ARM64 due to WASM compilation. 
+""" + +import os +import shutil +import tempfile +from pathlib import Path + +import pytest + +FIXTURES_DIR = Path(__file__).parent / "fixtures" / "wasm" + +# Module-level skip if WASM fixtures not found +if not FIXTURES_DIR.exists(): + pytest.skip( + "WASM fixtures not found in tests/fixtures/wasm/", allow_module_level=True + ) + +try: + from amplifier_core._engine import RustCoordinator, load_and_mount_wasm # type: ignore[reportAttributeAccessIssue] +except ImportError: + pytest.skip( + "Rust _engine module not available (load_and_mount_wasm missing)", + allow_module_level=True, + ) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _get_coordinator(): + """Create a real RustCoordinator with a fake session. + + session_id='test-session', parent_id=None, config={}. + """ + + class _FakeSession: + session_id = "test-session" + parent_id = None + config = {} + + return RustCoordinator(session=_FakeSession()) + + +def _isolated_wasm_dir(wasm_filename: str) -> str: + """Create a temp directory containing only the given .wasm fixture (symlink). + + Returns the temp directory path. Caller must clean up. + Skips the test if the fixture file does not exist. 
+ """ + src = FIXTURES_DIR / wasm_filename + if not src.exists(): + pytest.skip(f"WASM fixture not found: {src}") + tmpdir = tempfile.mkdtemp(prefix=f"wasm_{wasm_filename.replace('.wasm', '')}_") + os.symlink(str(src.resolve()), os.path.join(tmpdir, wasm_filename)) + return tmpdir + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +@pytest.mark.slow +@pytest.mark.asyncio +async def test_load_echo_tool_wasm(): + """Load echo-tool.wasm via load_and_mount_wasm — tool module.""" + tmpdir = _isolated_wasm_dir("echo-tool.wasm") + try: + coord = _get_coordinator() + result = load_and_mount_wasm(coord, tmpdir) + + assert result["status"] == "mounted" + assert result["module_type"] == "tool" + assert result["name"] == "echo-tool" + + tool = coord.mount_points["tools"]["echo-tool"] + assert hasattr(tool, "name") + assert hasattr(tool, "get_spec") + assert hasattr(tool, "execute") + assert tool.name == "echo-tool" + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + + +@pytest.mark.slow +@pytest.mark.asyncio +async def test_load_echo_provider_wasm(): + """Load echo-provider.wasm via load_and_mount_wasm — provider module.""" + tmpdir = _isolated_wasm_dir("echo-provider.wasm") + try: + coord = _get_coordinator() + result = load_and_mount_wasm(coord, tmpdir) + + assert result["status"] == "mounted" + assert result["module_type"] == "provider" + assert len(coord.mount_points["providers"]) > 0 + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + + +@pytest.mark.slow +@pytest.mark.asyncio +async def test_load_memory_context_wasm(): + """Load memory-context.wasm via load_and_mount_wasm — context module.""" + tmpdir = _isolated_wasm_dir("memory-context.wasm") + try: + coord = _get_coordinator() + result = load_and_mount_wasm(coord, tmpdir) + + assert result["status"] == "mounted" + assert result["module_type"] == "context" + assert 
coord.mount_points["context"] is not None + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + + +@pytest.mark.slow +@pytest.mark.asyncio +async def test_load_passthrough_orchestrator_wasm(): + """Load passthrough-orchestrator.wasm via load_and_mount_wasm — orchestrator module.""" + tmpdir = _isolated_wasm_dir("passthrough-orchestrator.wasm") + try: + coord = _get_coordinator() + result = load_and_mount_wasm(coord, tmpdir) + + assert result["status"] == "mounted" + assert result["module_type"] == "orchestrator" + assert coord.mount_points["orchestrator"] is not None + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + + +@pytest.mark.slow +@pytest.mark.asyncio +async def test_load_deny_hook_wasm(): + """Load deny-hook.wasm via load_and_mount_wasm — hook module.""" + tmpdir = _isolated_wasm_dir("deny-hook.wasm") + try: + coord = _get_coordinator() + result = load_and_mount_wasm(coord, tmpdir) + + assert result["module_type"] == "hook" + assert result["status"] == "loaded" + assert "wrapper" in result + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + + +@pytest.mark.slow +@pytest.mark.asyncio +async def test_load_auto_approve_wasm(): + """Load auto-approve.wasm via load_and_mount_wasm — approval module.""" + tmpdir = _isolated_wasm_dir("auto-approve.wasm") + try: + coord = _get_coordinator() + result = load_and_mount_wasm(coord, tmpdir) + + assert result["module_type"] == "approval" + assert result["status"] == "loaded" + assert "wrapper" in result + finally: + shutil.rmtree(tmpdir, ignore_errors=True) diff --git a/tests/test_wasm_provider_binding.py b/tests/test_wasm_provider_binding.py new file mode 100644 index 0000000..f485ad8 --- /dev/null +++ b/tests/test_wasm_provider_binding.py @@ -0,0 +1,57 @@ +"""Tests for PyWasmProvider (WasmProvider) Rust binding. + +Verifies that WasmProvider is exported from _engine and has the expected +interface: name property, get_info(), list_models(), complete(), parse_tool_calls(), +and __repr__. 
+""" + + +class TestWasmProviderExport: + """WasmProvider must be importable from the Rust _engine module.""" + + def test_wasm_provider_class_exists(self): + """WasmProvider class must be exported from _engine.""" + from amplifier_core._engine import WasmProvider + + assert WasmProvider is not None + + def test_wasm_provider_has_name_property(self): + """WasmProvider must expose a 'name' property.""" + from amplifier_core._engine import WasmProvider + + # name should be a defined descriptor (getter) on the class + assert hasattr(WasmProvider, "name"), "WasmProvider missing 'name' property" + + def test_wasm_provider_has_get_info(self): + """WasmProvider must have a get_info method.""" + from amplifier_core._engine import WasmProvider + + assert hasattr(WasmProvider, "get_info"), "WasmProvider missing 'get_info'" + + def test_wasm_provider_has_list_models(self): + """WasmProvider must have a list_models method.""" + from amplifier_core._engine import WasmProvider + + assert hasattr(WasmProvider, "list_models"), ( + "WasmProvider missing 'list_models'" + ) + + def test_wasm_provider_has_complete(self): + """WasmProvider must have a complete method.""" + from amplifier_core._engine import WasmProvider + + assert hasattr(WasmProvider, "complete"), "WasmProvider missing 'complete'" + + def test_wasm_provider_has_parse_tool_calls(self): + """WasmProvider must have a parse_tool_calls method.""" + from amplifier_core._engine import WasmProvider + + assert hasattr(WasmProvider, "parse_tool_calls"), ( + "WasmProvider missing 'parse_tool_calls'" + ) + + def test_wasm_provider_has_repr(self): + """WasmProvider must have a __repr__ method.""" + from amplifier_core._engine import WasmProvider + + assert hasattr(WasmProvider, "__repr__"), "WasmProvider missing '__repr__'" diff --git a/tests/validate_rust_kernel.py b/tests/validate_rust_kernel.py index c4b51a1..9158af0 100644 --- a/tests/validate_rust_kernel.py +++ b/tests/validate_rust_kernel.py @@ -356,11 +356,11 @@ async def 
handler(event, data): # C1: gRPC loader infrastructure print("C1: gRPC loader") try: - from amplifier_core.loader_dispatch import load_module, _detect_transport + from amplifier_core.loader import ModuleLoader - check("loader_dispatch importable", True) + check("loader transport dispatch importable", True) except ImportError as e: - check("loader_dispatch importable", False, str(e)) + check("loader transport dispatch importable", False, str(e)) try: from amplifier_core.loader_grpc import GrpcToolBridge, load_grpc_module diff --git a/uv.lock b/uv.lock index cbfa13c..f70c947 100644 --- a/uv.lock +++ b/uv.lock @@ -4,7 +4,7 @@ requires-python = ">=3.11" [[package]] name = "amplifier-core" -version = "1.0.7" +version = "1.1.1" source = { editable = "." } dependencies = [ { name = "click" }, diff --git a/wit/amplifier-modules.wit b/wit/amplifier-modules.wit index ff6bdbd..963c497 100644 --- a/wit/amplifier-modules.wit +++ b/wit/amplifier-modules.wit @@ -26,6 +26,32 @@ interface hook-handler { /// Handle a lifecycle event (HookHandleRequest proto, serialized). /// Returns proto-serialized HookResult on success. handle: func(event: list<u8>) -> result<list<u8>, string>; + + /// Return the events this hook wants to receive, along with priority + /// and a human-readable name for each subscription. + /// + /// `config` is the module's JSON configuration blob (from bundle YAML), + /// serialized as bytes so the hook can adjust its subscriptions at + /// load time. + /// + /// Old hook modules compiled against the previous WIT (before this + /// function was added) will not export `get-subscriptions`. The host + /// detects this and falls back to a wildcard subscription so those + /// modules continue to receive every event without recompilation. + /// + // Future enhancement: If hooks need to read coordinator state during + // registration (e.g., conditionally subscribe based on mounted providers), + // add register-hook to the kernel-service host import interface.
This + // would allow hook-module worlds to import kernel-service alongside + // exporting hook-handler, enabling imperative registration matching + // the Python coordinator.hooks.register() pattern. + get-subscriptions: func(config: list<u8>) -> list<event-subscription>; +} + +record event-subscription { + event: string, + priority: s32, + name: string, } /// Context manager interface — owns conversation memory policy.