diff --git a/.gitignore b/.gitignore index df1de68..debf18f 100644 --- a/.gitignore +++ b/.gitignore @@ -186,3 +186,8 @@ logs/ pkgs/bay/ship_data/ pkgs/bay/scripts/ pkgs/bay/tests/k8s/k8s-deploy-local.yaml + +.kilocode/ +openspec/ +AGENTS.md +plans/mydoc/ diff --git a/docs/CHANGELOG-SESSION-FIRST.md b/docs/CHANGELOG-SESSION-FIRST.md new file mode 100644 index 0000000..1056905 --- /dev/null +++ b/docs/CHANGELOG-SESSION-FIRST.md @@ -0,0 +1,1110 @@ +# Shipyard Session-First 重构变更文档 + +## 概述 + +本次重构实现了三个核心功能: +1. **1:1 Session-Ship 绑定** - 简化架构,每个会话独占一个容器 +2. **Execution History** - 记录执行历史,支持 Agent 技能库构建 +3. **MCP Server 集成** - 提供 stdio 传输的 MCP 服务器 + +## 变更目的 + +### 1. 1:1 Session-Ship 绑定 + +**问题背景:** +- 原设计支持多个会话共享一个 Ship(容器),通过 `max_session_num` 控制 +- 多会话共享增加了状态管理复杂度和潜在的隔离问题 +- Agent 场景下,每个任务需要独立的环境 + +**解决方案:** +- 移除多会话共享逻辑,每个 Session 绑定一个专属 Ship +- 引入 Warm Pool(预热池)弥补冷启动延迟 + +### 2. Execution History(执行历史) + +**问题背景:** +- 受 VOYAGER 论文启发,Agent 需要记录成功的执行路径来构建技能库 +- 技能自我进化需要:代码/命令 + 成功状态 + 执行时间 + +**解决方案:** +- Bay 侧存储执行历史(对 Ship 透明) +- 记录 Python 和 Shell 执行的完整信息 +- 提供查询 API 支持技能库构建 + +### 3. MCP Server 集成 + +**问题背景:** +- MCP (Model Context Protocol) 已成为行业标准(2025年12月捐赠给 Linux Foundation) +- OpenAI (2025年3月)、Google DeepMind (2025年4月) 均已采用 +- Claude Desktop、ChatGPT Desktop、Cursor、VS Code 等工具支持 MCP 协议 + +**解决方案:** +- 使用官方 MCP Python SDK (`mcp` 包) 实现标准 MCP 服务器 +- 支持 stdio 和 streamable-http 两种传输方式 +- 提供 Python 执行、Shell 执行、文件操作等工具 +- 发布 npm 包 `shipyard-mcp` 用于快速安装 + +### 4. 
SDK 架构重构 + +**问题背景:** +- 原 SDK (`ShipyardClient`) 暴露了过多底层细节 +- MCP Server 和 SDK 代码重复 +- 需要更简洁的接口给开发者使用 + +**解决方案:** +- 新增统一 `Sandbox` 类作为主要入口 +- MCP Server 内部使用 SDK,避免代码重复 +- 保留 `ShipyardClient` 作为低级 API + +--- + +## 详细变更 + +### Bay 服务端 + +#### 模型变更 (`pkgs/bay/app/models.py`) + +**移除字段:** +```python +# Ship 模型 +- max_session_num: int = Field(default=1) +- current_session_num: int = Field(default=0) +``` + +**新增模型:** +```python +class ExecutionHistory(SQLModel, table=True): + """执行历史记录,用于 Agent 技能库构建""" + id: str # 主键 + session_id: str # 关联的会话 ID + ship_id: str # 执行的 Ship ID + exec_type: str # 'python' 或 'shell' + code: str # 执行的代码/命令 + success: bool # 是否成功 + execution_time_ms: int # 执行耗时(毫秒) + output: Optional[str] # 输出(可选存储) + error: Optional[str] # 错误信息 + created_at: datetime # 创建时间 + +class ExecutionHistoryEntry(BaseModel): + """API 响应模型""" + id: str + exec_type: str + code: str + success: bool + execution_time_ms: int + created_at: datetime + +class ExecutionHistoryResponse(BaseModel): + """执行历史查询响应""" + entries: List[ExecutionHistoryEntry] + total: int +``` + +#### 数据库服务 (`pkgs/bay/app/database.py`) + +**移除方法:** +- `find_available_ship()` - 查找可用 Ship(多会话共享逻辑) +- `increment_ship_session_count()` - 增加会话计数 +- `decrement_ship_session_count()` - 减少会话计数 + +**新增方法:** +```python +async def find_ship_for_session(session_id: str) -> Optional[Ship] + """查找会话已绑定的 Ship(1:1 绑定)""" + +async def find_warm_pool_ship() -> Optional[Ship] + """从预热池获取可用 Ship""" + +async def count_warm_pool_ships() -> int + """统计预热池中的 Ship 数量""" + +async def create_execution_history( + session_id: str, + ship_id: str, + exec_type: str, + code: str, + success: bool, + execution_time_ms: int, + output: Optional[str] = None, + error: Optional[str] = None, +) -> ExecutionHistory + """创建执行历史记录""" + +async def get_execution_history( + session_id: str, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, +) -> Tuple[List[ExecutionHistory], int] + 
"""查询执行历史""" +``` + +#### Ship 服务 (`pkgs/bay/app/services/ship/service.py`) + +**简化逻辑:** +- `create_ship()` - 移除多会话分配逻辑,实现 1:1 绑定 +- 优先复用会话已绑定的 Ship +- 其次从 Warm Pool 分配 +- 最后创建新容器 + +**新增 Warm Pool 功能:** +```python +async def start_warm_pool() + """启动预热池后台任务""" + +async def stop_warm_pool() + """停止预热池""" + +async def _replenish_warm_pool() + """补充预热池到目标数量""" + +async def _create_warm_pool_ship() -> Ship + """创建预热池 Ship""" + +async def _assign_ship_to_session(ship: Ship, session_id: str, ttl: int) + """将 Ship 分配给会话""" +``` + +**执行历史记录:** +- `execute_operation()` 中添加执行历史记录逻辑 + +#### 配置 (`pkgs/bay/app/config.py`) + +**新增配置项:** +```python +warm_pool_enabled: bool = True # 是否启用预热池 +warm_pool_min_size: int = 2 # 最小预热数量 +warm_pool_max_size: int = 10 # 最大预热数量 +warm_pool_replenish_interval: int = 30 # 补充检查间隔(秒) +``` + +#### 路由 (`pkgs/bay/app/routes/sessions.py`) + +**新增端点:** +```python +@router.get("/sessions/{session_id}/history") +async def get_execution_history( + session_id: str, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, +) -> ExecutionHistoryResponse + """获取会话执行历史""" +``` + +--- + +### Ship 容器端 + +#### IPython 组件 (`pkgs/ship/app/components/ipython.py`) + +**ExecuteCodeResponse 新增字段:** +```python +code: str # 执行的代码 +execution_time_ms: int # 执行耗时(毫秒) +``` + +#### Shell 组件 (`pkgs/ship/app/components/shell.py`) + +**ExecuteShellResponse 新增字段:** +```python +command: str # 执行的命令 +execution_time_ms: int # 执行耗时(毫秒) +``` + +#### 用户管理器 (`pkgs/ship/app/components/user_manager.py`) + +**ProcessResult 新增字段:** +```python +command: str # 执行的命令 +execution_time_ms: int # 执行耗时(毫秒) +``` + +--- + +### Python SDK + +#### 新增 Sandbox 类 (`shipyard_python_sdk/shipyard/sandbox.py`) + +**统一入口类:** +```python +class Sandbox: + """ + 简化的沙箱接口,连接 Bay 服务执行代码。 + + Usage: + async with Sandbox() as sandbox: + result = await sandbox.python.exec("print('hello')") + print(result.stdout) + """ + + def __init__( + self, + endpoint: Optional[str] = 
None, # Bay API URL + token: Optional[str] = None, # 访问令牌 + ttl: int = 3600, # 会话 TTL + session_id: Optional[str] = None, # 会话 ID(自动生成) + ) + + # 组件接口 + python: PythonExecutor # sandbox.python.exec(code) + shell: ShellExecutor # sandbox.shell.exec(command) + fs: FileSystem # sandbox.fs.read/write/list + + # 方法 + async def start() -> Sandbox + async def stop() -> None + async def extend_ttl(ttl: int) -> None + async def get_execution_history(...) -> Dict +``` + +**执行结果类:** +```python +@dataclass +class ExecResult: + success: bool + stdout: str = "" + stderr: str = "" + result: Any = None + exit_code: int = 0 + execution_time_ms: int = 0 + code: str = "" +``` + +**便捷函数:** +```python +async def run_python(code: str, **kwargs) -> ExecResult +async def run_shell(command: str, **kwargs) -> ExecResult +``` + +#### Client (`shipyard_python_sdk/shipyard/client.py`) + +**变更方法:** +```python +async def create_ship( + ttl: int, + spec: Optional[Spec] = None, + max_session_num: int | None = None, # 已弃用,添加警告 + session_id: Optional[str] = None, + force_create: bool = False, +) -> SessionShip +``` + +**新增方法:** +```python +async def get_or_create_session( + session_id: str, + ttl: int = 3600, + spec: Optional[Spec] = None, +) -> SessionShip + """推荐的 Session-First API""" + +def session( + session_id: str, + ttl: int = 3600, + spec: Optional[Spec] = None, +) -> SessionContext + """上下文管理器方式使用会话""" + +async def get_execution_history( + session_id: str, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, +) -> Dict[str, Any] + """获取执行历史""" +``` + +**新增类:** +```python +class SessionContext: + """会话上下文管理器""" + async def __aenter__(self) -> SessionShip + async def __aexit__(self, ...) 
+``` + +#### Session (`shipyard_python_sdk/shipyard/session.py`) + +**新增方法:** +```python +async def get_execution_history( + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, +) -> Dict[str, Any] + """获取当前会话的执行历史""" + +@property +def session_id(self) -> str + """获取会话 ID""" +``` + +#### Types (`shipyard_python_sdk/shipyard/types.py`) + +**移除属性:** +- `max_session_num` +- `current_session_num` + +**新增属性:** +- `expires_at` + +#### Utils (`shipyard_python_sdk/shipyard/utils.py`) + +**更新函数签名:** +```python +async def create_session_ship( + ttl: int = 3600, + spec: Optional[Spec] = None, + max_session_num: int | None = None, # 已弃用 + endpoint_url: Optional[str] = None, + access_token: Optional[str] = None, + session_id: Optional[str] = None, + force_create: bool = False, +) -> SessionShip +``` + +--- + +### MCP Server(新增) + +#### 架构 + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ Agent / LLM │ +└───────────────────────────────┬─────────────────────────────────┘ + │ + ┌─────────────────┴─────────────────┐ + │ │ + ▼ ▼ + ┌─────────────────┐ ┌─────────────────┐ + │ MCP Protocol │ │ 开发者代码 │ + │ (Claude/Cursor) │ │ (自研 Agent) │ + └────────┬────────┘ └────────┬────────┘ + │ │ + ▼ ▼ + ┌─────────────────┐ ┌─────────────────┐ + │ MCP Server │────────────────►│ SDK │ + │ │ 内部使用 SDK │ Sandbox │ + └────────┬────────┘ └────────┬────────┘ + │ │ + └───────────────┬───────────────────┘ + │ + ▼ + ┌─────────────────┐ + │ Bay │ + │ - Pool 管理 │ + │ - Session 状态 │ + │ - 执行历史 │ + └────────┬────────┘ + │ + ▼ + ┌─────────────────┐ + │ Ship │ + │ (Python/Shell) │ + └─────────────────┘ +``` + +#### 组件职责 + +| 组件 | 职责 | +|------|------| +| **MCP Server** | 规范化输入输出,让 MCP 客户端能调用沙箱 | +| **SDK (Sandbox)** | Python 开发者构建 Agent 时使用 | +| **Bay** | 容器池管理、Session 状态、执行历史 | +| **Ship** | 实际执行 Python/Shell | + +#### 文件结构 + +**Bay 内置 MCP Server(使用 FastMCP):** +``` +pkgs/bay/app/mcp/ +├── __init__.py +├── server.py # MCP 服务器,内部使用 SDK 
Sandbox 类 +└── run.py # 入口点 +``` + +**npm 包独立 MCP Server:** +``` +pkgs/mcp-server/ +├── bin/ +│ └── shipyard-mcp.js # Node.js CLI 入口 +├── python/ +│ ├── __init__.py +│ ├── __main__.py +│ └── server.py # 独立 Python MCP 服务器(内置精简 SDK) +├── package.json +└── README.md +``` + +#### 提供的 MCP 工具 + +| 工具 | 描述 | +|------|------| +| `execute_python` | 执行 Python 代码 | +| `execute_shell` | 执行 Shell 命令 | +| `read_file` | 读取文件内容 | +| `write_file` | 写入文件 | +| `list_files` | 列出目录内容 | +| `install_package` | 通过 pip 安装包 | +| `get_sandbox_info` | 获取沙箱信息 | +| `get_execution_history` | 获取执行历史 | + +#### 使用方式 +```bash +# 环境变量 +export SHIPYARD_ENDPOINT=http://localhost:8156 +export SHIPYARD_TOKEN=secret-token + +# 运行 +python -m app.mcp.run +``` + +--- + +### Dashboard 前端 + +#### 类型定义 (`types/api.ts`) + +**移除字段:** +- `max_session_num` +- `current_session_num` + +#### 创建 Ship 表单 (`views/ship-create/useCreateShip.ts`) + +- 移除 `maxSessionNum` 表单字段 + +#### Ship 详情页 (`views/ship-detail/index.vue`) + +- 移除会话计数显示 + +#### Ship 列表页 (`views/ships/index.vue`) + +- 移除会话数量列 + +--- + +### 单元测试 + +#### `pkgs/bay/tests/unit/test_ships.py` + +**移除测试:** +- `test_create_ship_with_max_session_num` +- `test_find_available_ship` +- `test_session_count_increment_decrement` + +**新增测试:** +- `test_execution_history_creation` +- `test_execution_history_query` +- `test_warm_pool_ship_allocation` + +--- + +## SDK 使用示例 + +### 新方式:Sandbox 类(推荐) + +**基本使用:** +```python +from shipyard import Sandbox + +async with Sandbox() as sandbox: + result = await sandbox.python.exec("print('hello')") + print(result.stdout) # hello +``` + +**自定义配置:** +```python +from shipyard import Sandbox + +async with Sandbox( + endpoint="http://bay.example.com:8156", + token="your-token", + ttl=7200, + session_id="my-session-123" +) as sandbox: + # Python 执行 + result = await sandbox.python.exec("import pandas; print(pandas.__version__)") + + # Shell 执行 + result = await sandbox.shell.exec("ls -la") + + # 文件操作 + await 
sandbox.fs.write("/workspace/test.py", "print('hello')") + content = await sandbox.fs.read("/workspace/test.py") + + # 执行历史 + history = await sandbox.get_execution_history(success_only=True) +``` + +**一行代码:** +```python +from shipyard import run_python, run_shell + +result = await run_python("print('hello')") +result = await run_shell("ls -la") +``` + +### 旧方式:ShipyardClient(仍支持) + +```python +# 低级 API,提供更多控制 +from shipyard import ShipyardClient + +client = ShipyardClient(endpoint_url, access_token) +ship = await client.create_ship(ttl=3600) +result = await ship.python.exec("print('hello')") +``` + +### 新方式(推荐) + +**方式一:get_or_create_session** +```python +session = await client.get_or_create_session( + session_id="my-task-123", + ttl=3600 +) +result = await session.python.exec("print('hello')") + +# 获取执行历史(用于技能库) +history = await session.get_execution_history(success_only=True) +``` + +**方式二:上下文管理器** +```python +async with client.session("my-task-123") as session: + result = await session.python.exec("print('hello')") + # 会话结束后资源由 TTL 管理 +``` + +**方式三:便捷函数** +```python +from shipyard import create_session_ship + +session = await create_session_ship( + session_id="my-task-123", + ttl=3600 +) +``` + +--- + +## 技能库构建示例 + +基于 VOYAGER 论文思想,使用执行历史构建技能库: + +```python +# 获取成功的 Python 执行记录 +history = await session.get_execution_history( + exec_type="python", + success_only=True +) + +# 筛选高效代码(执行时间短) +efficient_skills = [ + entry for entry in history["entries"] + if entry["execution_time_ms"] < 1000 +] + +# 存入技能库 +for skill in efficient_skills: + skill_library.add( + code=skill["code"], + execution_time_ms=skill["execution_time_ms"], + created_at=skill["created_at"] + ) +``` + +--- + +## 迁移指南 + +### 1. 移除 max_session_num + +如果代码中使用了 `max_session_num` 参数: +```python +# 旧代码(会产生弃用警告) +ship = await client.create_ship(ttl=3600, max_session_num=3) + +# 新代码 +session = await client.get_or_create_session( + session_id="my-session", + ttl=3600 +) +``` + +### 2. 
使用 Session-First API + +```python +# 旧模式:Ship 优先 +ship = await client.create_ship(ttl=3600) + +# 新模式:Session 优先 +session = await client.get_or_create_session("my-session", ttl=3600) +``` + +### 3. 利用执行历史 + +```python +# 记录自动进行,无需额外代码 +result = await session.python.exec(code) + +# 查询历史 +history = await session.get_execution_history() +``` + +### 4. 使用 MCP Server + +**方式一:npm 包安装** +```bash +# 全局安装 +npm install -g shipyard-mcp + +# 运行 +SHIPYARD_TOKEN=your-token shipyard-mcp +``` + +**方式二:Python 模块运行** +```bash +cd pkgs/bay +pip install -e . +python -m app.mcp.run +``` + +**方式三:HTTP 模式部署** +```bash +shipyard-mcp --transport http --port 8000 +``` + +### 5. 使用新 SDK Sandbox 类 + +```python +# 旧方式 +from shipyard import ShipyardClient +client = ShipyardClient(endpoint, token) +ship = await client.create_ship(ttl=3600) +result = await ship.python.exec(code) + +# 新方式(推荐) +from shipyard import Sandbox +async with Sandbox() as sandbox: + result = await sandbox.python.exec(code) +``` + +--- + +## MCP 客户端配置 + +### Claude Desktop + +`~/.config/claude/claude_desktop_config.json`: +```json +{ + "mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +### Cursor + +`~/.cursor/mcp.json`: +```json +{ + "mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +### ChatGPT Desktop / VS Code + +参考 `pkgs/mcp-server/README.md` 获取详细配置说明。 + +--- + +## 参考文献 + +1. **VOYAGER** (2023) - "VOYAGER: An Open-Ended Embodied Agent with Large Language Models" + - 技能库自动构建 + - 代码 + 成功状态 + 执行时间的记录模式 + +2. **Reflexion** (2023) - "Reflexion: Language Agents with Verbal Reinforcement Learning" + - 执行反馈用于 Agent 自我改进 + +3. 
**LearnAct** (2024) - "LearnAct: Few-Shot Mobile App Testing" + - 从执行历史中学习可复用技能 + +--- + +## MCP HTTP 模式多客户端 Session 隔离 + +### 问题背景 + +当 MCP Server 以 HTTP 模式(`--transport http`)部署时,多个客户端连接到同一个服务器进程。原有实现使用 `lifespan` 创建单一 Sandbox,导致所有客户端共享同一个 Session,存在: + +1. **状态污染**: 不同客户端共享 Python 变量、文件系统 +2. **安全风险**: 一个客户端可以访问/修改另一个客户端的数据 +3. **资源冲突**: 包安装、文件操作相互影响 + +### 解决方案 + +使用 FastMCP 原生的 per-session state 机制实现客户端隔离: + +**架构变更:** +``` +# 旧架构(所有客户端共享) +lifespan 启动 → Sandbox (session-123, ship-456) + ↑ +客户端 A ──────┘ +客户端 B ──────┘ + +# 新架构(每客户端独立) +lifespan → GlobalConfig (endpoint, token) + +客户端 A (mcp-session-aaa) → ctx.get_state → Sandbox A (ship-111) +客户端 B (mcp-session-bbb) → ctx.get_state → Sandbox B (ship-222) +``` + +**核心实现:** +```python +@dataclass +class GlobalConfig: + """全局配置,在 lifespan 中初始化""" + endpoint: str + token: str + default_ttl: int = 1800 # 30 分钟 + +@asynccontextmanager +async def mcp_lifespan(server: FastMCP) -> AsyncIterator[GlobalConfig]: + # 只存储配置,不创建 Sandbox + yield GlobalConfig(endpoint=endpoint, token=token) + +async def get_or_create_sandbox(ctx: Context) -> Sandbox: + """获取或创建当前 MCP session 的 Sandbox""" + sandbox = await ctx.get_state("sandbox") + + if sandbox is None: + config = ctx.request_context.lifespan_context + sandbox = Sandbox( + endpoint=config.endpoint, + token=config.token, + session_id=ctx.session_id, # 使用 MCP session ID + ttl=config.default_ttl, + ) + await sandbox.start() + await ctx.set_state("sandbox", sandbox) + else: + # 续期 TTL + await sandbox.extend_ttl(config.default_ttl) + + return sandbox +``` + +### Session 清理机制 + +- **TTL 自动续期**: 每次 tool 调用时自动续期(每 10 分钟) +- **过期自动清理**: 如果客户端断开且 TTL 到期,Bay 自动清理 Ship +- **失效重建**: 如果 Sandbox 已过期,自动创建新的 + +### 配置 + +新增环境变量: +- `SHIPYARD_SANDBOX_TTL`: Sandbox TTL 秒数(默认 1800,即 30 分钟) + +### 兼容性 + +- **stdio 模式**: 无影响(一个进程 = 一个 session = 一个 Sandbox) +- **HTTP 模式**: 每个 MCP 客户端获得独立的 Sandbox +- **现有配置**: 无需修改 + +### 验证 + +HTTP 模式隔离测试: +```python +async def test_http_mode_isolation(): + """测试 
HTTP 模式下多客户端隔离""" + async with aiohttp.ClientSession() as client_a: + async with aiohttp.ClientSession() as client_b: + # A 设置变量 + await call_tool(client_a, "execute_python", {"code": "x = 123"}) + + # B 看不到 A 的变量 + result = await call_tool(client_b, "execute_python", {"code": "print(x)"}) + assert "NameError" in result +``` + +--- + +## 技能库增强(Skill Library Enhancement) + +### 问题背景 + +原有的执行历史功能仅提供基础的 `get_execution_history` 查询,无法满足 Agent 构建技能库的完整需求: + +1. **执行记录不完整**: 无法精确获取单条执行的完整代码 +2. **缺乏标注能力**: Agent 无法为执行记录添加描述、标签或笔记 +3. **查询粒度粗**: 无法按标签或有无笔记过滤 + +### 解决方案 + +基于 VOYAGER、Reflexion、LearnAct 论文的需求分析,增强执行历史功能: + +**核心原则**: Sandbox 是执行环境,提供完整的执行素材;Agent 负责分析和学习。 + +### 新增功能 + +#### 1. 增强执行返回值 + +`execute_python` 和 `execute_shell` 新增参数: + +```python +@mcp.tool() +async def execute_python( + code: str, + timeout: int = 30, + include_code: bool = False, # 返回执行的代码和 execution_id + description: str = None, # 代码描述(存入执行历史) + tags: str = None, # 标签(逗号分隔) +) -> str: + """执行 Python 代码 + + 当 include_code=True 时,返回格式: + execution_id: abc-123 + Code: + print('hello') + + Output: + hello + + Execution time: 5ms + """ +``` + +#### 2. 精确查询工具 + +**get_execution**: 按 ID 查询单条记录 +```python +@mcp.tool() +async def get_execution(execution_id: str) -> str: + """获取指定 ID 的执行记录的完整信息""" +``` + +**get_last_execution**: 获取最近一次执行 +```python +@mcp.tool() +async def get_last_execution(exec_type: str = None) -> str: + """获取最近一次执行的完整记录,可按类型过滤""" +``` + +#### 3. 标注工具 + +**annotate_execution**: 为执行记录添加标注 +```python +@mcp.tool() +async def annotate_execution( + execution_id: str, + description: str = None, # 描述 + tags: str = None, # 标签 + notes: str = None, # Agent 笔记 +) -> str: + """为执行记录添加/更新元数据""" +``` + +#### 4. 
增强查询 + +**get_execution_history** 新增过滤参数: +```python +@mcp.tool() +async def get_execution_history( + exec_type: str = None, + success_only: bool = False, + limit: int = 50, + tags: str = None, # 按标签过滤(任意匹配) + has_notes: bool = False, # 只返回有笔记的 + has_description: bool = False, # 只返回有描述的 +) -> str: +``` + +### 数据模型变更 + +**ExecutionHistory 新增字段:** +```python +class ExecutionHistory(SQLModel, table=True): + # ... 原有字段 ... + description: Optional[str] # 执行描述 + tags: Optional[str] # 标签(逗号分隔) + notes: Optional[str] # Agent 笔记 +``` + +**ExecutionHistoryEntry 同步更新:** +```python +class ExecutionHistoryEntry(BaseModel): + # ... 原有字段 ... + description: Optional[str] = None + tags: Optional[str] = None + notes: Optional[str] = None +``` + +### 数据库迁移 + +新增迁移脚本:`pkgs/bay/alembic/versions/001_add_execution_history_metadata.py` + +```python +def upgrade(): + op.add_column('execution_history', sa.Column('description', sa.String())) + op.add_column('execution_history', sa.Column('tags', sa.String())) + op.add_column('execution_history', sa.Column('notes', sa.String())) +``` + +### API 变更 + +**新增端点:** +- `GET /sessions/{session_id}/history/{execution_id}` - 获取单条记录 +- `GET /sessions/{session_id}/history/last` - 获取最近一条 +- `PATCH /sessions/{session_id}/history/{execution_id}` - 更新标注 + +**更新端点:** +- `GET /sessions/{session_id}/history` - 新增 `tags`, `has_notes`, `has_description` 参数 + +### SDK 变更 + +**Sandbox 类新增方法:** +```python +async def get_execution(self, execution_id: str) -> Dict +async def get_last_execution(self, exec_type: str = None) -> Dict +async def annotate_execution( + self, + execution_id: str, + description: str = None, + tags: str = None, + notes: str = None, +) -> Dict +``` + +**get_execution_history 新增参数:** +```python +async def get_execution_history( + self, + exec_type: str = None, + success_only: bool = False, + limit: int = 100, + tags: str = None, # 新增 + has_notes: bool = False, # 新增 + has_description: bool = False, # 新增 +) -> Dict +``` + 
+**PythonExecutor/ShellExecutor 新增参数:** +```python +async def exec( + self, + code: str, + timeout: int = 30, + description: str = None, # 新增 + tags: str = None, # 新增 +) -> ExecResult +``` + +**ExecResult 新增字段:** +```python +@dataclass +class ExecResult: + # ... 原有字段 ... + execution_id: Optional[str] = None # 新增:用于精确查询 +``` + +### 使用示例 + +**基础工作流:** +```python +# 1. 执行代码(自动记录) +result = await sandbox.python.exec( + "import pandas as pd; df = pd.read_csv('data.csv')", + description="加载数据文件", + tags="data-processing,pandas" +) + +# 2. 获取完整记录 +entry = await sandbox.get_last_execution() +print(entry["code"]) # 完整代码 + +# 3. 添加笔记 +await sandbox.annotate_execution( + entry["id"], + notes="这段代码可以复用于任何 CSV 文件加载" +) +``` + +**技能库构建:** +```python +# 获取所有带笔记的成功执行(这些是 Agent 认为有价值的) +history = await sandbox.get_execution_history( + success_only=True, + has_notes=True, +) + +# 构建技能库 +for entry in history["entries"]: + skill_library.add( + code=entry["code"], + description=entry["description"], + tags=entry["tags"].split(",") if entry["tags"] else [], + notes=entry["notes"], + ) +``` + +**按标签检索:** +```python +# 获取所有数据处理相关的执行 +history = await sandbox.get_execution_history( + tags="data-processing,etl", # 匹配任一标签 + success_only=True, +) +``` + +### MCP 工具列表(更新后) + +| 工具 | 描述 | 新增参数 | +|------|------|----------| +| `execute_python` | 执行 Python 代码 | `include_code`, `description`, `tags` | +| `execute_shell` | 执行 Shell 命令 | `include_code`, `description`, `tags` | +| `get_execution` | 获取单条执行记录 | (新工具) | +| `get_last_execution` | 获取最近执行 | (新工具) | +| `annotate_execution` | 标注执行记录 | (新工具) | +| `get_execution_history` | 查询执行历史 | `tags`, `has_notes`, `has_description` | +| `read_file` | 读取文件 | - | +| `write_file` | 写入文件 | - | +| `list_files` | 列出目录 | - | +| `install_package` | 安装包 | - | +| `get_sandbox_info` | 获取沙箱信息 | - | + +### 文件变更 + +| 文件 | 变更类型 | +|------|----------| +| `pkgs/bay/app/models.py` | 模型扩展 | +| `pkgs/bay/app/database.py` | 新增方法 | +| `pkgs/bay/app/routes/sessions.py` | 新增端点 | 
+| `pkgs/bay/app/services/ship/service.py` | 支持元数据 | +| `pkgs/bay/app/mcp/server.py` | 新增工具 | +| `pkgs/mcp-server/python/server.py` | 同步更新 | +| `shipyard_python_sdk/shipyard/sandbox.py` | SDK 更新 | +| `pkgs/bay/alembic/versions/001_*.py` | 数据库迁移 | diff --git a/pkgs/bay/alembic/versions/001_add_execution_history_metadata.py b/pkgs/bay/alembic/versions/001_add_execution_history_metadata.py new file mode 100644 index 0000000..0e0af44 --- /dev/null +++ b/pkgs/bay/alembic/versions/001_add_execution_history_metadata.py @@ -0,0 +1,46 @@ +"""Add metadata fields to execution_history table + +Revision ID: 001 +Create Date: 2025-01-28 + +This migration adds description, tags, and notes fields to the execution_history +table to support skill library functionality. +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers +revision = '001' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + """Add metadata columns to execution_history table.""" + # Add description column + op.add_column( + 'execution_history', + sa.Column('description', sa.String(), nullable=True) + ) + + # Add tags column (comma-separated string) + op.add_column( + 'execution_history', + sa.Column('tags', sa.String(), nullable=True) + ) + + # Add notes column + op.add_column( + 'execution_history', + sa.Column('notes', sa.String(), nullable=True) + ) + + +def downgrade() -> None: + """Remove metadata columns from execution_history table.""" + op.drop_column('execution_history', 'notes') + op.drop_column('execution_history', 'tags') + op.drop_column('execution_history', 'description') diff --git a/pkgs/bay/app/config.py b/pkgs/bay/app/config.py index 8025d95..39102a9 100644 --- a/pkgs/bay/app/config.py +++ b/pkgs/bay/app/config.py @@ -15,6 +15,20 @@ class Settings(BaseSettings): default="wait", description="Behavior when max ships reached" ) + # Warm Pool settings + warm_pool_enabled: bool = Field( + default=True, description="Enable warm pool 
for faster ship allocation" + ) + warm_pool_min_size: int = Field( + default=2, description="Minimum number of ships to keep in the warm pool" + ) + warm_pool_max_size: int = Field( + default=5, description="Maximum number of ships in the warm pool" + ) + warm_pool_replenish_interval: int = Field( + default=30, description="Interval in seconds to check and replenish the warm pool" + ) + # Authentication access_token: str = Field( default="secret-token", description="Access token for ship operations" diff --git a/pkgs/bay/app/database.py b/pkgs/bay/app/database.py index 2a153ad..3eb4972 100644 --- a/pkgs/bay/app/database.py +++ b/pkgs/bay/app/database.py @@ -3,7 +3,7 @@ from sqlalchemy.pool import StaticPool from typing import Optional, List from app.config import settings -from app.models import Ship, SessionShip, ShipStatus +from app.models import Ship, SessionShip, ShipStatus, ExecutionHistory from datetime import datetime, timezone @@ -186,25 +186,21 @@ async def update_session_ship(self, session_ship: SessionShip) -> SessionShip: finally: await session.close() - async def find_available_ship(self, session_id: str) -> Optional[Ship]: - """Find an available ship that can accept a new session""" + async def find_ship_for_session(self, session_id: str) -> Optional[Ship]: + """Find a running ship that belongs to this session (1:1 binding).""" session = self.get_session() try: - # Find ships that have available session slots (only RUNNING ships) - statement = select(Ship).where( - Ship.status == ShipStatus.RUNNING, Ship.current_session_num < Ship.max_session_num + # With 1:1 binding, each session has exactly one ship + statement = ( + select(Ship) + .join(SessionShip, Ship.id == SessionShip.ship_id) + .where( + SessionShip.session_id == session_id, + Ship.status == ShipStatus.RUNNING, + ) ) result = await session.execute(statement) - ships = list(result.scalars().all()) - - # Check if this session already has access to any ship - for ship in ships: - existing_session 
= await self.get_session_ship(session_id, ship.id) - if existing_session: - return ship - - # Return the first available ship - return ships[0] if ships else None + return result.scalars().first() finally: await session.close() @@ -256,44 +252,6 @@ async def find_stopped_ship_for_session(self, session_id: str) -> Optional[Ship] finally: await session.close() - async def increment_ship_session_count(self, ship_id: str) -> Optional[Ship]: - """Increment the current session count for a ship""" - session = self.get_session() - try: - statement = select(Ship).where(Ship.id == ship_id) - result = await session.execute(statement) - ship = result.scalar_one_or_none() - - if ship: - ship.current_session_num += 1 - ship.updated_at = datetime.now(timezone.utc) - session.add(ship) - await session.commit() - await session.refresh(ship) - - return ship - finally: - await session.close() - - async def decrement_ship_session_count(self, ship_id: str) -> Optional[Ship]: - """Decrement the current session count for a ship""" - session = self.get_session() - try: - statement = select(Ship).where(Ship.id == ship_id) - result = await session.execute(statement) - ship = result.scalar_one_or_none() - - if ship and ship.current_session_num > 0: - ship.current_session_num -= 1 - ship.updated_at = datetime.now(timezone.utc) - session.add(ship) - await session.commit() - await session.refresh(ship) - - return ship - finally: - await session.close() - async def delete_sessions_for_ship(self, ship_id: str) -> List[str]: """Delete all session-ship relationships for a ship and return deleted session IDs""" session = self.get_session() @@ -377,5 +335,215 @@ async def expire_sessions_for_ship(self, ship_id: str) -> int: finally: await session.close() + async def find_warm_pool_ship(self) -> Optional[Ship]: + """Find an available ship from the warm pool (running ship with no session).""" + session = self.get_session() + try: + # Find running ships that have no session attached + statement = ( + 
select(Ship) + .outerjoin(SessionShip, Ship.id == SessionShip.ship_id) + .where( + Ship.status == ShipStatus.RUNNING, + SessionShip.id == None, # noqa: E711 + ) + .order_by(Ship.created_at.asc()) # Oldest first (FIFO) + ) + result = await session.execute(statement) + return result.scalars().first() + finally: + await session.close() + + async def count_warm_pool_ships(self) -> int: + """Count ships in the warm pool (running ships with no session).""" + session = self.get_session() + try: + statement = ( + select(Ship) + .outerjoin(SessionShip, Ship.id == SessionShip.ship_id) + .where( + Ship.status == ShipStatus.RUNNING, + SessionShip.id == None, # noqa: E711 + ) + ) + result = await session.execute(statement) + return len(list(result.scalars().all())) + finally: + await session.close() + + # Execution History operations + async def create_execution_history( + self, + session_id: str, + exec_type: str, + success: bool, + code: Optional[str] = None, + command: Optional[str] = None, + execution_time_ms: Optional[int] = None, + description: Optional[str] = None, + tags: Optional[str] = None, + ) -> ExecutionHistory: + """Record an execution in history.""" + history = ExecutionHistory( + session_id=session_id, + exec_type=exec_type, + code=code, + command=command, + success=success, + execution_time_ms=execution_time_ms, + description=description, + tags=tags, + ) + session = self.get_session() + try: + session.add(history) + await session.commit() + await session.refresh(history) + return history + finally: + await session.close() + + async def get_execution_history( + self, + session_id: str, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, + tags: Optional[str] = None, + has_notes: bool = False, + has_description: bool = False, + ) -> tuple[List[ExecutionHistory], int]: + """Get execution history for a session. 
+ + Args: + session_id: The session ID + exec_type: Filter by 'python' or 'shell' + success_only: Only return successful executions + limit: Maximum entries to return + offset: Number of entries to skip + tags: Filter by tags (comma-separated, matches if any tag is present) + has_notes: Only return entries with notes + has_description: Only return entries with description + """ + session = self.get_session() + try: + # Build query + conditions = [ExecutionHistory.session_id == session_id] + if exec_type: + conditions.append(ExecutionHistory.exec_type == exec_type) + if success_only: + conditions.append(ExecutionHistory.success == True) # noqa: E712 + if has_notes: + conditions.append(ExecutionHistory.notes != None) # noqa: E711 + conditions.append(ExecutionHistory.notes != "") + if has_description: + conditions.append(ExecutionHistory.description != None) # noqa: E711 + conditions.append(ExecutionHistory.description != "") + if tags: + # Match any of the provided tags + tag_list = [t.strip() for t in tags.split(",") if t.strip()] + if tag_list: + from sqlalchemy import or_ + tag_conditions = [ExecutionHistory.tags.contains(tag) for tag in tag_list] + conditions.append(or_(*tag_conditions)) + + # Count total + count_stmt = select(ExecutionHistory).where(*conditions) + count_result = await session.execute(count_stmt) + total = len(list(count_result.scalars().all())) + + # Get entries + statement = ( + select(ExecutionHistory) + .where(*conditions) + .order_by(ExecutionHistory.created_at.desc()) + .offset(offset) + .limit(limit) + ) + result = await session.execute(statement) + entries = list(result.scalars().all()) + + return entries, total + finally: + await session.close() + + async def get_execution_by_id( + self, + session_id: str, + execution_id: str, + ) -> Optional[ExecutionHistory]: + """Get a specific execution record by ID.""" + session = self.get_session() + try: + statement = select(ExecutionHistory).where( + ExecutionHistory.session_id == session_id, + 
ExecutionHistory.id == execution_id, + ) + result = await session.execute(statement) + return result.scalar_one_or_none() + finally: + await session.close() + + async def get_last_execution( + self, + session_id: str, + exec_type: Optional[str] = None, + ) -> Optional[ExecutionHistory]: + """Get the most recent execution for a session.""" + session = self.get_session() + try: + conditions = [ExecutionHistory.session_id == session_id] + if exec_type: + conditions.append(ExecutionHistory.exec_type == exec_type) + + statement = ( + select(ExecutionHistory) + .where(*conditions) + .order_by(ExecutionHistory.created_at.desc()) + .limit(1) + ) + result = await session.execute(statement) + return result.scalar_one_or_none() + finally: + await session.close() + + async def update_execution_history( + self, + session_id: str, + execution_id: str, + description: Optional[str] = None, + tags: Optional[str] = None, + notes: Optional[str] = None, + ) -> Optional[ExecutionHistory]: + """Update metadata for an execution history record. + + Only updates fields that are provided (not None). 
+ """ + session = self.get_session() + try: + statement = select(ExecutionHistory).where( + ExecutionHistory.session_id == session_id, + ExecutionHistory.id == execution_id, + ) + result = await session.execute(statement) + history = result.scalar_one_or_none() + + if history: + if description is not None: + history.description = description + if tags is not None: + history.tags = tags + if notes is not None: + history.notes = notes + + session.add(history) + await session.commit() + await session.refresh(history) + + return history + finally: + await session.close() + db_service = DatabaseService() diff --git a/pkgs/bay/app/main.py b/pkgs/bay/app/main.py index ee805c6..c6ac74e 100644 --- a/pkgs/bay/app/main.py +++ b/pkgs/bay/app/main.py @@ -6,6 +6,7 @@ from app.database import db_service from app.drivers import initialize_driver, close_driver from app.services.status import status_checker +from app.services.ship import ship_service from app.routes import health, ships, stat, sessions # Configure logging @@ -35,6 +36,11 @@ async def lifespan(app: FastAPI): await status_checker.start() logger.info("Status checker started") + # Start warm pool + await ship_service.start_warm_pool() + if settings.warm_pool_enabled: + logger.info(f"Warm pool started (min_size={settings.warm_pool_min_size})") + logger.info("Bay API service started successfully") except Exception as e: @@ -46,6 +52,13 @@ async def lifespan(app: FastAPI): # Shutdown logger.info("Shutting down Bay API service...") + # Stop warm pool + try: + await ship_service.stop_warm_pool() + logger.info("Warm pool stopped") + except Exception as e: + logger.error(f"Error stopping warm pool: {e}") + # Stop status checker try: await status_checker.stop() diff --git a/pkgs/bay/app/mcp/README.md b/pkgs/bay/app/mcp/README.md new file mode 100644 index 0000000..ce31367 --- /dev/null +++ b/pkgs/bay/app/mcp/README.md @@ -0,0 +1,311 @@ +# Shipyard MCP Server + +Shipyard provides an MCP (Model Context Protocol) server that 
enables AI assistants to execute Python code and shell commands in isolated sandbox environments. + +## Compatibility + +Shipyard MCP Server is compatible with all MCP clients: + +| Client | Status | Notes | +|--------|--------|-------| +| Claude Desktop | ✅ Supported | Anthropic's official client | +| ChatGPT Desktop | ✅ Supported | OpenAI adopted MCP in March 2025 | +| Cursor | ✅ Supported | Built-in MCP support | +| VS Code (Copilot) | ✅ Supported | GitHub Copilot Agent Mode | +| Gemini | ✅ Supported | Google DeepMind MCP support | +| Any MCP Client | ✅ Supported | Standard MCP protocol | + +## Installation + +### Prerequisites + +1. Install the MCP SDK: +```bash +pip install mcp +``` + +2. Set up environment variables: +```bash +export SHIPYARD_ENDPOINT=http://localhost:8156 # Bay API URL +export SHIPYARD_TOKEN=your-access-token # Required +export SHIPYARD_SANDBOX_TTL=1800 # Optional: TTL in seconds (default: 30 min) +``` + +### Install from source + +```bash +cd pkgs/bay +pip install -e . +``` + +## Usage + +### stdio mode (for desktop apps) + +This is the default mode for integration with Claude Desktop, ChatGPT, Cursor, etc. + +```bash +python -m app.mcp.run +``` + +Or using the installed script: +```bash +shipyard-mcp +``` + +### HTTP mode (for remote deployments) + +For hosted/remote MCP servers. 
**Each client session gets its own isolated sandbox.** + +```bash +python -m app.mcp.run --transport http --port 8000 +``` + +## Configuration + +### Claude Desktop + +Add to `~/.config/claude/claude_desktop_config.json` (Linux) or `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS): + +```json +{ + "mcpServers": { + "shipyard": { + "command": "python", + "args": ["-m", "app.mcp.run"], + "cwd": "/path/to/shipyard/pkgs/bay", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +### Cursor + +Add to Cursor settings (`~/.cursor/mcp.json`): + +```json +{ + "mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +### VS Code with GitHub Copilot + +Add to VS Code settings: + +```json +{ + "github.copilot.chat.mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +## Available Tools + +The MCP server exposes the following tools: + +| Tool | Description | +|------|-------------| +| `execute_python` | Execute Python code in the sandbox | +| `execute_shell` | Execute shell commands | +| `read_file` | Read file contents | +| `write_file` | Write to files | +| `list_files` | List directory contents | +| `install_package` | Install Python packages via pip | +| `get_sandbox_info` | Get current sandbox information | +| `get_execution_history` | View past executions (supports tag/notes filtering) | +| `get_execution` | Get specific execution by ID | +| `get_last_execution` | Get most recent execution | +| `annotate_execution` | Add notes/tags to an execution record | + +### Skill Library Support + +The tools support building Agent skill libraries (inspired by VOYAGER): + +```python +# Execute with metadata +execute_python( + code="import pandas 
as pd; df = pd.read_csv('data.csv')", + include_code=True, # Return code and execution_id + description="Load CSV file", + tags="data-processing,pandas" +) + +# Annotate after execution +annotate_execution( + execution_id="abc-123", + notes="Reusable for any CSV loading task" +) + +# Query annotated executions +get_execution_history( + success_only=True, + has_notes=True, # Only entries with notes + tags="pandas" # Filter by tag +) +``` + +### Example: execute_python + +```python +# Basic execution +{ + "tool": "execute_python", + "arguments": { + "code": "import pandas as pd\ndf = pd.DataFrame({'a': [1,2,3]})\nprint(df)", + "timeout": 30 + } +} + +# Response +"Output:\n a\n0 1\n1 2\n2 3" + +# With skill library metadata +{ + "tool": "execute_python", + "arguments": { + "code": "df = pd.read_csv('data.csv')", + "include_code": true, + "description": "Load CSV data", + "tags": "data-processing,pandas" + } +} + +# Response +"execution_id: abc-123\n\nCode:\ndf = pd.read_csv('data.csv')\n\nExecuted successfully (no output)\n\nExecution time: 15ms" +``` + +### Example: execute_shell + +```python +# Request +{ + "tool": "execute_shell", + "arguments": { + "command": "ls -la /workspace" + } +} + +# Response +"total 4\ndrwxr-xr-x 2 user user 4096 Jan 27 00:00 .\n..." +``` + +## Resources + +The server provides an informational resource: + +- `sandbox://info` - Information about the Shipyard sandbox service + +## Transport Modes + +### stdio (Default) + +Standard I/O transport for local integration with desktop apps. One process = one session = one sandbox. + +### HTTP (Streamable HTTP) + +HTTP transport for remote/hosted deployments. 
Each MCP client session gets its own isolated sandbox: + +- **Session isolation**: Client A cannot see Client B's variables or files +- **Automatic TTL renewal**: Sandbox TTL is renewed on each tool call +- **Cleanup on disconnect**: Sandboxes are cleaned up via TTL when clients disconnect + +``` +Client A (mcp-session-aaa) ──► Sandbox A (ship-111) + │ + │ execute_python("x = 123") ✓ + │ +Client B (mcp-session-bbb) ──► Sandbox B (ship-222) + │ + │ execute_python("print(x)") ✗ NameError (isolated!) +``` + +## Architecture + +``` +┌─────────────────┐ MCP Protocol ┌─────────────────┐ +│ MCP Client │◄────────────────────►│ Shipyard MCP │ +│ (Claude/Cursor) │ (stdio or HTTP) │ Server │ +└─────────────────┘ └────────┬────────┘ + │ + │ HTTP/REST + ▼ + ┌─────────────────┐ + │ Bay API │ + │ (FastAPI) │ + └────────┬────────┘ + │ + │ Container API + ▼ + ┌─────────────────┐ + │ Ship Container │ + │ (Python/Shell) │ + └─────────────────┘ +``` + +## Security + +- Each session gets a dedicated, isolated container +- Code execution is sandboxed +- Containers have configurable network access +- Resources are automatically cleaned up via TTL +- HTTP mode provides per-client session isolation + +## Troubleshooting + +### "SHIPYARD_TOKEN environment variable is required" + +Set the `SHIPYARD_TOKEN` environment variable to your Bay API access token. + +### Connection refused + +Ensure the Bay API is running at the configured `SHIPYARD_ENDPOINT`. + +### Tool execution timeout + +Increase the `timeout` parameter in tool arguments (default: 30 seconds). + +### Sandbox expired + +If a sandbox expires due to inactivity, a new one will be automatically created on the next tool call. 
+ +## Development + +### Testing with MCP Inspector + +```bash +# Install MCP Inspector +npm install -g @modelcontextprotocol/inspector + +# Start the MCP server +python -m app.mcp.run + +# In another terminal, run the inspector +npx @modelcontextprotocol/inspector +``` + +### Running tests + +```bash +cd pkgs/bay +uv run pytest tests/ -v +``` diff --git a/pkgs/bay/app/mcp/__init__.py b/pkgs/bay/app/mcp/__init__.py new file mode 100644 index 0000000..c679d62 --- /dev/null +++ b/pkgs/bay/app/mcp/__init__.py @@ -0,0 +1,7 @@ +""" +Shipyard MCP Server Module +""" + +from .server import ShipyardMCPServer + +__all__ = ["ShipyardMCPServer"] diff --git a/pkgs/bay/app/mcp/run.py b/pkgs/bay/app/mcp/run.py new file mode 100644 index 0000000..9b82e26 --- /dev/null +++ b/pkgs/bay/app/mcp/run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +""" +Shipyard MCP Server Entry Point + +Run the MCP server in stdio mode for integration with +MCP-compatible clients (Claude Desktop, ChatGPT, Cursor, VS Code, etc.) + +Usage: + # Default stdio mode + python -m app.mcp.run + + # HTTP mode for remote deployments + python -m app.mcp.run --transport http --port 8000 + +Environment variables: + SHIPYARD_ENDPOINT: Bay API URL (default: http://localhost:8156) + SHIPYARD_TOKEN: Access token for Bay API authentication (required) +""" + +from .server import main + +if __name__ == "__main__": + main() diff --git a/pkgs/bay/app/mcp/server.py b/pkgs/bay/app/mcp/server.py new file mode 100644 index 0000000..21b11ef --- /dev/null +++ b/pkgs/bay/app/mcp/server.py @@ -0,0 +1,629 @@ +""" +Shipyard MCP Server + +This module provides an MCP server that allows MCP-compatible clients +(Claude Desktop, ChatGPT Desktop, Cursor, VS Code, etc.) to interact +with Shipyard sandboxes. + +The MCP Server internally uses the Shipyard SDK to communicate with Bay. 
+ +Supported transports: +- stdio (default): For local integration with desktop apps +- streamable-http: For remote/hosted deployments + +In HTTP mode, each MCP client session gets its own isolated Sandbox. +Session state (including the Sandbox) persists across tool calls within +the same MCP session and is automatically cleaned up when the session ends. + +Usage: + # stdio mode (default) + python -m app.mcp.run + + # HTTP mode + python -m app.mcp.run --transport http --port 8000 + +Environment variables: + SHIPYARD_ENDPOINT: Bay API URL (default: http://localhost:8156) + SHIPYARD_TOKEN: Access token for Bay API authentication (required) + SHIPYARD_SANDBOX_TTL: Sandbox TTL in seconds (default: 1800) +""" + +import asyncio +import os +import sys +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager +from dataclasses import dataclass +from datetime import datetime +from typing import Optional + +# Add SDK to path if running standalone +sdk_path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", "shipyard_python_sdk") +if os.path.exists(sdk_path): + sys.path.insert(0, sdk_path) + +from shipyard import Sandbox, ExecResult + +from mcp.server.fastmcp import Context, FastMCP + + +@dataclass +class GlobalConfig: + """Global configuration initialized during server lifespan. + + This contains configuration that is shared across all sessions. + Per-session state (like Sandbox) is stored via ctx.set_state(). + """ + endpoint: str + token: str + default_ttl: int = 1800 # 30 minutes + ttl_renew_threshold: int = 600 # Renew when < 10 minutes remaining + + +@asynccontextmanager +async def mcp_lifespan(server: FastMCP) -> AsyncIterator[GlobalConfig]: + """Manage MCP server lifecycle. + + Only initializes global configuration here. Per-session Sandbox + instances are created lazily via get_or_create_sandbox(). 
+ """ + endpoint = os.getenv("SHIPYARD_ENDPOINT", "http://localhost:8156") + token = os.getenv("SHIPYARD_TOKEN", "") + ttl = int(os.getenv("SHIPYARD_SANDBOX_TTL", "1800")) + + if not token: + raise ValueError( + "SHIPYARD_TOKEN environment variable is required. " + "Set it to your Bay API access token." + ) + + yield GlobalConfig(endpoint=endpoint, token=token, default_ttl=ttl) + + +# Create MCP server +mcp = FastMCP( + "Shipyard", + version="1.0.0", + lifespan=mcp_lifespan, +) + +# Lock for preventing race conditions during sandbox creation +_sandbox_locks: dict[str, asyncio.Lock] = {} + + +async def get_or_create_sandbox(ctx: Context) -> Sandbox: + """Get or create a Sandbox for the current MCP session. + + This function manages per-session Sandbox instances: + - First call in a session creates a new Sandbox + - Subsequent calls return the existing Sandbox + - TTL is automatically renewed to keep the Sandbox alive + + The Sandbox is stored in session state (ctx.set_state) which is + automatically isolated per MCP session by FastMCP. 
+ + Args: + ctx: MCP request context + + Returns: + Sandbox instance for this session + """ + session_id = ctx.session_id + + # Get or create lock for this session + if session_id not in _sandbox_locks: + _sandbox_locks[session_id] = asyncio.Lock() + + async with _sandbox_locks[session_id]: + sandbox = await ctx.get_state("sandbox") + last_renew = await ctx.get_state("last_ttl_renew") + config: GlobalConfig = ctx.request_context.lifespan_context + + if sandbox is None: + # First call in this session - create new Sandbox + sandbox = Sandbox( + endpoint=config.endpoint, + token=config.token, + ttl=config.default_ttl, + session_id=session_id, # Use MCP session ID + ) + try: + await sandbox.start() + except Exception as e: + raise RuntimeError(f"Failed to create sandbox: {e}") + + await ctx.set_state("sandbox", sandbox) + await ctx.set_state("last_ttl_renew", datetime.now()) + await ctx.info(f"Created new sandbox for session {session_id[:8]}...") + else: + # Existing sandbox - check if TTL renewal is needed + now = datetime.now() + if last_renew is None or (now - last_renew).total_seconds() > config.ttl_renew_threshold: + # Renew TTL + try: + await sandbox.extend_ttl(config.default_ttl) + await ctx.set_state("last_ttl_renew", now) + except Exception: + # If renewal fails, sandbox may have expired - recreate + sandbox = Sandbox( + endpoint=config.endpoint, + token=config.token, + ttl=config.default_ttl, + session_id=session_id, + ) + await sandbox.start() + await ctx.set_state("sandbox", sandbox) + await ctx.set_state("last_ttl_renew", now) + await ctx.warning(f"Sandbox expired, created new one for session {session_id[:8]}...") + + return sandbox + + +def _format_exec_result( + result: ExecResult, + include_code: bool = False, +) -> str: + """Format execution result for LLM consumption.""" + parts = [] + + # Always include execution_id if available + if result.execution_id: + parts.append(f"execution_id: {result.execution_id}") + + # Include code if requested + if 
include_code and result.code: + parts.append(f"Code:\n{result.code}") + + if result.stdout: + parts.append(f"Output:\n{result.stdout}") + if result.stderr: + parts.append(f"Errors:\n{result.stderr}") + if result.result is not None: + parts.append(f"Result: {result.result}") + if result.exit_code != 0: + parts.append(f"Exit code: {result.exit_code}") + + # Include execution time if code is included + if include_code and result.execution_time_ms: + parts.append(f"Execution time: {result.execution_time_ms}ms") + + if not parts: + return "Executed successfully (no output)" + + return "\n\n".join(parts) + + +# ============================================================================= +# MCP Tools +# ============================================================================= + + +@mcp.tool() +async def execute_python( + code: str, + timeout: int = 30, + include_code: bool = False, + description: str = None, + tags: str = None, + ctx: Context = None, +) -> str: + """Execute Python code in an isolated sandbox. + + The sandbox provides a full Python environment with common libraries + pre-installed. Code execution is isolated and secure. + + Args: + code: Python code to execute + timeout: Execution timeout in seconds (default: 30) + include_code: If True, include the executed code and execution_id in the response. + Useful for recording skills or analyzing execution. + description: Human-readable description of what this code does (for skill library) + tags: Comma-separated tags for categorization (e.g., 'data-processing,pandas') + + Returns: + Execution result including stdout, stderr, and any return value. + When include_code=True, also includes execution_id, code, and execution time. 
+ """ + sandbox = await get_or_create_sandbox(ctx) + result = await sandbox.python.exec(code, timeout=timeout, description=description, tags=tags) + return _format_exec_result(result, include_code=include_code) + + +@mcp.tool() +async def execute_shell( + command: str, + cwd: str = None, + timeout: int = 30, + include_code: bool = False, + description: str = None, + tags: str = None, + ctx: Context = None, +) -> str: + """Execute a shell command in an isolated sandbox. + + The sandbox provides a Linux environment with common tools available. + Command execution is isolated and secure. + + Args: + command: Shell command to execute + cwd: Working directory (relative to workspace, optional) + timeout: Execution timeout in seconds (default: 30) + include_code: If True, include the executed command and execution_id in the response. + Useful for recording skills or analyzing execution. + description: Human-readable description of what this command does (for skill library) + tags: Comma-separated tags for categorization (e.g., 'file-ops,cleanup') + + Returns: + Command output including stdout and stderr. + When include_code=True, also includes execution_id, command, and execution time. + """ + sandbox = await get_or_create_sandbox(ctx) + result = await sandbox.shell.exec(command, cwd=cwd, timeout=timeout, description=description, tags=tags) + return _format_exec_result(result, include_code=include_code) + + +@mcp.tool() +async def read_file( + path: str, + ctx: Context = None, +) -> str: + """Read file content from the sandbox. + + Args: + path: File path (relative to workspace or absolute) + + Returns: + File content as string + """ + sandbox = await get_or_create_sandbox(ctx) + return await sandbox.fs.read(path) + + +@mcp.tool() +async def write_file( + path: str, + content: str, + ctx: Context = None, +) -> str: + """Write content to a file in the sandbox. + + Creates the file if it doesn't exist, or overwrites if it does. 
+ Parent directories are created automatically. + + Args: + path: File path (relative to workspace or absolute) + content: Content to write + + Returns: + Confirmation message + """ + sandbox = await get_or_create_sandbox(ctx) + await sandbox.fs.write(path, content) + return f"File written: {path}" + + +@mcp.tool() +async def list_files( + path: str = ".", + ctx: Context = None, +) -> str: + """List files and directories in the sandbox. + + Args: + path: Directory path (default: current workspace) + + Returns: + List of files and directories + """ + sandbox = await get_or_create_sandbox(ctx) + entries = await sandbox.fs.list(path) + + if not entries: + return f"Directory '{path}' is empty" + + lines = [] + for entry in entries: + name = entry.get("name", "") + entry_type = entry.get("type", "file") + if entry_type == "directory": + lines.append(f" {name}/") + else: + lines.append(f" {name}") + + return f"Contents of '{path}':\n" + "\n".join(lines) + + +@mcp.tool() +async def install_package( + package: str, + ctx: Context = None, +) -> str: + """Install a Python package in the sandbox using pip. + + Args: + package: Package name (e.g., 'requests', 'pandas==2.0.0') + + Returns: + Installation result + """ + sandbox = await get_or_create_sandbox(ctx) + result = await sandbox.shell.exec(f"pip install {package}", timeout=120) + + if result.success: + return f"Successfully installed: {package}" + return f"Installation failed: {result.stderr}" + + +@mcp.tool() +async def get_sandbox_info(ctx: Context = None) -> str: + """Get information about the current sandbox environment. + + Returns: + Sandbox information including session ID, ship ID, etc. 
+ """ + sandbox = await get_or_create_sandbox(ctx) + return f"Session ID: {sandbox.session_id}\nShip ID: {sandbox.ship_id}" + + +@mcp.tool() +async def get_execution_history( + exec_type: str = None, + success_only: bool = False, + limit: int = 50, + tags: str = None, + has_notes: bool = False, + has_description: bool = False, + ctx: Context = None, +) -> str: + """Get execution history for this session. + + Useful for reviewing past executions or building skill libraries. + + Args: + exec_type: Filter by 'python' or 'shell' (optional) + success_only: Only return successful executions + limit: Maximum entries to return (default: 50) + tags: Filter by tags (comma-separated, matches if any tag is present) + has_notes: Only return entries with notes + has_description: Only return entries with description + + Returns: + Execution history entries + """ + sandbox = await get_or_create_sandbox(ctx) + history = await sandbox.get_execution_history( + exec_type=exec_type, + success_only=success_only, + limit=limit, + tags=tags, + has_notes=has_notes, + has_description=has_description, + ) + + entries = history.get("entries", []) + if not entries: + return "No execution history found" + + lines = [f"Execution History ({history.get('total', 0)} total):"] + for entry in entries: + status = "✓" if entry.get("success") else "✗" + exec_t = entry.get("exec_type", "?") + time_ms = entry.get("execution_time_ms", 0) + code = entry.get("code", "")[:50] # Truncate long code + if len(entry.get("code", "")) > 50: + code += "..." + # Show metadata if present + meta = [] + if entry.get("tags"): + meta.append(f"tags:{entry.get('tags')}") + if entry.get("notes"): + meta.append("has_notes") + meta_str = f" [{', '.join(meta)}]" if meta else "" + lines.append(f" {status} [{exec_t}] {time_ms}ms{meta_str}: {code}") + + return "\n".join(lines) + + +@mcp.tool() +async def get_execution( + execution_id: str, + ctx: Context = None, +) -> str: + """Get a specific execution record by ID. 
+ + Use this to retrieve the full details of a previous execution, + including the complete code, output, and timing information. + + Args: + execution_id: The execution ID (returned by execute_python/execute_shell + when include_code=True) + + Returns: + Full execution record including code, success status, output, and timing. + """ + sandbox = await get_or_create_sandbox(ctx) + try: + entry = await sandbox.get_execution(execution_id) + except RuntimeError as e: + return f"Error: {e}" + + lines = [f"Execution ID: {entry.get('id', execution_id)}"] + lines.append(f"Type: {entry.get('exec_type', 'unknown')}") + lines.append(f"Success: {entry.get('success', False)}") + + code = entry.get("code") or entry.get("command") + if code: + lines.append(f"\nCode:\n{code}") + + if entry.get("execution_time_ms"): + lines.append(f"\nExecution time: {entry.get('execution_time_ms')}ms") + + if entry.get("created_at"): + lines.append(f"Created at: {entry.get('created_at')}") + + return "\n".join(lines) + + +@mcp.tool() +async def get_last_execution( + exec_type: str = None, + ctx: Context = None, +) -> str: + """Get the most recent execution for this session. + + Useful for retrieving the full record of what was just executed, + including the complete code for analysis or skill recording. + + Args: + exec_type: Filter by 'python' or 'shell' (optional) + + Returns: + Full execution record including code, success status, output, and timing. 
+ """ + sandbox = await get_or_create_sandbox(ctx) + try: + entry = await sandbox.get_last_execution(exec_type=exec_type) + except RuntimeError as e: + return f"Error: {e}" + + lines = [f"Execution ID: {entry.get('id')}"] + lines.append(f"Type: {entry.get('exec_type', 'unknown')}") + lines.append(f"Success: {entry.get('success', False)}") + + code = entry.get("code") or entry.get("command") + if code: + lines.append(f"\nCode:\n{code}") + + if entry.get("execution_time_ms"): + lines.append(f"\nExecution time: {entry.get('execution_time_ms')}ms") + + if entry.get("created_at"): + lines.append(f"Created at: {entry.get('created_at')}") + + return "\n".join(lines) + + +@mcp.tool() +async def annotate_execution( + execution_id: str, + description: str = None, + tags: str = None, + notes: str = None, + ctx: Context = None, +) -> str: + """Annotate an execution record with metadata. + + Use this to add descriptions, tags, or notes to an execution after + it has been recorded. Useful for skill library construction. + + At least one of description, tags, or notes must be provided. + + Args: + execution_id: The execution ID to annotate + description: Human-readable description of what this execution does + tags: Comma-separated tags for categorization (e.g., 'data-processing,pandas') + notes: Agent notes/annotations about this execution (e.g., learnings, issues) + + Returns: + Updated execution record. 
+ """ + if description is None and tags is None and notes is None: + return "Error: At least one of description, tags, or notes must be provided" + + sandbox = await get_or_create_sandbox(ctx) + try: + entry = await sandbox.annotate_execution( + execution_id=execution_id, + description=description, + tags=tags, + notes=notes, + ) + except RuntimeError as e: + return f"Error: {e}" + + lines = [f"Execution ID: {entry.get('id', execution_id)} updated"] + if entry.get("description"): + lines.append(f"Description: {entry.get('description')}") + if entry.get("tags"): + lines.append(f"Tags: {entry.get('tags')}") + if entry.get("notes"): + lines.append(f"Notes: {entry.get('notes')}") + + return "\n".join(lines) + + +# ============================================================================= +# MCP Resources +# ============================================================================= + + +@mcp.resource("sandbox://info") +async def sandbox_info_resource() -> str: + """Information about the Shipyard sandbox service.""" + return """Shipyard Sandbox Service + +Shipyard provides secure, isolated Python and shell execution environments +for AI agents and assistants. + +Available tools: +- execute_python: Run Python code (supports description, tags for skill library) +- execute_shell: Run shell commands (supports description, tags for skill library) +- read_file: Read file contents +- write_file: Write to files +- list_files: List directory contents +- install_package: Install Python packages via pip +- get_sandbox_info: Get current sandbox information +- get_execution_history: View past executions +- get_execution: Get specific execution by ID +- get_last_execution: Get most recent execution +- annotate_execution: Add notes/tags to an execution record + +Each session gets a dedicated container with: +- Full Python environment (3.13+) +- Node.js LTS +- Common CLI tools (git, curl, etc.) 
+- Isolated filesystem +- Network access + +Session state persists across tool calls within the same MCP session. +""" + + +# ============================================================================= +# Entry Point +# ============================================================================= + + +def main(): + """Entry point for the MCP server.""" + import argparse + + parser = argparse.ArgumentParser(description="Shipyard MCP Server") + parser.add_argument( + "--transport", + choices=["stdio", "http"], + default="stdio", + help="Transport mode (default: stdio)", + ) + parser.add_argument( + "--port", + type=int, + default=8000, + help="HTTP port (only used with --transport http)", + ) + parser.add_argument( + "--host", + default="0.0.0.0", + help="HTTP host (only used with --transport http)", + ) + + args = parser.parse_args() + + if args.transport == "http": + mcp.run(transport="streamable-http", host=args.host, port=args.port) + else: + mcp.run(transport="stdio") + + +if __name__ == "__main__": + main() diff --git a/pkgs/bay/app/models.py b/pkgs/bay/app/models.py index 339c44d..e4f8922 100644 --- a/pkgs/bay/app/models.py +++ b/pkgs/bay/app/models.py @@ -28,12 +28,6 @@ class ShipBase(SQLModel): container_id: Optional[str] = Field(default=None) ip_address: Optional[str] = Field(default=None) ttl: int = Field(description="Time to live in seconds") - max_session_num: int = Field( - default=1, description="Maximum number of sessions that can use this ship" - ) - current_session_num: int = Field( - default=0, description="Current number of active sessions" - ) expires_at: Optional[datetime] = Field( default=None, description="When this ship will expire based on all sessions", @@ -71,6 +65,38 @@ class SessionShip(SessionShipBase, table=True): __tablename__ = "session_ships" # type: ignore +# Execution History for skill library support +class ExecutionHistoryBase(SQLModel): + id: str = Field(default_factory=lambda: str(uuid.uuid4()), primary_key=True) + 
session_id: str = Field(description="Session ID", index=True) + exec_type: str = Field(description="Execution type: 'python' or 'shell'") + code: Optional[str] = Field(default=None, description="Executed code (for python)") + command: Optional[str] = Field(default=None, description="Executed command (for shell)") + success: bool = Field(description="Whether execution succeeded") + execution_time_ms: Optional[int] = Field(default=None, description="Execution time in ms") + created_at: datetime = Field( + default_factory=lambda: datetime.now(timezone.utc), + sa_column=Column(DateTime(timezone=True)), + ) + # Skill library metadata fields + description: Optional[str] = Field( + default=None, + description="Human-readable description of what this execution does" + ) + tags: Optional[str] = Field( + default=None, + description="Comma-separated tags for categorization (e.g., 'data-processing,pandas')" + ) + notes: Optional[str] = Field( + default=None, + description="Agent notes/annotations about this execution" + ) + + +class ExecutionHistory(ExecutionHistoryBase, table=True): + __tablename__ = "execution_history" # type: ignore + + # API Request/Response Models class ShipSpec(BaseModel): model_config = ConfigDict(extra="forbid") @@ -92,9 +118,6 @@ class CreateShipRequest(BaseModel): ttl: int = Field(..., gt=0, description="Time to live in seconds") spec: Optional[ShipSpec] = Field(None, description="Ship specifications") - max_session_num: int = Field( - default=1, gt=0, description="Maximum number of sessions that can use this ship" - ) force_create: bool = Field( default=False, description="If True, skip all reuse logic and always create a new container" @@ -111,10 +134,8 @@ class ShipResponse(BaseModel): container_id: Optional[str] ip_address: Optional[str] ttl: int - max_session_num: int - current_session_num: int expires_at: Optional[datetime] = Field( - None, description="When this ship will expire based on all sessions" + None, description="When this ship will 
expire based on session expiration" ) @@ -131,6 +152,10 @@ class ExecResponse(BaseModel): success: bool data: Optional[Dict[str, Any]] = None error: Optional[str] = None + execution_id: Optional[str] = Field( + default=None, + description="Execution history ID for this operation (only for python/shell exec)" + ) class ExtendTTLRequest(BaseModel): @@ -164,3 +189,26 @@ class DownloadFileResponse(BaseModel): success: bool message: str error: Optional[str] = None + + +# Execution History API Models +class ExecutionHistoryEntry(BaseModel): + model_config = ConfigDict(from_attributes=True) + + id: str + session_id: str + exec_type: str + code: Optional[str] = None + command: Optional[str] = None + success: bool + execution_time_ms: Optional[int] = None + created_at: datetime + # Skill library metadata fields + description: Optional[str] = None + tags: Optional[str] = None + notes: Optional[str] = None + + +class ExecutionHistoryResponse(BaseModel): + entries: list[ExecutionHistoryEntry] + total: int diff --git a/pkgs/bay/app/routes/sessions.py b/pkgs/bay/app/routes/sessions.py index ec1696e..3a144f6 100644 --- a/pkgs/bay/app/routes/sessions.py +++ b/pkgs/bay/app/routes/sessions.py @@ -6,6 +6,7 @@ from datetime import datetime, timezone from app.database import db_service from app.auth import verify_token +from app.models import ExecutionHistoryResponse, ExecutionHistoryEntry router = APIRouter() @@ -211,16 +212,143 @@ async def delete_session(session_id: str, token: str = Depends(verify_token)): status_code=status.HTTP_404_NOT_FOUND, detail="Session not found" ) - - # Try to decrement the ship's session count (may fail if ship already deleted) - try: - await db_service.decrement_ship_session_count(session_ship.ship_id) - except Exception: - # Ship may have been deleted, ignore the error - pass - + # Delete the session await session.delete(session_ship) await session.commit() finally: await session.close() + + +@router.get("/sessions/{session_id}/history", 
response_model=ExecutionHistoryResponse) +async def get_execution_history( + session_id: str, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, + tags: Optional[str] = None, + has_notes: bool = False, + has_description: bool = False, + token: str = Depends(verify_token), +): + """Get execution history for a session. + + This enables agents to retrieve their successful execution paths + for skill library construction (inspired by VOYAGER). + + Args: + session_id: The session ID + exec_type: Filter by type ('python' or 'shell') + success_only: If True, only return successful executions + limit: Maximum number of entries to return + offset: Number of entries to skip + tags: Filter by tags (comma-separated, matches if any tag is present) + has_notes: If True, only return entries with notes + has_description: If True, only return entries with description + """ + entries, total = await db_service.get_execution_history( + session_id=session_id, + exec_type=exec_type, + success_only=success_only, + limit=limit, + offset=offset, + tags=tags, + has_notes=has_notes, + has_description=has_description, + ) + + return ExecutionHistoryResponse( + entries=[ + ExecutionHistoryEntry.model_validate(e) + for e in entries + ], + total=total, + ) + + +@router.get("/sessions/{session_id}/history/last", response_model=ExecutionHistoryEntry) +async def get_last_execution( + session_id: str, + exec_type: Optional[str] = None, + token: str = Depends(verify_token), +): + """Get the most recent execution for a session. 
+ + Args: + session_id: The session ID + exec_type: Filter by type ('python' or 'shell'), optional + """ + entry = await db_service.get_last_execution(session_id, exec_type) + + if not entry: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No execution history found" + ) + + return ExecutionHistoryEntry.model_validate(entry) + + +@router.get("/sessions/{session_id}/history/{execution_id}", response_model=ExecutionHistoryEntry) +async def get_execution_by_id( + session_id: str, + execution_id: str, + token: str = Depends(verify_token), +): + """Get a specific execution record by ID. + + Args: + session_id: The session ID + execution_id: The execution history ID + """ + entry = await db_service.get_execution_by_id(session_id, execution_id) + + if not entry: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Execution not found" + ) + + return ExecutionHistoryEntry.model_validate(entry) + + +class AnnotateExecutionRequest(BaseModel): + """Request model for annotating an execution.""" + description: Optional[str] = None + tags: Optional[str] = None + notes: Optional[str] = None + + +@router.patch("/sessions/{session_id}/history/{execution_id}", response_model=ExecutionHistoryEntry) +async def annotate_execution( + session_id: str, + execution_id: str, + request: AnnotateExecutionRequest, + token: str = Depends(verify_token), +): + """Annotate an execution record with metadata. + + Use this to add descriptions, tags, or notes to an execution after + it has been recorded. Useful for skill library construction. 
+ + Args: + session_id: The session ID + execution_id: The execution history ID + request: Annotation data (description, tags, notes) + """ + entry = await db_service.update_execution_history( + session_id=session_id, + execution_id=execution_id, + description=request.description, + tags=request.tags, + notes=request.notes, + ) + + if not entry: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Execution not found" + ) + + return ExecutionHistoryEntry.model_validate(entry) diff --git a/pkgs/bay/app/services/ship/service.py b/pkgs/bay/app/services/ship/service.py index e33c2c8..38b1f5f 100644 --- a/pkgs/bay/app/services/ship/service.py +++ b/pkgs/bay/app/services/ship/service.py @@ -8,6 +8,7 @@ import asyncio import logging from typing import Optional, List, Dict + from datetime import datetime, timedelta, timezone from app.config import settings @@ -38,147 +39,159 @@ class ShipService: def __init__(self): # Track cleanup tasks for each ship to enable cancellation self._cleanup_tasks: Dict[str, asyncio.Task] = {} + # Warm pool replenishment task + self._warm_pool_task: Optional[asyncio.Task] = None + + async def start_warm_pool(self): + """Start the warm pool replenishment background task.""" + if not settings.warm_pool_enabled: + logger.info("Warm pool is disabled") + return + + if self._warm_pool_task and not self._warm_pool_task.done(): + return # Already running + + self._warm_pool_task = asyncio.create_task(self._warm_pool_replenisher()) + logger.info(f"Warm pool started (min_size={settings.warm_pool_min_size})") + + async def stop_warm_pool(self): + """Stop the warm pool replenishment background task.""" + if self._warm_pool_task and not self._warm_pool_task.done(): + self._warm_pool_task.cancel() + try: + await self._warm_pool_task + except asyncio.CancelledError: + pass + self._warm_pool_task = None + + async def _warm_pool_replenisher(self): + """Background task to maintain warm pool size.""" + while True: + try: + await 
self._replenish_warm_pool() + await asyncio.sleep(settings.warm_pool_replenish_interval) + except asyncio.CancelledError: + logger.info("Warm pool replenisher stopped") + break + except Exception as e: + logger.error(f"Warm pool replenisher error: {e}") + await asyncio.sleep(settings.warm_pool_replenish_interval) + + async def _replenish_warm_pool(self): + """Ensure warm pool has enough ships.""" + current_count = await db_service.count_warm_pool_ships() + active_count = await db_service.count_active_ships() + + # Calculate how many ships we need to create + needed = settings.warm_pool_min_size - current_count + + # Respect max ship limit + available_slots = settings.max_ship_num - active_count + to_create = min(needed, available_slots, settings.warm_pool_max_size - current_count) + + if to_create > 0: + logger.info(f"Replenishing warm pool: creating {to_create} ships (current={current_count})") + for _ in range(to_create): + try: + await self._create_warm_pool_ship() + except Exception as e: + logger.error(f"Failed to create warm pool ship: {e}") + break # Stop on first failure + + async def _create_warm_pool_ship(self) -> Ship: + """Create a ship for the warm pool (no session attached).""" + ship = Ship(ttl=settings.default_ship_ttl, status=ShipStatus.CREATING) + ship = await db_service.create_ship(ship) + + try: + container_info = await get_driver().create_ship_container(ship, None) + ship.container_id = container_info.container_id + ship.ip_address = container_info.ip_address + ship = await db_service.update_ship(ship) + + if not ship.ip_address: + await db_service.delete_ship(ship.id) + raise RuntimeError("Ship has no IP address") + + is_ready = await wait_for_ship_ready(ship.ip_address) + if not is_ready: + if ship.container_id: + await get_driver().stop_ship_container(ship.container_id) + await db_service.delete_ship(ship.id) + raise RuntimeError("Ship failed health check") + + ship.status = ShipStatus.RUNNING + ship = await db_service.update_ship(ship) + + 
logger.info(f"Warm pool ship {ship.id} created and ready") + return ship + + except Exception: + await db_service.delete_ship(ship.id) + raise async def create_ship(self, request: CreateShipRequest, session_id: str) -> Ship: """Create a new ship or reuse an existing one for the session. - - If request.force_create is True, skip all reuse logic and always create a new container. + + With 1:1 Session-Ship binding: + 1. If session has an active ship, return it + 2. If session has a stopped ship with data, restore it + 3. Try to allocate from warm pool + 4. Create a new ship on-demand """ # If force_create is True, skip all reuse logic if not request.force_create: # First, check if this session already has an active running ship active_ship = await db_service.find_active_ship_for_session(session_id) if active_ship: - # Verify that the container actually exists and is running + # Verify container is actually running if active_ship.container_id and await get_driver().is_container_running( active_ship.container_id ): - # Update last activity and return the existing active ship await db_service.update_session_activity(session_id, active_ship.id) - logger.info( - f"Session {session_id} already has active ship {active_ship.id}, returning it" - ) + logger.info(f"Session {session_id} reusing active ship {active_ship.id}") return active_ship else: - # Container doesn't exist or isn't running, mark ship as stopped and restore it - logger.warning( - f"Ship {active_ship.id} is marked active but container is not running, restoring..." 
- ) + # Container not running, mark as stopped and restore + logger.warning(f"Ship {active_ship.id} container not running, restoring...") active_ship.status = ShipStatus.STOPPED await db_service.update_ship(active_ship) - # Restore the ship return await self._restore_ship(active_ship, request, session_id) - # Second, check if this session has a stopped ship with existing data + # Second, check for stopped ship with existing data stopped_ship = await db_service.find_stopped_ship_for_session(session_id) if stopped_ship and get_driver().ship_data_exists(stopped_ship.id): - # Restore the stopped ship - logger.info( - f"Restoring stopped ship {stopped_ship.id} for session {session_id}" - ) + logger.info(f"Restoring stopped ship {stopped_ship.id} for session {session_id}") return await self._restore_ship(stopped_ship, request, session_id) - # Third, try to find an available ship that can accept this session - # NOTE: This only applies to NEW sessions that don't have any ship yet. - # If a session already has a ship (active or stopped), it should NOT join another ship. - # The checks in steps 1 and 2 above ensure we only reach here for truly new sessions. 
- logger.debug(f"Looking for available ship for new session {session_id}") - available_ship = await db_service.find_available_ship(session_id) - logger.debug(f"find_available_ship returned: {available_ship}") - - if available_ship: - # Verify that the container actually exists and is running - logger.debug(f"Checking container status for ship {available_ship.id}, container_id: {available_ship.container_id}") - if ( - not available_ship.container_id - or not await get_driver().is_container_running( - available_ship.container_id - ) - ): - # Container doesn't exist or isn't running, mark ship as stopped - logger.warning( - f"Ship {available_ship.id} is marked active but container is not running, marking as stopped" - ) - available_ship.status = ShipStatus.STOPPED - await db_service.update_ship(available_ship) - # Don't use this ship, continue to create a new one - available_ship = None - - if available_ship: - # Check if this session already has access to this ship - logger.debug(f"Checking if session {session_id} already has access to ship {available_ship.id}") - existing_session = await db_service.get_session_ship( - session_id, available_ship.id - ) - logger.debug(f"Existing session: {existing_session}") - - if existing_session: - # Update last activity and return existing ship - logger.info(f"Session {session_id} already has access to ship {available_ship.id}, updating activity") - await db_service.update_session_activity(session_id, available_ship.id) - return available_ship - else: - # Calculate expiration time for this session - expires_at = datetime.now(timezone.utc) + timedelta(seconds=request.ttl) - - # Add this session to the ship - logger.info(f"Adding session {session_id} to ship {available_ship.id}") - session_ship = SessionShip( - session_id=session_id, - ship_id=available_ship.id, - expires_at=expires_at, - initial_ttl=request.ttl, - ) - await db_service.create_session_ship(session_ship) - logger.debug(f"Created session_ship record: 
{session_ship.id}") - - updated_ship = await db_service.increment_ship_session_count(available_ship.id) - logger.debug(f"increment_ship_session_count returned: {updated_ship}") - - if updated_ship is None: - logger.error(f"Failed to increment session count for ship {available_ship.id}") - raise ValueError(f"Failed to update ship {available_ship.id} session count") - - available_ship = updated_ship - - # Recalculate ship's TTL based on all sessions' expiration times - logger.debug(f"Recalculating cleanup for ship {available_ship.id}") - await self._recalculate_and_schedule_cleanup(available_ship.id) - - logger.info( - f"Session {session_id} joined ship {available_ship.id}, expires at {expires_at}" - ) - return available_ship + # Third, try to allocate from warm pool + if settings.warm_pool_enabled: + warm_ship = await db_service.find_warm_pool_ship() + if warm_ship: + logger.info(f"Allocating warm pool ship {warm_ship.id} to session {session_id}") + return await self._assign_ship_to_session(warm_ship, request, session_id) else: logger.info(f"force_create=True, skipping reuse logic for session {session_id}") - # Fourth (or First if force_create), no available ship found, create a new one + # Fourth, create a new ship on-demand # Check ship limits if settings.behavior_after_max_ship == "reject": active_count = await db_service.count_active_ships() if active_count >= settings.max_ship_num: raise ValueError("Maximum number of ships reached") elif settings.behavior_after_max_ship == "wait": - # Wait for available slot await self._wait_for_available_slot() - # Create ship record with CREATING status (status=2) - # This prevents status_checker from marking it as stopped during creation - ship = Ship(ttl=request.ttl, max_session_num=request.max_session_num, status=ShipStatus.CREATING) + # Create new ship + ship = Ship(ttl=request.ttl, status=ShipStatus.CREATING) ship = await db_service.create_ship(ship) try: - # Create container - container_info = await 
get_driver().create_ship_container( - ship, request.spec - ) - - # Update ship with container info + container_info = await get_driver().create_ship_container(ship, request.spec) ship.container_id = container_info.container_id ship.ip_address = container_info.ip_address ship = await db_service.update_ship(ship) - # Wait for ship to be ready if not ship.ip_address: logger.error(f"Ship {ship.id} has no IP address") await db_service.delete_ship(ship.id) @@ -188,7 +201,6 @@ async def create_ship(self, request: CreateShipRequest, session_id: str) -> Ship is_ready = await wait_for_ship_ready(ship.ip_address) if not is_ready: - # Ship failed to become ready, cleanup logger.error(f"Ship {ship.id} failed health check, cleaning up") if ship.container_id: await get_driver().stop_ship_container(ship.container_id) @@ -197,33 +209,41 @@ async def create_ship(self, request: CreateShipRequest, session_id: str) -> Ship f"Ship failed to become ready within {settings.ship_health_check_timeout} seconds" ) - # Create session-ship relationship - expires_at = datetime.now(timezone.utc) + timedelta(seconds=request.ttl) - session_ship = SessionShip( - session_id=session_id, - ship_id=ship.id, - expires_at=expires_at, - initial_ttl=request.ttl, - ) - await db_service.create_session_ship(session_ship) - ship = await db_service.increment_ship_session_count(ship.id) - - # Mark ship as RUNNING now that it's fully ready - ship.status = ShipStatus.RUNNING - ship = await db_service.update_ship(ship) - - # Schedule TTL cleanup - await self._schedule_cleanup(ship.id, ship.ttl) + # Assign to session + ship = await self._assign_ship_to_session(ship, request, session_id) - logger.info(f"Ship {ship.id} created successfully and is ready") + logger.info(f"Ship {ship.id} created successfully for session {session_id}") return ship except Exception as e: - # Cleanup on failure await db_service.delete_ship(ship.id) logger.error(f"Failed to create ship {ship.id}: {e}") raise + async def 
_assign_ship_to_session( + self, ship: Ship, request: CreateShipRequest, session_id: str + ) -> Ship: + """Assign a ship to a session (1:1 binding).""" + expires_at = datetime.now(timezone.utc) + timedelta(seconds=request.ttl) + + session_ship = SessionShip( + session_id=session_id, + ship_id=ship.id, + expires_at=expires_at, + initial_ttl=request.ttl, + ) + await db_service.create_session_ship(session_ship) + + # Update ship TTL and status + ship.ttl = request.ttl + ship.status = ShipStatus.RUNNING + ship = await db_service.update_ship(ship) + + # Schedule cleanup + await self._schedule_cleanup(ship.id, request.ttl) + + return ship + async def get_ship(self, ship_id: str) -> Optional[Ship]: """Get ship by ID.""" ship = await db_service.get_ship(ship_id) @@ -355,6 +375,29 @@ async def execute_operation( # Forward request to ship container result = await forward_request_to_ship(ship.ip_address, request, session_id) + # Record execution history for python and shell operations + if request.type in ("ipython/exec", "shell/exec"): + exec_type = "python" if request.type == "ipython/exec" else "shell" + code = request.payload.get("code") if request.payload and exec_type == "python" else None + command = request.payload.get("command") if request.payload and exec_type == "shell" else None + execution_time_ms = result.data.get("execution_time_ms") if result.data else None + # Extract optional metadata from payload + description = request.payload.get("description") if request.payload else None + tags = request.payload.get("tags") if request.payload else None + + history = await db_service.create_execution_history( + session_id=session_id, + exec_type=exec_type, + success=result.success, + code=code, + command=command, + execution_time_ms=execution_time_ms, + description=description, + tags=tags, + ) + # Attach execution_id to response + result.execution_id = history.id + # Extend TTL after successful operation if result.success: await self._extend_ttl_after_operation(ship_id, 
session_id) @@ -647,7 +690,7 @@ async def start_ship( # Ship is stopped, restore it try: # Create a minimal CreateShipRequest for restoration - from app.models import CreateShipRequest, ShipSpec + from app.models import CreateShipRequest request = CreateShipRequest(ttl=ttl) return await self._restore_ship(ship, request, session_id) diff --git a/pkgs/bay/dashboard/src/types/api.ts b/pkgs/bay/dashboard/src/types/api.ts index 63c1649..38cfb8b 100644 --- a/pkgs/bay/dashboard/src/types/api.ts +++ b/pkgs/bay/dashboard/src/types/api.ts @@ -35,8 +35,6 @@ export interface Ship { container_id: string | null ip_address: string | null ttl: number - max_session_num: number - current_session_num: number expires_at: string | null } @@ -51,7 +49,6 @@ export interface ShipSpec { export interface CreateShipRequest { ttl: number spec?: ShipSpec - max_session_num?: number force_create?: boolean // If true, skip reuse logic and always create new container } diff --git a/pkgs/bay/dashboard/src/views/ship-create/useCreateShip.ts b/pkgs/bay/dashboard/src/views/ship-create/useCreateShip.ts index 73e649a..e1cffd8 100644 --- a/pkgs/bay/dashboard/src/views/ship-create/useCreateShip.ts +++ b/pkgs/bay/dashboard/src/views/ship-create/useCreateShip.ts @@ -15,7 +15,6 @@ export function useCreateShip() { // 表单数据 const ttlMinutes = ref(60) // 默认60分钟 - const maxSessionNum = ref(1) // 默认1个会话 const cpus = ref(undefined) const memory = ref('') const disk = ref('') @@ -40,9 +39,6 @@ export function useCreateShip() { if (ttlMinutes.value > 1440 * 7) { errs.ttl = 'TTL 最大为 7 天' } - if (maxSessionNum.value < 1) { - errs.maxSessionNum = '最大会话数必须大于 0' - } if (memory.value && !/^\d+(m|g|M|G|Mi|Gi)?$/.test(memory.value)) { errs.memory = '内存格式无效,例如:512m, 1g' } @@ -75,7 +71,6 @@ export function useCreateShip() { const request: CreateShipRequest = { ttl: ttlMinutes.value * 60, // 转换为秒 - max_session_num: createMode.value === 'custom' ? 
maxSessionNum.value : 1, // 自定义模式下强制创建新容器,确保配置生效 force_create: createMode.value === 'custom', } @@ -102,7 +97,6 @@ export function useCreateShip() { return { createMode, ttlMinutes, - maxSessionNum, cpus, memory, disk, diff --git a/pkgs/bay/dashboard/src/views/ship-detail/index.vue b/pkgs/bay/dashboard/src/views/ship-detail/index.vue index 8ed263c..47a4632 100644 --- a/pkgs/bay/dashboard/src/views/ship-detail/index.vue +++ b/pkgs/bay/dashboard/src/views/ship-detail/index.vue @@ -217,14 +217,6 @@ const {
IP 地址
{{ ship.ip_address || '-' }}
-
-
会话数
-
- {{ ship.current_session_num }} - / - {{ ship.max_session_num }} -
-
TTL 配置
{{ Math.floor(ship.ttl / 60) }} 分钟
diff --git a/pkgs/bay/dashboard/src/views/ships/index.vue b/pkgs/bay/dashboard/src/views/ships/index.vue index 308f59b..b6e864b 100644 --- a/pkgs/bay/dashboard/src/views/ships/index.vue +++ b/pkgs/bay/dashboard/src/views/ships/index.vue @@ -164,7 +164,6 @@ const closeDropdownWithDelay = () => { ID IP 地址 状态 - 会话数 剩余时间 (TTL) 操作 @@ -186,15 +185,6 @@ const closeDropdownWithDelay = () => { - -
- - {{ ship.current_session_num }} - - / - {{ ship.max_session_num }} -
- =2.0.0", "kubernetes-asyncio>=34.3.3", "websocket-client>=1.9.0", + "mcp>=1.0.0", ] +[project.scripts] +shipyard-mcp = "app.mcp.run:main" + [project.optional-dependencies] +mcp = ["mcp[cli]>=1.0.0"] test = [ "pytest>=8.0.0", "pytest-asyncio>=0.23.0", diff --git a/pkgs/bay/tests/integration/test_mcp_http.py b/pkgs/bay/tests/integration/test_mcp_http.py new file mode 100644 index 0000000..c496deb --- /dev/null +++ b/pkgs/bay/tests/integration/test_mcp_http.py @@ -0,0 +1,385 @@ +""" +MCP HTTP 模式多客户端隔离测试 + +测试 HTTP 模式下不同 MCP 客户端的 Session 隔离。 +""" + +import asyncio +import os +import sys +from pathlib import Path +from typing import Any, Dict +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +# 添加 app 路径以便测试导入 +sys.path.insert(0, str(Path(__file__).parent.parent.parent)) +sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent / "shipyard_python_sdk")) + + +class MockContext: + """模拟 FastMCP Context 对象用于测试""" + + def __init__(self, session_id: str, lifespan_context: Any = None): + self._session_id = session_id + self._state: Dict[str, Any] = {} + self._lifespan_context = lifespan_context + + @property + def session_id(self) -> str: + return self._session_id + + @property + def request_context(self): + class RequestContext: + lifespan_context = self._lifespan_context + return RequestContext() + + async def get_state(self, key: str) -> Any: + return self._state.get(key) + + async def set_state(self, key: str, value: Any) -> None: + self._state[key] = value + + async def info(self, msg: str) -> None: + pass + + async def warning(self, msg: str) -> None: + pass + + +class MockSandbox: + """模拟 Sandbox 对象""" + + def __init__(self, session_id: str): + self.session_id = session_id + self.ship_id = f"ship-{session_id[:8]}" + self._variables: Dict[str, Any] = {} + self._files: Dict[str, str] = {} + self.started = False + + async def start(self): + self.started = True + return self + + async def stop(self): + self.started = False + + 
async def extend_ttl(self, ttl: int): + pass + + +@pytest.mark.unit +class TestMCPSessionIsolation: + """测试 MCP Session 隔离逻辑""" + + def test_different_sessions_get_different_state(self): + """测试不同 session 获得独立的 state""" + ctx_a = MockContext("session-aaa") + ctx_b = MockContext("session-bbb") + + # 验证 session_id 不同 + assert ctx_a.session_id != ctx_b.session_id + assert ctx_a.session_id == "session-aaa" + assert ctx_b.session_id == "session-bbb" + + @pytest.mark.asyncio + async def test_state_isolation_between_sessions(self): + """测试 state 在不同 session 间隔离""" + ctx_a = MockContext("session-aaa") + ctx_b = MockContext("session-bbb") + + # A 设置状态 + await ctx_a.set_state("sandbox", MockSandbox("session-aaa")) + await ctx_a.set_state("counter", 10) + + # B 看不到 A 的状态 + sandbox_b = await ctx_b.get_state("sandbox") + counter_b = await ctx_b.get_state("counter") + + assert sandbox_b is None + assert counter_b is None + + # A 可以读取自己的状态 + sandbox_a = await ctx_a.get_state("sandbox") + counter_a = await ctx_a.get_state("counter") + + assert sandbox_a is not None + assert sandbox_a.session_id == "session-aaa" + assert counter_a == 10 + + @pytest.mark.asyncio + async def test_same_session_shares_state(self): + """测试同一 session 的多次调用共享状态""" + shared_state = {} + + class SharedMockContext(MockContext): + def __init__(self, session_id: str): + super().__init__(session_id) + self._state = shared_state + + ctx1 = SharedMockContext("session-same") + ctx2 = SharedMockContext("session-same") + + # 第一次调用设置状态 + sandbox = MockSandbox("session-same") + await ctx1.set_state("sandbox", sandbox) + + # 第二次调用(同一 session)可以读取 + sandbox2 = await ctx2.get_state("sandbox") + + assert sandbox2 is sandbox + assert sandbox2.session_id == "session-same" + + +@pytest.mark.unit +class TestGetOrCreateSandbox: + """测试 get_or_create_sandbox 函数逻辑""" + + @pytest.mark.asyncio + async def test_creates_sandbox_on_first_call(self): + """测试首次调用创建新 Sandbox""" + from dataclasses import dataclass + + @dataclass + class 
GlobalConfig: + endpoint: str = "http://localhost:8156" + token: str = "test-token" + default_ttl: int = 1800 + ttl_renew_threshold: int = 600 + + ctx = MockContext("session-new", GlobalConfig()) + + # 首次调用 - sandbox 为空 + sandbox = await ctx.get_state("sandbox") + assert sandbox is None + + # 模拟创建 + new_sandbox = MockSandbox("session-new") + await new_sandbox.start() + await ctx.set_state("sandbox", new_sandbox) + + # 再次获取 + sandbox = await ctx.get_state("sandbox") + assert sandbox is not None + assert sandbox.session_id == "session-new" + assert sandbox.started is True + + @pytest.mark.asyncio + async def test_reuses_existing_sandbox(self): + """测试复用已存在的 Sandbox""" + ctx = MockContext("session-existing") + + # 预设已有 sandbox + existing_sandbox = MockSandbox("session-existing") + await existing_sandbox.start() + await ctx.set_state("sandbox", existing_sandbox) + + # 获取 - 应该返回同一个对象 + sandbox = await ctx.get_state("sandbox") + + assert sandbox is existing_sandbox + assert sandbox.session_id == "session-existing" + + +@pytest.mark.unit +class TestMultiClientIsolationScenario: + """测试多客户端隔离场景""" + + @pytest.mark.asyncio + async def test_client_a_variable_invisible_to_client_b(self): + """ + 场景:客户端 A 设置变量,客户端 B 看不到 + + 模拟: + - Client A: execute_python("x = 123") + - Client B: execute_python("print(x)") -> NameError + """ + # 模拟两个独立的 session 状态 + session_a_state = {} + session_b_state = {} + + class ClientAContext(MockContext): + def __init__(self): + super().__init__("mcp-session-aaa") + self._state = session_a_state + + class ClientBContext(MockContext): + def __init__(self): + super().__init__("mcp-session-bbb") + self._state = session_b_state + + ctx_a = ClientAContext() + ctx_b = ClientBContext() + + # Client A 创建 sandbox 并设置变量 + sandbox_a = MockSandbox("mcp-session-aaa") + sandbox_a._variables["x"] = 123 + await ctx_a.set_state("sandbox", sandbox_a) + + # Client B 获取自己的 sandbox + sandbox_b = await ctx_b.get_state("sandbox") + + # B 没有 sandbox(尚未创建) + assert sandbox_b 
is None + + # 即使 B 创建了 sandbox,也看不到 A 的变量 + sandbox_b = MockSandbox("mcp-session-bbb") + await ctx_b.set_state("sandbox", sandbox_b) + + # A 的变量在 A 的 sandbox 中 + assert "x" in (await ctx_a.get_state("sandbox"))._variables + # B 的 sandbox 中没有 x + assert "x" not in (await ctx_b.get_state("sandbox"))._variables + + @pytest.mark.asyncio + async def test_client_a_file_invisible_to_client_b(self): + """ + 场景:客户端 A 写文件,客户端 B 看不到 + + 模拟: + - Client A: write_file("/workspace/test.txt", "hello") + - Client B: read_file("/workspace/test.txt") -> FileNotFoundError + """ + session_a_state = {} + session_b_state = {} + + class ClientAContext(MockContext): + def __init__(self): + super().__init__("mcp-session-aaa") + self._state = session_a_state + + class ClientBContext(MockContext): + def __init__(self): + super().__init__("mcp-session-bbb") + self._state = session_b_state + + ctx_a = ClientAContext() + ctx_b = ClientBContext() + + # Client A 创建 sandbox 并写入文件 + sandbox_a = MockSandbox("mcp-session-aaa") + sandbox_a._files["/workspace/test.txt"] = "hello from A" + await ctx_a.set_state("sandbox", sandbox_a) + + # Client B 创建自己的 sandbox + sandbox_b = MockSandbox("mcp-session-bbb") + await ctx_b.set_state("sandbox", sandbox_b) + + # A 的文件在 A 的 sandbox 中 + assert "/workspace/test.txt" in (await ctx_a.get_state("sandbox"))._files + # B 的 sandbox 中没有这个文件 + assert "/workspace/test.txt" not in (await ctx_b.get_state("sandbox"))._files + + +@pytest.mark.unit +class TestSessionIdGeneration: + """测试 Session ID 生成""" + + def test_different_contexts_have_different_session_ids(self): + """不同的 context 应该有不同的 session_id""" + ctx1 = MockContext("session-111") + ctx2 = MockContext("session-222") + ctx3 = MockContext("session-333") + + session_ids = {ctx1.session_id, ctx2.session_id, ctx3.session_id} + + assert len(session_ids) == 3 + + def test_session_id_format(self): + """Session ID 应该是可用的字符串""" + ctx = MockContext("mcp-session-abc123") + + assert isinstance(ctx.session_id, str) + assert 
len(ctx.session_id) > 0 + + +@pytest.mark.unit +class TestTTLRenewal: + """测试 TTL 续期逻辑""" + + @pytest.mark.asyncio + async def test_ttl_renewal_on_activity(self): + """测试活动时自动续期""" + from datetime import datetime, timedelta + + ctx = MockContext("session-ttl-test") + + # 设置 sandbox 和上次续期时间(超过阈值) + sandbox = MockSandbox("session-ttl-test") + old_renew_time = datetime.now() - timedelta(minutes=15) # 15 分钟前 + + await ctx.set_state("sandbox", sandbox) + await ctx.set_state("last_ttl_renew", old_renew_time) + + # 检查续期逻辑 + last_renew = await ctx.get_state("last_ttl_renew") + ttl_renew_threshold = 600 # 10 分钟 + + now = datetime.now() + should_renew = (now - last_renew).total_seconds() > ttl_renew_threshold + + assert should_renew is True + + @pytest.mark.asyncio + async def test_no_renewal_within_threshold(self): + """测试阈值内不续期""" + from datetime import datetime, timedelta + + ctx = MockContext("session-ttl-test") + + # 设置上次续期时间(在阈值内) + recent_renew_time = datetime.now() - timedelta(minutes=5) # 5 分钟前 + + await ctx.set_state("last_ttl_renew", recent_renew_time) + + last_renew = await ctx.get_state("last_ttl_renew") + ttl_renew_threshold = 600 # 10 分钟 + + now = datetime.now() + should_renew = (now - last_renew).total_seconds() > ttl_renew_threshold + + assert should_renew is False + + +@pytest.mark.unit +class TestStdioModeCompatibility: + """测试 stdio 模式兼容性""" + + def test_stdio_mode_single_session(self): + """stdio 模式下只有一个 session""" + # 在 stdio 模式下,一个进程 = 一个 session + # 所有请求共享同一个 session_id + single_session_id = "stdio-single-session" + + ctx1 = MockContext(single_session_id) + ctx2 = MockContext(single_session_id) + + # 共享同一个 session_id + assert ctx1.session_id == ctx2.session_id + + @pytest.mark.asyncio + async def test_stdio_mode_state_persistence(self): + """stdio 模式下状态应该持久""" + shared_state = {} + single_session_id = "stdio-single-session" + + class StdioContext(MockContext): + def __init__(self): + super().__init__(single_session_id) + self._state = shared_state + + 
ctx1 = StdioContext() + ctx2 = StdioContext() + + # 第一次请求创建 sandbox + sandbox = MockSandbox(single_session_id) + sandbox._variables["x"] = 42 + await ctx1.set_state("sandbox", sandbox) + + # 第二次请求应该能获取到 + sandbox2 = await ctx2.get_state("sandbox") + + assert sandbox2 is sandbox + assert sandbox2._variables["x"] == 42 diff --git a/pkgs/bay/tests/unit/test_ships.py b/pkgs/bay/tests/unit/test_ships.py index c16665a..757e188 100644 --- a/pkgs/bay/tests/unit/test_ships.py +++ b/pkgs/bay/tests/unit/test_ships.py @@ -26,17 +26,18 @@ def test_create_ship_request_model(self): from app.models import CreateShipRequest, ShipSpec # 基本创建请求 - request = CreateShipRequest(ttl=3600, max_session_num=1) + request = CreateShipRequest(ttl=3600) assert request.ttl == 3600 - assert request.max_session_num == 1 assert request.spec is None + assert request.force_create is False # 带规格的创建请求 spec = ShipSpec(cpus=0.5, memory="256m", disk="1Gi") - request_with_spec = CreateShipRequest(ttl=3600, spec=spec) + request_with_spec = CreateShipRequest(ttl=3600, spec=spec, force_create=True) assert request_with_spec.spec.cpus == 0.5 assert request_with_spec.spec.memory == "256m" assert request_with_spec.spec.disk == "1Gi" + assert request_with_spec.force_create is True def test_create_ship_request_validation(self): """测试 CreateShipRequest 验证""" @@ -45,14 +46,10 @@ def test_create_ship_request_validation(self): # ttl 必须大于 0 with pytest.raises(ValidationError): - CreateShipRequest(ttl=0, max_session_num=1) + CreateShipRequest(ttl=0) with pytest.raises(ValidationError): - CreateShipRequest(ttl=-1, max_session_num=1) - - # max_session_num 必须大于 0 - with pytest.raises(ValidationError): - CreateShipRequest(ttl=3600, max_session_num=0) + CreateShipRequest(ttl=-1) def test_ship_spec_model(self): """测试 ShipSpec 模型""" @@ -97,8 +94,6 @@ def test_ship_response_model(self): container_id="container-abc", ip_address="172.17.0.2", ttl=3600, - max_session_num=2, - current_session_num=1, expires_at=expires_at, ) @@ -107,8 
+102,6 @@ def test_ship_response_model(self): assert response.container_id == "container-abc" assert response.ip_address == "172.17.0.2" assert response.ttl == 3600 - assert response.max_session_num == 2 - assert response.current_session_num == 1 def test_ship_response_optional_fields(self): """测试 ShipResponse 可选字段""" @@ -124,8 +117,6 @@ def test_ship_response_optional_fields(self): container_id=None, ip_address=None, ttl=3600, - max_session_num=1, - current_session_num=0, expires_at=None, ) @@ -343,8 +334,6 @@ def test_ship_base_defaults(self): ship = Ship(ttl=3600) assert ship.status == ShipStatus.CREATING - assert ship.max_session_num == 1 - assert ship.current_session_num == 0 assert ship.container_id is None assert ship.ip_address is None assert ship.id is not None # 自动生成 @@ -362,8 +351,6 @@ def test_ship_with_all_fields(self): container_id="container-123", ip_address="10.0.0.1", ttl=7200, - max_session_num=5, - current_session_num=3, ) assert ship.id == "custom-id" @@ -371,8 +358,6 @@ def test_ship_with_all_fields(self): assert ship.container_id == "container-123" assert ship.ip_address == "10.0.0.1" assert ship.ttl == 7200 - assert ship.max_session_num == 5 - assert ship.current_session_num == 3 class TestWebSocketTerminalLogic: @@ -500,3 +485,64 @@ def test_ship_ip_address_required_for_websocket(self): ip_address=None ) assert ship_without_ip.ip_address is None + + +class TestExecutionHistory: + """Execution History 模型测试""" + + def test_execution_history_model(self): + """测试 ExecutionHistory 模型""" + from app.models import ExecutionHistory + + history = ExecutionHistory( + session_id="test-session", + exec_type="python", + code="print('hello')", + success=True, + execution_time_ms=42, + ) + + assert history.session_id == "test-session" + assert history.exec_type == "python" + assert history.code == "print('hello')" + assert history.success is True + assert history.execution_time_ms == 42 + assert history.id is not None + + def 
test_execution_history_shell(self): + """测试 ExecutionHistory Shell 命令""" + from app.models import ExecutionHistory + + history = ExecutionHistory( + session_id="test-session", + exec_type="shell", + command="ls -la", + success=True, + execution_time_ms=15, + ) + + assert history.exec_type == "shell" + assert history.command == "ls -la" + assert history.code is None + + def test_execution_history_response_model(self): + """测试 ExecutionHistoryResponse 模型""" + from app.models import ExecutionHistoryResponse, ExecutionHistoryEntry + from datetime import datetime, timezone + + now = datetime.now(timezone.utc) + entry = ExecutionHistoryEntry( + id="entry-1", + session_id="test-session", + exec_type="python", + code="print('hello')", + command=None, + success=True, + execution_time_ms=42, + created_at=now, + ) + + response = ExecutionHistoryResponse(entries=[entry], total=1) + assert len(response.entries) == 1 + assert response.total == 1 + assert response.entries[0].code == "print('hello')" diff --git a/pkgs/bay/uv.lock b/pkgs/bay/uv.lock index 8163f78..153776a 100644 --- a/pkgs/bay/uv.lock +++ b/pkgs/bay/uv.lock @@ -175,6 +175,7 @@ dependencies = [ { name = "fastapi" }, { name = "greenlet" }, { name = "kubernetes-asyncio" }, + { name = "mcp" }, { name = "podman" }, { name = "pydantic" }, { name = "pydantic-settings" }, @@ -186,6 +187,9 @@ dependencies = [ ] [package.optional-dependencies] +mcp = [ + { name = "mcp", extra = ["cli"] }, +] test = [ { name = "pytest" }, { name = "pytest-asyncio" }, @@ -202,6 +206,8 @@ requires-dist = [ { name = "fastapi" }, { name = "greenlet", specifier = ">=3.2.4" }, { name = "kubernetes-asyncio", specifier = ">=34.3.3" }, + { name = "mcp", specifier = ">=1.0.0" }, + { name = "mcp", extras = ["cli"], marker = "extra == 'mcp'", specifier = ">=1.0.0" }, { name = "podman", specifier = ">=5.0.0" }, { name = "pydantic" }, { name = "pydantic-settings" }, @@ -214,7 +220,7 @@ requires-dist = [ { name = "uvicorn", extras = ["standard"] }, { name = 
"websocket-client", specifier = ">=1.9.0" }, ] -provides-extras = ["test"] +provides-extras = ["mcp", "test"] [[package]] name = "certifi" @@ -225,6 +231,76 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, 
upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.3" @@ -299,6 +375,68 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + [[package]] name = "docker" version = "7.1.0" @@ -463,6 +601,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + [[package]] name = "httptools" version = "0.6.4" @@ -492,6 +643,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, ] +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = 
"httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -510,6 +685,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "jsonschema" +version = "4.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + [[package]] name = "kubernetes-asyncio" version = "34.3.3" @@ -539,6 +741,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -613,6 +827,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, ] +[[package]] +name = "mcp" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" }, +] + +[package.optional-dependencies] +cli = [ + { name = "python-dotenv" }, + { name = "typer" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + [[package]] name = "multidict" version = "6.6.4" @@ -798,6 +1052,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, ] +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", 
hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + [[package]] name = "pydantic" version = "2.11.9" @@ -901,6 +1164,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + [[package]] name = "pytest" version = "8.4.2" @@ -1034,6 +1311,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + [[package]] name = "requests" version = "2.32.5" @@ -1049,6 +1340,136 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] +[[package]] +name = "rich" +version = "14.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/84/4831f881aa6ff3c976f6d6809b58cdfa350593ffc0dc3c58f5f6586780fb/rich-14.3.1.tar.gz", hash = "sha256:b8c5f568a3a749f9290ec6bddedf835cec33696bfc1e48bcfecb276c7386e4b8", size = 230125, upload-time = "2026-01-24T21:40:44.847Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/2a/a1810c8627b9ec8c57ec5ec325d306701ae7be50235e8fd81266e002a3cc/rich-14.3.1-py3-none-any.whl", hash = "sha256:da750b1aebbff0b372557426fb3f35ba56de8ef954b3190315eb64076d6fb54e", size = 309952, upload-time = "2026-01-24T21:40:42.969Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = 
"sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" }, + { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" }, + { url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" }, + { url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" }, + { url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" }, + { url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" }, + { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 
561011, upload-time = "2025-11-30T20:22:11.309Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" }, + { url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" }, + { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" }, + { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" }, + { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" }, + { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" }, + { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" }, + { url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" }, + { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" }, + { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" }, + { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" }, + { url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" }, + { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" }, + { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" }, + { url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" }, + { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" }, + { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" }, + { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" }, + { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" }, + { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" }, + { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" }, + { url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" }, + { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" }, + { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" }, + { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" }, + { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" }, + { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" }, + { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" }, + { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" }, + { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" }, + { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" }, + { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" }, + { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, + { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, + { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" }, + { url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" }, + { url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" }, + { url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = 
"2025-11-30T20:24:29.457Z" }, + { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" }, + { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" }, + { url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, 
upload-time = "2023-10-24T04:13:38.866Z" }, +] + [[package]] name = "six" version = "1.17.0" @@ -1117,6 +1538,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/57/cf/5d175ce8de07fe694ec4e3d4d65c2dd06cc30f6c79599b31f9d2f6dd2830/sqlmodel-0.0.25-py3-none-any.whl", hash = "sha256:c98234cda701fb77e9dcbd81688c23bb251c13bb98ce1dd8d4adc467374d45b7", size = 28893, upload-time = "2025-09-17T21:44:39.764Z" }, ] +[[package]] +name = "sse-starlette" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/3c/fa6517610dc641262b77cc7bf994ecd17465812c1b0585fe33e11be758ab/sse_starlette-3.0.3.tar.gz", hash = "sha256:88cfb08747e16200ea990c8ca876b03910a23b547ab3bd764c0d8eb81019b971", size = 21943, upload-time = "2025-10-30T18:44:20.117Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/a0/984525d19ca5c8a6c33911a0c164b11490dd0f90ff7fd689f704f84e9a11/sse_starlette-3.0.3-py3-none-any.whl", hash = "sha256:af5bf5a6f3933df1d9c7f8539633dc8444ca6a97ab2e2a7cd3b6e431ac03a431", size = 11765, upload-time = "2025-10-30T18:44:18.834Z" }, +] + [[package]] name = "starlette" version = "0.48.0" @@ -1184,6 +1617,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, ] +[[package]] +name = "typer" +version = "0.21.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = 
"sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0" diff --git a/pkgs/mcp-server/README.md b/pkgs/mcp-server/README.md new file mode 100644 index 0000000..cf441c9 --- /dev/null +++ b/pkgs/mcp-server/README.md @@ -0,0 +1,234 @@ +# shipyard-mcp + +Shipyard MCP Server - Execute Python and shell commands in isolated sandboxes via Model Context Protocol. + +## Overview + +This package provides an MCP (Model Context Protocol) server that enables AI assistants to execute code in secure, isolated sandbox environments powered by [Shipyard](https://github.com/AstrBotDevs/shipyard). + +**Compatible with all major MCP clients:** +- Claude Desktop (Anthropic) +- ChatGPT Desktop (OpenAI) +- Cursor +- VS Code (GitHub Copilot) +- Gemini (Google) +- Any MCP-compatible client + +## Installation + +```bash +npm install -g shipyard-mcp +``` + +**Prerequisites:** +- Node.js 18+ +- Python 3.11+ +- A running Shipyard Bay instance + +## Quick Start + +1. Set your Shipyard access token: +```bash +export SHIPYARD_TOKEN=your-access-token +``` + +2. Run the MCP server: +```bash +shipyard-mcp +``` + +3. 
Configure your MCP client (see below) + +## Configuration + +### Claude Desktop + +Add to `~/.config/claude/claude_desktop_config.json`: + +```json +{ + "mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +### Cursor + +Add to `~/.cursor/mcp.json`: + +```json +{ + "mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +### VS Code (GitHub Copilot) + +Add to VS Code settings: + +```json +{ + "github.copilot.chat.mcpServers": { + "shipyard": { + "command": "shipyard-mcp", + "env": { + "SHIPYARD_ENDPOINT": "http://localhost:8156", + "SHIPYARD_TOKEN": "your-access-token" + } + } + } +} +``` + +## Available Tools + +| Tool | Description | +|------|-------------| +| `execute_python` | Execute Python code in the sandbox | +| `execute_shell` | Execute shell commands | +| `read_file` | Read file contents | +| `write_file` | Write to files | +| `list_files` | List directory contents | +| `install_package` | Install Python packages via pip | +| `get_sandbox_info` | Get current sandbox information | +| `get_execution_history` | View past executions | + +## CLI Options + +```bash +shipyard-mcp [options] + +Options: + --transport Transport mode (default: stdio) + --port HTTP port (default: 8000) + --host HTTP host (default: 0.0.0.0) + --help, -h Show help +``` + +## Environment Variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `SHIPYARD_ENDPOINT` | Bay API URL | `http://localhost:8156` | +| `SHIPYARD_TOKEN` | Access token | (required) | +| `SHIPYARD_SANDBOX_TTL` | Sandbox TTL in seconds | `1800` (30 min) | + +## Transport Modes + +### stdio (Default) + +Standard I/O transport for local integration with desktop apps. One process = one session = one sandbox. 
+ +```bash +shipyard-mcp +``` + +### HTTP (Streamable HTTP) + +HTTP transport for remote/hosted deployments. **Each MCP client session gets its own isolated sandbox.** + +```bash +shipyard-mcp --transport http --port 8000 +``` + +In HTTP mode: +- Each client connection gets a unique session ID +- Sessions are isolated - Client A cannot see Client B's variables or files +- Sandboxes are automatically cleaned up via TTL when clients disconnect +- TTL is renewed on each tool call to keep active sessions alive + +## Architecture + +``` +┌─────────────────┐ MCP Protocol ┌─────────────────┐ +│ MCP Client │◄────────────────────►│ shipyard-mcp │ +│ (Claude/Cursor) │ (stdio) │ (Node.js) │ +└─────────────────┘ └────────┬────────┘ + │ + │ spawns + ▼ + ┌─────────────────┐ + │ Python Server │ + │ (FastMCP) │ + └────────┬────────┘ + │ + │ HTTP/REST + ▼ + ┌─────────────────┐ + │ Bay API │ + └────────┬────────┘ + │ + ▼ + ┌─────────────────┐ + │ Ship Container │ + │ (Python/Shell) │ + └─────────────────┘ +``` + +## Session Isolation (HTTP Mode) + +When running in HTTP mode, each MCP client session is completely isolated: + +``` +Client A (mcp-session-aaa) ──► Sandbox A (ship-111) + │ + │ execute_python("x = 123") ✓ + │ +Client B (mcp-session-bbb) ──► Sandbox B (ship-222) + │ + │ execute_python("print(x)") ✗ NameError (isolated!) 
+``` + +This ensures: +- **State isolation**: Different clients have separate Python variables +- **File isolation**: File operations are container-specific +- **Security**: One client cannot access another's data +- **Resource management**: Each sandbox has its own TTL + +## Security + +- Each session gets a dedicated, isolated container +- Code execution is sandboxed +- Containers have configurable network access +- Resources are automatically cleaned up via TTL +- HTTP mode provides per-client session isolation + +## Development + +```bash +# Clone the repository +git clone https://github.com/AstrBotDevs/shipyard.git +cd shipyard/pkgs/mcp-server + +# Install dependencies +npm install + +# Run locally +npm run build +./bin/shipyard-mcp.js +``` + +## License + +MIT + +## Links + +- [Shipyard GitHub](https://github.com/AstrBotDevs/shipyard) +- [MCP Specification](https://modelcontextprotocol.io) +- [MCP Python SDK](https://github.com/modelcontextprotocol/python-sdk) diff --git a/pkgs/mcp-server/bin/shipyard-mcp.js b/pkgs/mcp-server/bin/shipyard-mcp.js new file mode 100644 index 0000000..8c6892c --- /dev/null +++ b/pkgs/mcp-server/bin/shipyard-mcp.js @@ -0,0 +1,125 @@ +#!/usr/bin/env node +/** + * Shipyard MCP Server CLI Entry Point + * + * This launcher finds and runs the Python-based MCP server. 
+ */ + +import { spawn } from "node:child_process"; +import { fileURLToPath } from "node:url"; +import { dirname, join } from "node:path"; +import { existsSync } from "node:fs"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +// Parse command line arguments +const args = process.argv.slice(2); +let transport = "stdio"; +let port = "8000"; +let host = "0.0.0.0"; +let showHelp = false; + +for (let i = 0; i < args.length; i++) { + if (args[i] === "--transport" && args[i + 1]) { + transport = args[++i]; + } else if (args[i] === "--port" && args[i + 1]) { + port = args[++i]; + } else if (args[i] === "--host" && args[i + 1]) { + host = args[++i]; + } else if (args[i] === "--help" || args[i] === "-h") { + showHelp = true; + } +} + +if (showHelp) { + console.log(` +Shipyard MCP Server + +Execute Python and shell commands in isolated sandboxes via MCP protocol. + +Usage: + shipyard-mcp [options] + +Options: + --transport Transport mode (default: stdio) + --port HTTP port (default: 8000) + --host HTTP host (default: 0.0.0.0) + --help, -h Show this help + +Environment: + SHIPYARD_ENDPOINT Bay API URL (default: http://localhost:8156) + SHIPYARD_TOKEN Access token for Bay API (required) + +Examples: + shipyard-mcp + shipyard-mcp --transport http --port 8000 + SHIPYARD_TOKEN=xxx shipyard-mcp + +More info: https://github.com/AstrBotDevs/shipyard +`); + process.exit(0); +} + +// Check for SHIPYARD_TOKEN +if (!process.env.SHIPYARD_TOKEN) { + console.error("Error: SHIPYARD_TOKEN environment variable is required."); + console.error("Example: export SHIPYARD_TOKEN=your-access-token"); + process.exit(1); +} + +// Find Python +const pythonCmds = ["python3", "python"]; +let pythonCmd = null; + +for (const cmd of pythonCmds) { + try { + const result = spawn(cmd, ["--version"], { stdio: "pipe" }); + if (result.pid) { + pythonCmd = cmd; + result.kill(); + break; + } + } catch { + // Continue + } +} + +if (!pythonCmd) { + console.error("Error: 
Python 3 is required but not found."); + process.exit(1); +} + +// Path to Python server +const pythonServerPath = join(__dirname, "..", "python"); + +if (!existsSync(pythonServerPath)) { + console.error("Error: Python MCP server not found at:", pythonServerPath); + process.exit(1); +} + +// Build args +const pythonArgs = ["-m", "server"]; +if (transport !== "stdio") { + pythonArgs.push("--transport", transport); +} +if (transport === "http") { + pythonArgs.push("--port", port, "--host", host); +} + +// Spawn Python +const child = spawn(pythonCmd, pythonArgs, { + cwd: pythonServerPath, + stdio: "inherit", + env: { ...process.env, PYTHONUNBUFFERED: "1" }, +}); + +child.on("error", (err) => { + console.error("Failed to start:", err.message); + process.exit(1); +}); + +child.on("exit", (code) => process.exit(code ?? 0)); + +process.on("SIGINT", () => child.kill("SIGINT")); +process.on("SIGTERM", () => child.kill("SIGTERM")); diff --git a/pkgs/mcp-server/package.json b/pkgs/mcp-server/package.json new file mode 100644 index 0000000..082b693 --- /dev/null +++ b/pkgs/mcp-server/package.json @@ -0,0 +1,50 @@ +{ + "name": "shipyard-mcp", + "version": "1.0.0", + "description": "Shipyard MCP Server - Execute Python and shell commands in isolated sandboxes via MCP protocol", + "keywords": [ + "mcp", + "model-context-protocol", + "sandbox", + "code-execution", + "ai-agents", + "claude", + "chatgpt", + "cursor", + "shipyard", + "anthropic", + "openai" + ], + "homepage": "https://github.com/AstrBotDevs/shipyard", + "repository": { + "type": "git", + "url": "https://github.com/AstrBotDevs/shipyard.git", + "directory": "pkgs/mcp-server" + }, + "license": "MIT", + "author": "AstrBot Team", + "type": "module", + "bin": { + "shipyard-mcp": "./bin/shipyard-mcp.js" + }, + "files": [ + "bin", + "python", + "README.md" + ], + "scripts": { + "test": "echo \"Tests not implemented yet\"", + "prepublishOnly": "echo 'Ready to publish'" + }, + "engines": { + "node": ">=18.0.0" + }, + "os": [ + 
"darwin", + "linux", + "win32" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/pkgs/mcp-server/python/__init__.py b/pkgs/mcp-server/python/__init__.py new file mode 100644 index 0000000..e96a17e --- /dev/null +++ b/pkgs/mcp-server/python/__init__.py @@ -0,0 +1,5 @@ +"""Shipyard MCP Server - Python Module""" + +from .server import ShipyardMCPServer, main + +__all__ = ["ShipyardMCPServer", "main"] diff --git a/pkgs/mcp-server/python/__main__.py b/pkgs/mcp-server/python/__main__.py new file mode 100644 index 0000000..fccd31d --- /dev/null +++ b/pkgs/mcp-server/python/__main__.py @@ -0,0 +1,6 @@ +"""Shipyard MCP Server - Python Module Entry Point""" + +from .server import main + +if __name__ == "__main__": + main() diff --git a/pkgs/mcp-server/python/server.py b/pkgs/mcp-server/python/server.py new file mode 100644 index 0000000..bdd0476 --- /dev/null +++ b/pkgs/mcp-server/python/server.py @@ -0,0 +1,851 @@ +""" +Shipyard MCP Server - Standalone Python Module + +This module can be run directly or via the npm package launcher. +It provides MCP protocol support for Shipyard sandbox execution. + +Internally uses the Shipyard SDK to communicate with Bay. + +In HTTP mode, each MCP client session gets its own isolated Sandbox. +Session state persists across tool calls within the same MCP session. 
+ +Usage: + python -m server [--transport stdio|http] [--port 8000] [--host 0.0.0.0] + +Environment: + SHIPYARD_ENDPOINT: Bay API URL (default: http://localhost:8156) + SHIPYARD_TOKEN: Access token (required) + SHIPYARD_SANDBOX_TTL: Sandbox TTL in seconds (default: 1800) +""" + +import argparse +import asyncio +import json +import os +import sys +import uuid +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Optional + +# Try to import FastMCP for full MCP support +try: + from mcp.server.fastmcp import Context, FastMCP + FASTMCP_AVAILABLE = True +except ImportError: + FASTMCP_AVAILABLE = False + +# Try to import SDK, fall back to inline implementation if not available +try: + from shipyard import Sandbox, ExecResult + SDK_AVAILABLE = True +except ImportError: + SDK_AVAILABLE = False + +# Inline minimal SDK implementation for standalone npm package +if not SDK_AVAILABLE: + import aiohttp + + @dataclass + class ExecResult: + success: bool + stdout: str = "" + stderr: str = "" + result: Any = None + exit_code: int = 0 + execution_time_ms: int = 0 + code: str = "" + execution_id: Optional[str] = None # ID for precise history lookup + + class Sandbox: + def __init__(self, endpoint: str = None, token: str = None, ttl: int = 3600, session_id: str = None): + self.endpoint = (endpoint or os.getenv("SHIPYARD_ENDPOINT", "http://localhost:8156")).rstrip("/") + self.token = token or os.getenv("SHIPYARD_TOKEN", "") + self.ttl = ttl + self.session_id = session_id or str(uuid.uuid4()) + self._ship_id = None + self._http = None + + async def start(self): + if not self.token: + raise ValueError("SHIPYARD_TOKEN is required") + self._http = aiohttp.ClientSession(headers={"Authorization": f"Bearer {self.token}"}) + async with self._http.post( + f"{self.endpoint}/ship", + json={"ttl": self.ttl}, + headers={"X-SESSION-ID": self.session_id} + ) as resp: + if resp.status == 201: + data = await resp.json() + self._ship_id = data["id"] + else: + error 
= await resp.text() + await self._http.close() + raise RuntimeError(f"Failed to create sandbox: {error}") + self.python = _PythonExec(self) + self.shell = _ShellExec(self) + self.fs = _FileSystem(self) + return self + + async def stop(self): + if self._http: + await self._http.close() + + async def _exec(self, op_type: str, payload: dict) -> dict: + async with self._http.post( + f"{self.endpoint}/ship/{self._ship_id}/exec", + json={"type": op_type, "payload": payload}, + headers={"X-SESSION-ID": self.session_id} + ) as resp: + if resp.status == 200: + return await resp.json() + error = await resp.text() + raise RuntimeError(f"Execution failed: {error}") + + async def extend_ttl(self, ttl: int): + async with self._http.post( + f"{self.endpoint}/ship/{self._ship_id}/extend-ttl", + json={"ttl": ttl} + ) as resp: + if resp.status != 200: + error = await resp.text() + raise RuntimeError(f"Failed to extend TTL: {error}") + + async def get_execution_history(self, exec_type=None, success_only=False, limit=100, tags=None, has_notes=False, has_description=False): + params = {"limit": limit} + if exec_type: + params["exec_type"] = exec_type + if success_only: + params["success_only"] = "true" + if tags: + params["tags"] = tags + if has_notes: + params["has_notes"] = "true" + if has_description: + params["has_description"] = "true" + async with self._http.get( + f"{self.endpoint}/sessions/{self.session_id}/history", + params=params + ) as resp: + if resp.status == 200: + return await resp.json() + return {"entries": [], "total": 0} + + async def get_execution(self, execution_id: str) -> dict: + """Get a specific execution record by ID.""" + async with self._http.get( + f"{self.endpoint}/sessions/{self.session_id}/history/{execution_id}" + ) as resp: + if resp.status == 200: + return await resp.json() + elif resp.status == 404: + raise RuntimeError(f"Execution {execution_id} not found") + error = await resp.text() + raise RuntimeError(f"Failed to get execution: {error}") + + 
async def get_last_execution(self, exec_type: str = None) -> dict: + """Get the most recent execution for this session.""" + params = {} + if exec_type: + params["exec_type"] = exec_type + async with self._http.get( + f"{self.endpoint}/sessions/{self.session_id}/history/last", + params=params + ) as resp: + if resp.status == 200: + return await resp.json() + elif resp.status == 404: + raise RuntimeError("No execution history found") + error = await resp.text() + raise RuntimeError(f"Failed to get last execution: {error}") + + async def annotate_execution(self, execution_id: str, description: str = None, tags: str = None, notes: str = None) -> dict: + """Annotate an execution record with metadata.""" + payload = {} + if description is not None: + payload["description"] = description + if tags is not None: + payload["tags"] = tags + if notes is not None: + payload["notes"] = notes + async with self._http.patch( + f"{self.endpoint}/sessions/{self.session_id}/history/{execution_id}", + json=payload + ) as resp: + if resp.status == 200: + return await resp.json() + elif resp.status == 404: + raise RuntimeError(f"Execution {execution_id} not found") + error = await resp.text() + raise RuntimeError(f"Failed to annotate execution: {error}") + + @property + def ship_id(self): + return self._ship_id + + async def __aenter__(self): + return await self.start() + + async def __aexit__(self, *args): + await self.stop() + + class _PythonExec: + def __init__(self, sandbox): + self._s = sandbox + async def exec(self, code: str, timeout: int = 30, description: str = None, tags: str = None) -> ExecResult: + p = {"code": code, "timeout": timeout} + if description: + p["description"] = description + if tags: + p["tags"] = tags + r = await self._s._exec("ipython/exec", p) + d = r.get("data", r) + return ExecResult(d.get("success", True), d.get("stdout", ""), d.get("stderr", ""), + d.get("result"), 0, d.get("execution_time_ms", 0), d.get("code", code), + r.get("execution_id")) + + class 
_ShellExec: + def __init__(self, sandbox): + self._s = sandbox + async def exec(self, command: str, cwd: str = None, timeout: int = 30, description: str = None, tags: str = None) -> ExecResult: + p = {"command": command, "timeout": timeout} + if cwd: + p["cwd"] = cwd + if description: + p["description"] = description + if tags: + p["tags"] = tags + r = await self._s._exec("shell/exec", p) + d = r.get("data", r) + return ExecResult(d.get("exit_code", 0) == 0, d.get("stdout", ""), d.get("stderr", ""), + None, d.get("exit_code", 0), d.get("execution_time_ms", 0), d.get("command", command), + r.get("execution_id")) + + class _FileSystem: + def __init__(self, sandbox): + self._s = sandbox + async def read(self, path: str) -> str: + r = await self._s._exec("fs/read_file", {"path": path}) + return r.get("data", r).get("content", "") + async def write(self, path: str, content: str): + await self._s._exec("fs/write_file", {"path": path, "content": content}) + async def list(self, path: str = ".") -> list: + r = await self._s._exec("fs/list_dir", {"path": path}) + return r.get("data", r).get("entries", []) + + +def _format_exec_result(result: ExecResult, include_code: bool = False) -> str: + """Format execution result for LLM consumption.""" + parts = [] + + # Always include execution_id if available + if result.execution_id: + parts.append(f"execution_id: {result.execution_id}") + + # Include code if requested + if include_code and result.code: + parts.append(f"Code:\n{result.code}") + + if result.stdout: + parts.append(f"Output:\n{result.stdout}") + if result.stderr: + parts.append(f"Errors:\n{result.stderr}") + if result.result is not None: + parts.append(f"Result: {result.result}") + if result.exit_code != 0: + parts.append(f"Exit code: {result.exit_code}") + + # Include execution time if code is included + if include_code and result.execution_time_ms: + parts.append(f"Execution time: {result.execution_time_ms}ms") + + return "\n\n".join(parts) if parts else "Executed 
successfully (no output)" + + +# ============================================================================= +# FastMCP Implementation (preferred, supports HTTP mode with session isolation) +# ============================================================================= + +if FASTMCP_AVAILABLE: + from collections.abc import AsyncIterator + from contextlib import asynccontextmanager + + @dataclass + class GlobalConfig: + """Global configuration initialized during server lifespan.""" + endpoint: str + token: str + default_ttl: int = 1800 + ttl_renew_threshold: int = 600 + + @asynccontextmanager + async def mcp_lifespan(server: FastMCP) -> AsyncIterator[GlobalConfig]: + """Initialize global configuration.""" + endpoint = os.getenv("SHIPYARD_ENDPOINT", "http://localhost:8156") + token = os.getenv("SHIPYARD_TOKEN", "") + ttl = int(os.getenv("SHIPYARD_SANDBOX_TTL", "1800")) + + if not token: + raise ValueError("SHIPYARD_TOKEN environment variable is required") + + yield GlobalConfig(endpoint=endpoint, token=token, default_ttl=ttl) + + mcp = FastMCP("Shipyard", version="1.0.0", lifespan=mcp_lifespan) + + _sandbox_locks: dict[str, asyncio.Lock] = {} + + async def get_or_create_sandbox(ctx: Context) -> Sandbox: + """Get or create per-session Sandbox.""" + session_id = ctx.session_id + + if session_id not in _sandbox_locks: + _sandbox_locks[session_id] = asyncio.Lock() + + async with _sandbox_locks[session_id]: + sandbox = await ctx.get_state("sandbox") + last_renew = await ctx.get_state("last_ttl_renew") + config: GlobalConfig = ctx.request_context.lifespan_context + + if sandbox is None: + sandbox = Sandbox( + endpoint=config.endpoint, + token=config.token, + ttl=config.default_ttl, + session_id=session_id, + ) + try: + await sandbox.start() + except Exception as e: + raise RuntimeError(f"Failed to create sandbox: {e}") + + await ctx.set_state("sandbox", sandbox) + await ctx.set_state("last_ttl_renew", datetime.now()) + else: + now = datetime.now() + if last_renew is 
None or (now - last_renew).total_seconds() > config.ttl_renew_threshold: + try: + await sandbox.extend_ttl(config.default_ttl) + await ctx.set_state("last_ttl_renew", now) + except Exception: + sandbox = Sandbox( + endpoint=config.endpoint, + token=config.token, + ttl=config.default_ttl, + session_id=session_id, + ) + await sandbox.start() + await ctx.set_state("sandbox", sandbox) + await ctx.set_state("last_ttl_renew", now) + + return sandbox + + @mcp.tool() + async def execute_python(code: str, timeout: int = 30, include_code: bool = False, description: str = None, tags: str = None, ctx: Context = None) -> str: + """Execute Python code in an isolated sandbox. + + Args: + code: Python code to execute + timeout: Execution timeout in seconds (default: 30) + include_code: If True, include the executed code and execution_id in the response. + description: Human-readable description of what this code does (for skill library) + tags: Comma-separated tags for categorization (e.g., 'data-processing,pandas') + """ + sandbox = await get_or_create_sandbox(ctx) + result = await sandbox.python.exec(code, timeout=timeout, description=description, tags=tags) + return _format_exec_result(result, include_code=include_code) + + @mcp.tool() + async def execute_shell(command: str, cwd: str = None, timeout: int = 30, include_code: bool = False, description: str = None, tags: str = None, ctx: Context = None) -> str: + """Execute a shell command in an isolated sandbox. + + Args: + command: Shell command to execute + cwd: Working directory (relative to workspace, optional) + timeout: Execution timeout in seconds (default: 30) + include_code: If True, include the executed command and execution_id in the response. 
+ description: Human-readable description of what this command does (for skill library) + tags: Comma-separated tags for categorization (e.g., 'file-ops,cleanup') + """ + sandbox = await get_or_create_sandbox(ctx) + result = await sandbox.shell.exec(command, cwd=cwd, timeout=timeout, description=description, tags=tags) + return _format_exec_result(result, include_code=include_code) + + @mcp.tool() + async def read_file(path: str, ctx: Context = None) -> str: + """Read file content from the sandbox.""" + sandbox = await get_or_create_sandbox(ctx) + return await sandbox.fs.read(path) + + @mcp.tool() + async def write_file(path: str, content: str, ctx: Context = None) -> str: + """Write content to a file in the sandbox.""" + sandbox = await get_or_create_sandbox(ctx) + await sandbox.fs.write(path, content) + return f"File written: {path}" + + @mcp.tool() + async def list_files(path: str = ".", ctx: Context = None) -> str: + """List files and directories in the sandbox.""" + sandbox = await get_or_create_sandbox(ctx) + entries = await sandbox.fs.list(path) + + if not entries: + return f"Directory '{path}' is empty" + + lines = [] + for entry in entries: + name = entry.get("name", "") + entry_type = entry.get("type", "file") + if entry_type == "directory": + lines.append(f" {name}/") + else: + lines.append(f" {name}") + + return f"Contents of '{path}':\n" + "\n".join(lines) + + @mcp.tool() + async def install_package(package: str, ctx: Context = None) -> str: + """Install a Python package in the sandbox using pip.""" + sandbox = await get_or_create_sandbox(ctx) + result = await sandbox.shell.exec(f"pip install {package}", timeout=120) + + if result.success: + return f"Successfully installed: {package}" + return f"Installation failed: {result.stderr}" + + @mcp.tool() + async def get_sandbox_info(ctx: Context = None) -> str: + """Get information about the current sandbox environment.""" + sandbox = await get_or_create_sandbox(ctx) + return f"Session ID: 
{sandbox.session_id}\nShip ID: {sandbox.ship_id}" + + @mcp.tool() + async def get_execution_history( + exec_type: str = None, success_only: bool = False, limit: int = 50, + tags: str = None, has_notes: bool = False, has_description: bool = False, + ctx: Context = None + ) -> str: + """Get execution history for this session. + + Args: + exec_type: Filter by 'python' or 'shell' (optional) + success_only: Only return successful executions + limit: Maximum entries to return (default: 50) + tags: Filter by tags (comma-separated, matches if any tag is present) + has_notes: Only return entries with notes + has_description: Only return entries with description + """ + sandbox = await get_or_create_sandbox(ctx) + history = await sandbox.get_execution_history( + exec_type=exec_type, success_only=success_only, limit=limit, + tags=tags, has_notes=has_notes, has_description=has_description + ) + + entries = history.get("entries", []) + if not entries: + return "No execution history found" + + lines = [f"Execution History ({history.get('total', 0)} total):"] + for entry in entries: + status = "✓" if entry.get("success") else "✗" + exec_t = entry.get("exec_type", "?") + time_ms = entry.get("execution_time_ms", 0) + code = entry.get("code", "")[:50] # Truncate long code + if len(entry.get("code", "")) > 50: + code += "..." + meta = [] + if entry.get("tags"): + meta.append(f"tags:{entry.get('tags')}") + if entry.get("notes"): + meta.append("has_notes") + meta_str = f" [{', '.join(meta)}]" if meta else "" + lines.append(f" {status} [{exec_t}] {time_ms}ms{meta_str}: {code}") + + return "\n".join(lines) + + @mcp.tool() + async def get_execution(execution_id: str, ctx: Context = None) -> str: + """Get a specific execution record by ID. + + Use this to retrieve the full details of a previous execution, + including the complete code, output, and timing information. 
+ + Args: + execution_id: The execution ID (returned by execute_python/execute_shell + when include_code=True) + """ + sandbox = await get_or_create_sandbox(ctx) + try: + entry = await sandbox.get_execution(execution_id) + except RuntimeError as e: + return f"Error: {e}" + + lines = [f"Execution ID: {entry.get('id', execution_id)}"] + lines.append(f"Type: {entry.get('exec_type', 'unknown')}") + lines.append(f"Success: {entry.get('success', False)}") + + code = entry.get("code") or entry.get("command") + if code: + lines.append(f"\nCode:\n{code}") + + if entry.get("execution_time_ms"): + lines.append(f"\nExecution time: {entry.get('execution_time_ms')}ms") + + if entry.get("created_at"): + lines.append(f"Created at: {entry.get('created_at')}") + + return "\n".join(lines) + + @mcp.tool() + async def get_last_execution(exec_type: str = None, ctx: Context = None) -> str: + """Get the most recent execution for this session. + + Useful for retrieving the full record of what was just executed, + including the complete code for analysis or skill recording. 
+ + Args: + exec_type: Filter by 'python' or 'shell' (optional) + """ + sandbox = await get_or_create_sandbox(ctx) + try: + entry = await sandbox.get_last_execution(exec_type=exec_type) + except RuntimeError as e: + return f"Error: {e}" + + lines = [f"Execution ID: {entry.get('id')}"] + lines.append(f"Type: {entry.get('exec_type', 'unknown')}") + lines.append(f"Success: {entry.get('success', False)}") + + code = entry.get("code") or entry.get("command") + if code: + lines.append(f"\nCode:\n{code}") + + if entry.get("execution_time_ms"): + lines.append(f"\nExecution time: {entry.get('execution_time_ms')}ms") + + if entry.get("created_at"): + lines.append(f"Created at: {entry.get('created_at')}") + + return "\n".join(lines) + + @mcp.tool() + async def annotate_execution(execution_id: str, description: str = None, tags: str = None, notes: str = None, ctx: Context = None) -> str: + """Annotate an execution record with metadata. + + Use this to add descriptions, tags, or notes to an execution after + it has been recorded. Useful for skill library construction. 
+
+        Args:
+            execution_id: The execution ID to annotate
+            description: Human-readable description of what this execution does
+            tags: Comma-separated tags for categorization
+            notes: Agent notes/annotations about this execution
+        """
+        if description is None and tags is None and notes is None:
+            return "Error: At least one of description, tags, or notes must be provided"
+
+        sandbox = await get_or_create_sandbox(ctx)
+        try:
+            entry = await sandbox.annotate_execution(execution_id, description, tags, notes)
+        except RuntimeError as e:
+            return f"Error: {e}"
+
+        lines = [f"Execution ID: {entry.get('id', execution_id)} updated"]
+        if entry.get("description"):
+            lines.append(f"Description: {entry.get('description')}")
+        if entry.get("tags"):
+            lines.append(f"Tags: {entry.get('tags')}")
+        if entry.get("notes"):
+            lines.append(f"Notes: {entry.get('notes')}")
+
+        return "\n".join(lines)
+
+    @mcp.resource("sandbox://info")
+    async def sandbox_info_resource() -> str:
+        """Information about the Shipyard sandbox service."""
+        return """Shipyard Sandbox Service
+
+Shipyard provides secure, isolated Python and shell execution environments
+for AI agents and assistants.
+
+Available tools:
+- execute_python: Run Python code (supports description, tags for skill library)
+- execute_shell: Run shell commands (supports description, tags for skill library)
+- read_file: Read file contents
+- write_file: Write to files
+- list_files: List directory contents
+- install_package: Install Python packages via pip
+- get_sandbox_info: Get current sandbox information
+- get_execution_history: View past executions
+- get_execution: Get specific execution by ID
+- get_last_execution: Get most recent execution
+- annotate_execution: Add notes/tags to an execution record
+
+Each session gets a dedicated container with:
+- Full Python environment (3.13+)
+- Node.js LTS
+- Common CLI tools (git, curl, etc.)
+- Isolated filesystem +- Network access + +Session state persists across tool calls within the same MCP session. +""" + + +# ============================================================================= +# Fallback stdio-only implementation (when FastMCP is not available) +# ============================================================================= + +class ShipyardMCPServer: + """MCP Server using JSON-RPC over stdio (fallback when FastMCP unavailable).""" + + PROTOCOL_VERSION = "2024-11-05" + + def __init__(self): + self.sandbox: Optional[Sandbox] = None + + async def start(self): + endpoint = os.getenv("SHIPYARD_ENDPOINT", "http://localhost:8156") + token = os.getenv("SHIPYARD_TOKEN", "") + ttl = int(os.getenv("SHIPYARD_SANDBOX_TTL", "1800")) + if not token: + raise ValueError("SHIPYARD_TOKEN environment variable is required") + self.sandbox = Sandbox(endpoint=endpoint, token=token, ttl=ttl) + await self.sandbox.start() + + async def stop(self): + if self.sandbox: + await self.sandbox.stop() + + def get_tools(self) -> list[dict]: + return [ + {"name": "execute_python", "description": "Execute Python code in sandbox", + "inputSchema": {"type": "object", "properties": { + "code": {"type": "string", "description": "Python code"}, + "timeout": {"type": "integer", "default": 30}, + "include_code": {"type": "boolean", "default": False, "description": "Include code and execution_id in response"}}, "required": ["code"]}}, + {"name": "execute_shell", "description": "Execute shell command in sandbox", + "inputSchema": {"type": "object", "properties": { + "command": {"type": "string", "description": "Shell command"}, + "cwd": {"type": "string"}, "timeout": {"type": "integer", "default": 30}, + "include_code": {"type": "boolean", "default": False, "description": "Include command and execution_id in response"}}, + "required": ["command"]}}, + {"name": "read_file", "description": "Read file from sandbox", + "inputSchema": {"type": "object", "properties": { + "path": 
{"type": "string"}}, "required": ["path"]}}, + {"name": "write_file", "description": "Write file to sandbox", + "inputSchema": {"type": "object", "properties": { + "path": {"type": "string"}, "content": {"type": "string"}}, + "required": ["path", "content"]}}, + {"name": "list_files", "description": "List files in sandbox directory", + "inputSchema": {"type": "object", "properties": {"path": {"type": "string", "default": "."}}}}, + {"name": "install_package", "description": "Install Python package via pip", + "inputSchema": {"type": "object", "properties": { + "package": {"type": "string"}}, "required": ["package"]}}, + {"name": "get_sandbox_info", "description": "Get sandbox information", + "inputSchema": {"type": "object", "properties": {}}}, + {"name": "get_execution_history", "description": "Get execution history", + "inputSchema": {"type": "object", "properties": { + "exec_type": {"type": "string"}, "success_only": {"type": "boolean"}, + "limit": {"type": "integer", "default": 50}, + "tags": {"type": "string", "description": "Filter by tags (comma-separated)"}, + "has_notes": {"type": "boolean", "description": "Only entries with notes"}, + "has_description": {"type": "boolean", "description": "Only entries with description"}}}}, + {"name": "get_execution", "description": "Get specific execution by ID", + "inputSchema": {"type": "object", "properties": { + "execution_id": {"type": "string", "description": "Execution ID"}}, "required": ["execution_id"]}}, + {"name": "get_last_execution", "description": "Get most recent execution", + "inputSchema": {"type": "object", "properties": { + "exec_type": {"type": "string", "description": "Filter by python or shell"}}}}, + {"name": "annotate_execution", "description": "Annotate an execution with metadata", + "inputSchema": {"type": "object", "properties": { + "execution_id": {"type": "string", "description": "Execution ID"}, + "description": {"type": "string", "description": "Description of execution"}, + "tags": 
{"type": "string", "description": "Comma-separated tags"}, + "notes": {"type": "string", "description": "Agent notes"}}, "required": ["execution_id"]}}, + ] + + async def call_tool(self, name: str, args: dict) -> dict: + try: + if name == "execute_python": + result = await self.sandbox.python.exec(args["code"], args.get("timeout", 30)) + text = _format_exec_result(result, include_code=args.get("include_code", False)) + elif name == "execute_shell": + result = await self.sandbox.shell.exec(args["command"], args.get("cwd"), args.get("timeout", 30)) + text = _format_exec_result(result, include_code=args.get("include_code", False)) + elif name == "read_file": + text = await self.sandbox.fs.read(args["path"]) + elif name == "write_file": + await self.sandbox.fs.write(args["path"], args["content"]) + text = f"File written: {args['path']}" + elif name == "list_files": + entries = await self.sandbox.fs.list(args.get("path", ".")) + if not entries: + text = "Directory is empty" + else: + lines = [f" {e['name']}/" if e.get("type") == "directory" else f" {e['name']}" for e in entries] + text = "\n".join(lines) + elif name == "install_package": + result = await self.sandbox.shell.exec(f"pip install {args['package']}", timeout=120) + text = f"Installed: {args['package']}" if result.success else f"Failed: {result.stderr}" + elif name == "get_sandbox_info": + text = f"Session ID: {self.sandbox.session_id}\nShip ID: {self.sandbox.ship_id}" + elif name == "get_execution_history": + history = await self.sandbox.get_execution_history( + args.get("exec_type"), args.get("success_only", False), args.get("limit", 50), + args.get("tags"), args.get("has_notes", False), args.get("has_description", False)) + entries = history.get("entries", []) + if not entries: + text = "No history" + else: + lines = [f"History ({history.get('total', 0)} total):"] + for e in entries: + s = "✓" if e.get("success") else "✗" + code = e.get("code", "")[:50] + if len(e.get("code", "")) > 50: + code += "..." 
+ meta = [] + if e.get("tags"): + meta.append(f"tags:{e.get('tags')}") + if e.get("notes"): + meta.append("has_notes") + meta_str = f" [{', '.join(meta)}]" if meta else "" + lines.append(f" {s} [{e.get('exec_type', '?')}] {e.get('execution_time_ms', 0)}ms{meta_str}: {code}") + text = "\n".join(lines) + elif name == "get_execution": + try: + entry = await self.sandbox.get_execution(args["execution_id"]) + lines = [f"Execution ID: {entry.get('id', args['execution_id'])}"] + lines.append(f"Type: {entry.get('exec_type', 'unknown')}") + lines.append(f"Success: {entry.get('success', False)}") + code = entry.get("code") or entry.get("command") + if code: + lines.append(f"\nCode:\n{code}") + if entry.get("execution_time_ms"): + lines.append(f"\nExecution time: {entry.get('execution_time_ms')}ms") + text = "\n".join(lines) + except RuntimeError as e: + text = f"Error: {e}" + elif name == "get_last_execution": + try: + entry = await self.sandbox.get_last_execution(args.get("exec_type")) + lines = [f"Execution ID: {entry.get('id')}"] + lines.append(f"Type: {entry.get('exec_type', 'unknown')}") + lines.append(f"Success: {entry.get('success', False)}") + code = entry.get("code") or entry.get("command") + if code: + lines.append(f"\nCode:\n{code}") + if entry.get("execution_time_ms"): + lines.append(f"\nExecution time: {entry.get('execution_time_ms')}ms") + text = "\n".join(lines) + except RuntimeError as e: + text = f"Error: {e}" + elif name == "annotate_execution": + if not args.get("description") and not args.get("tags") and not args.get("notes"): + text = "Error: At least one of description, tags, or notes must be provided" + else: + try: + entry = await self.sandbox.annotate_execution( + args["execution_id"], args.get("description"), args.get("tags"), args.get("notes")) + lines = [f"Execution ID: {entry.get('id', args['execution_id'])} updated"] + if entry.get("description"): + lines.append(f"Description: {entry.get('description')}") + if entry.get("tags"): + 
lines.append(f"Tags: {entry.get('tags')}") + if entry.get("notes"): + lines.append(f"Notes: {entry.get('notes')}") + text = "\n".join(lines) + except RuntimeError as e: + text = f"Error: {e}" + else: + return {"content": [{"type": "text", "text": f"Unknown tool: {name}"}], "isError": True} + return {"content": [{"type": "text", "text": text}], "isError": False} + except Exception as e: + return {"content": [{"type": "text", "text": str(e)}], "isError": True} + + async def handle_request(self, request: dict) -> Optional[dict]: + method = request.get("method") + params = request.get("params", {}) + req_id = request.get("id") + + try: + if method == "initialize": + result = {"protocolVersion": self.PROTOCOL_VERSION, + "capabilities": {"tools": {}}, + "serverInfo": {"name": "shipyard", "version": "1.0.0"}} + elif method == "tools/list": + result = {"tools": self.get_tools()} + elif method == "tools/call": + result = await self.call_tool(params.get("name", ""), params.get("arguments", {})) + elif method == "notifications/initialized": + return None + else: + return {"jsonrpc": "2.0", "id": req_id, + "error": {"code": -32601, "message": f"Method not found: {method}"}} + return {"jsonrpc": "2.0", "id": req_id, "result": result} + except Exception as e: + return {"jsonrpc": "2.0", "id": req_id, "error": {"code": -32000, "message": str(e)}} + + async def run_stdio(self): + reader = asyncio.StreamReader() + protocol = asyncio.StreamReaderProtocol(reader) + await asyncio.get_event_loop().connect_read_pipe(lambda: protocol, sys.stdin) + + await self.start() + try: + while True: + line = await reader.readline() + if not line: + break + line_str = line.decode("utf-8").strip() + if not line_str: + continue + try: + request = json.loads(line_str) + response = await self.handle_request(request) + if response is not None: + sys.stdout.write(json.dumps(response) + "\n") + sys.stdout.flush() + except json.JSONDecodeError: + pass + finally: + await self.stop() + + +# 
============================================================================= +# Entry Point +# ============================================================================= + +async def main_async(transport: str, host: str, port: int): + if transport == "stdio": + if FASTMCP_AVAILABLE: + mcp.run(transport="stdio") + else: + server = ShipyardMCPServer() + await server.run_stdio() + else: + if FASTMCP_AVAILABLE: + mcp.run(transport="streamable-http", host=host, port=port) + else: + print("HTTP transport requires mcp package. Use: pip install mcp", file=sys.stderr) + sys.exit(1) + + +def main(): + parser = argparse.ArgumentParser(description="Shipyard MCP Server") + parser.add_argument("--transport", choices=["stdio", "http"], default="stdio") + parser.add_argument("--port", type=int, default=8000) + parser.add_argument("--host", default="0.0.0.0") + args = parser.parse_args() + + if args.transport == "stdio" and FASTMCP_AVAILABLE: + mcp.run(transport="stdio") + elif args.transport == "http" and FASTMCP_AVAILABLE: + mcp.run(transport="streamable-http", host=args.host, port=args.port) + else: + asyncio.run(main_async(args.transport, args.host, args.port)) + + +if __name__ == "__main__": + main() diff --git a/pkgs/ship/app/components/ipython.py b/pkgs/ship/app/components/ipython.py index 94c6e94..7691810 100644 --- a/pkgs/ship/app/components/ipython.py +++ b/pkgs/ship/app/components/ipython.py @@ -1,4 +1,5 @@ import asyncio +import time from typing import Dict, Any, Optional from fastapi import APIRouter, HTTPException, Header from pydantic import BaseModel @@ -24,6 +25,8 @@ class ExecuteCodeResponse(BaseModel): output: dict = {} error: Optional[str] = None kernel_id: str + code: Optional[str] = None # Original code that was executed + execution_time_ms: Optional[int] = None # Execution time in milliseconds class KernelInfo(BaseModel): @@ -116,6 +119,7 @@ async def execute_code_in_kernel( await ensure_kernel_running(km) kc = km.client() + start_time = time.monotonic() 
try: # 执行代码 @@ -164,21 +168,27 @@ async def execute_code_in_kernel( break outputs["text"] = "".join(plains).strip() + execution_time_ms = int((time.monotonic() - start_time) * 1000) return { "success": error is None, "execution_count": execution_count, "output": outputs, "error": error, + "code": code, + "execution_time_ms": execution_time_ms, } except Exception as e: + execution_time_ms = int((time.monotonic() - start_time) * 1000) print(f"Error during code execution: {e}") return { "success": False, "execution_count": None, "output": {}, "error": f"Execution error: {str(e)}", + "code": code, + "execution_time_ms": execution_time_ms, } @@ -204,6 +214,8 @@ async def execute_code( output=result["output"], error=result["error"], kernel_id=session_id, + code=result.get("code"), + execution_time_ms=result.get("execution_time_ms"), ) except Exception as e: diff --git a/pkgs/ship/app/components/shell.py b/pkgs/ship/app/components/shell.py index 7f9b0aa..ecd89d3 100644 --- a/pkgs/ship/app/components/shell.py +++ b/pkgs/ship/app/components/shell.py @@ -23,6 +23,8 @@ class ExecuteShellResponse(BaseModel): pid: Optional[int] = None process_id: Optional[str] = None # 用于后台进程 error: Optional[str] = None + command: Optional[str] = None # Original command that was executed + execution_time_ms: Optional[int] = None # Execution time in milliseconds class ProcessInfo(BaseModel): diff --git a/pkgs/ship/app/components/user_manager.py b/pkgs/ship/app/components/user_manager.py index 4fc6b3b..4932365 100644 --- a/pkgs/ship/app/components/user_manager.py +++ b/pkgs/ship/app/components/user_manager.py @@ -14,6 +14,7 @@ import shlex import json import uuid +import time from dataclasses import dataclass from pathlib import Path from typing import Dict, Optional, List, Tuple @@ -50,6 +51,8 @@ class ProcessResult: pid: Optional[int] = None process_id: Optional[str] = None error: Optional[str] = None + command: Optional[str] = None # Original command that was executed + execution_time_ms: 
Optional[int] = None # Execution time in milliseconds class BackgroundProcessEntry: @@ -561,6 +564,7 @@ async def run_as_user( background: bool = False, ) -> ProcessResult: """以指定用户身份运行命令""" + start_time = time.monotonic() try: username = await get_or_create_session_user(session_id) user_info = await UserManager.get_user_info(username) @@ -670,6 +674,7 @@ async def run_as_user( process_id, command, ) + execution_time_ms = int((time.monotonic() - start_time) * 1000) return ProcessResult( success=True, return_code=0, @@ -677,12 +682,15 @@ async def run_as_user( stderr="", pid=process.pid, process_id=process_id, + command=command, + execution_time_ms=execution_time_ms, ) else: try: stdout, stderr = await asyncio.wait_for( process.communicate(), timeout=timeout ) + execution_time_ms = int((time.monotonic() - start_time) * 1000) return ProcessResult( success=process.returncode == 0, return_code=process.returncode, @@ -690,10 +698,13 @@ async def run_as_user( stderr=stderr.decode().strip(), pid=process.pid, process_id=None, + command=command, + execution_time_ms=execution_time_ms, ) except asyncio.TimeoutError: process.kill() await process.communicate() + execution_time_ms = int((time.monotonic() - start_time) * 1000) return ProcessResult( success=False, return_code=-1, @@ -702,6 +713,8 @@ async def run_as_user( pid=process.pid, process_id=None, error="Command timed out", + command=command, + execution_time_ms=execution_time_ms, ) except Exception as e: @@ -712,6 +725,7 @@ async def run_as_user( cwd, list(env.keys()) if env else [], ) + execution_time_ms = int((time.monotonic() - start_time) * 1000) return ProcessResult( success=False, return_code=-1, @@ -720,4 +734,6 @@ async def run_as_user( error=str(e), pid=None, process_id=None, + command=command, + execution_time_ms=execution_time_ms, ) diff --git a/plans/agent_integration_guide.md b/plans/agent_integration_guide.md new file mode 100644 index 0000000..1d0198c --- /dev/null +++ b/plans/agent_integration_guide.md @@ 
-0,0 +1,143 @@
+# Agent 集成指南:如何调用 Shipyard
+
+本文档将指导你如何将 AI Agent 接入 Shipyard,利用其提供的安全沙箱能力。
+
+## 1. 核心概念
+
+在开始之前,理解以下三个概念至关重要:
+
+* **Bay**: Shipyard 的控制中心。你的 Agent 主要与 Bay 交互,向它申请资源。
+* **Ship**: 实际的执行沙箱。你的代码和命令在这里运行。
+* **Session ID**: 会话标识符。这是 Shipyard 最核心的设计。
+    * 同一个 Session ID 对应同一个工作目录(`/workspace/{session_id}`)。
+    * 同一个 Session ID 可以跨请求复用 Python 解释器状态。
+    * **最佳实践**: 为每个 Agent 任务或用户会话生成一个唯一的 UUID 作为 Session ID。
+
+## 2. 调用流程
+
+标准的调用生命周期如下:
+
+```mermaid
+sequenceDiagram
+    participant Agent
+    participant Bay
+    participant Ship
+
+    Note over Agent: 1. 准备阶段
+    Agent->>Agent: 生成 Session ID (UUID)
+
+    Note over Agent: 2. 申请资源
+    Agent->>Bay: POST /ship (带 Session ID)
+    Bay-->>Agent: 返回 Ship ID 和连接信息
+
+    Note over Agent: 3. 执行任务 (循环)
+    loop 任务执行中
+        Agent->>Ship: POST /fs/write_file (上传代码/数据)
+        Ship-->>Agent: 确认写入
+
+        Agent->>Ship: POST /ipython/exec (执行 Python)
+        Ship-->>Agent: 返回执行结果 (stdout/stderr)
+
+        Agent->>Ship: POST /shell/exec (执行 Shell)
+        Ship-->>Agent: 返回命令输出
+    end
+
+    Note over Agent: 4. 资源释放
+    Agent->>Bay: DELETE /ship/{id} (可选,TTL 会自动回收)
+    Bay-->>Agent: 确认删除
+```
+
+## 3. 接入方式
+
+### 方式一:使用 Python SDK(推荐)
+
+如果你使用 Python 开发 Agent,SDK 是最便捷的方式。
+
+**安装**
+```bash
+pip install shipyard-python-sdk
+```
+
+**代码示例**
+
+```python
+import asyncio
+import uuid
+from shipyard_python_sdk import ShipyardClient, Spec
+
+async def run_agent_task():
+    # 1. 初始化客户端
+    client = ShipyardClient(
+        endpoint_url="http://localhost:8156",  # Bay 服务地址
+        access_token="secret-token"  # 鉴权 Token
+    )
+
+    # 2. 生成 Session ID
+    session_id = str(uuid.uuid4())
+    print(f"Task Session ID: {session_id}")
+
+    try:
+        # 3. 创建或获取 Ship (沙箱)
+        # ttl: 存活时间(秒), spec: 资源规格
+        ship = await client.create_ship(
+            ttl=3600,
+            spec=Spec(cpus=1.0, memory="512m"),
+            session_id=session_id,
+        )
+
+        # 4. 文件操作:写入数据
+        await ship.fs.write_file(
+            "data.csv",
+            "name,age\nAlice,30\nBob,25"
+        )
+
+        # 5. 
Python 执行:数据分析
+        # 注意:代码在 IPython 内核中运行,状态会保留
+        code = """
+import pandas as pd
+df = pd.read_csv('data.csv')
+print(df.describe())
+mean_age = df['age'].mean()
+        """
+        result = await ship.python.exec(code)
+        print("Python Output:", result.text)
+
+        # 6. Shell 执行:查看文件
+        shell_res = await ship.shell.exec("ls -l")
+        print("Shell Output:", shell_res.stdout)
+
+    finally:
+        # 7. 清理资源 (可选,也可以留给 TTL 自动清理以供后续复用)
+        await client.close()
+
+if __name__ == "__main__":
+    asyncio.run(run_agent_task())
+```
+
+### 方式二:直接调用 HTTP API
+
+适用于非 Python 环境(如 Node.js, Go, Java 等)。
+
+**1. 创建 Ship**
+* **Endpoint**: `POST http://bay-url/ship`
+* **Headers**:
+    * `Authorization: Bearer <access-token>`
+    * `X-SESSION-ID: <session-id>`
+* **Body**: `{"ttl": 3600}`
+
+**2. 执行 Python 代码**
+* **Endpoint**: `POST http://ship-url/ipython/exec` (注意:这里直接请求 Ship 的地址,或者通过 Bay 代理)
+* **Headers**: `X-SESSION-ID: <session-id>`
+* **Body**: `{"code": "print('hello')"}`
+
+**3. 执行 Shell 命令**
+* **Endpoint**: `POST http://ship-url/shell/exec`
+* **Headers**: `X-SESSION-ID: <session-id>`
+* **Body**: `{"command": "ls -la"}`
+
+## 4. 最佳实践
+
+1. **Session ID 管理**: 始终为每个独立的任务上下文使用唯一的 Session ID。如果 Agent 需要“记忆”之前的变量,请复用同一个 Session ID。
+2. **错误处理**: 总是检查 API 返回的状态码。Shipyard 会在执行出错时返回详细的错误信息。
+3. **资源清理**: 虽然 Shipyard 有 TTL(超时自动销毁)机制,但显式调用 DELETE 接口可以更高效地释放资源。
+4. **安全**: 不要在沙箱中存储敏感密钥。如果需要访问外部 API,建议通过环境变量或临时文件传入 Token,并在使用后立即删除。
diff --git a/plans/project_introduction.md b/plans/project_introduction.md
new file mode 100644
index 0000000..652b74e
--- /dev/null
+++ b/plans/project_introduction.md
@@ -0,0 +1,55 @@
+# Shipyard 项目介绍
+
+## 1. 项目定位:什么是 Shipyard?
+
+**Shipyard** 是一个专为 AI Agent 设计的轻量级沙箱(Sandbox)环境。
+
+在 AI Agent 的应用场景中,经常需要执行代码(如 Python)、运行 Shell 命令或操作文件系统。直接在宿主机或未隔离的环境中运行这些操作极具风险。Shipyard 提供了一套安全、隔离且可复用的执行环境,让 Agent 能够放心地“大展拳脚”。
+
+它由三个核心部分组成:
+* **Bay (控制面)**: 负责管理沙箱的生命周期、调度资源和路由请求。
+* **Ship (执行面)**: 实际的容器化沙箱,提供 Python、Shell 和文件系统 API。
+* **Python SDK**: 方便上层应用(如 Agent 框架)快速集成。
+
+## 2. 开发背景:为什么需要 Shipyard? 
+ +随着 LLM(大语言模型)和 AI Agent 的兴起,Agent 需要具备“行动”的能力,而不仅仅是“说话”。 + +* **安全隔离需求**: Agent 生成的代码可能包含有害操作,必须在隔离环境中运行。 +* **状态保持需求**: 许多任务是多轮交互的(例如数据分析),需要保持 Python 解释器的状态(变量、函数定义等),而不是每次请求都重启。 +* **性能与开销**: 传统的虚拟机或为每个请求启动新容器的方式开销大、启动慢。Shipyard 专注于**轻量级**和**会话复用**,通过 Session ID 机制在同一个容器内隔离不同会话的工作目录,既保证了隔离性,又极大降低了资源消耗和启动延迟。 +* **灵活性**: 支持多种底层运行时(Docker, Podman, Kubernetes),适应从本地开发到生产集群的不同部署需求。 + +## 3. 核心能力:能做什么? + +Shipyard 为 AI Agent 提供了以下核心能力: + +### 3.1 有状态的 Python 代码执行 +* 基于 IPython 内核,支持变量驻留和上下文保持。 +* 适合数据分析、科学计算、代码生成与验证等场景。 + +### 3.2 Shell 命令执行 +* 执行标准 Linux Shell 命令。 +* 支持后台进程管理,适合运行长时间任务或系统工具。 + +### 3.3 文件系统操作 +* 提供完整的文件读写、上传、删除、列表等能力。 +* **工作目录隔离**: 每个 Session 拥有独立的工作空间(`/workspace/{session_id}`),互不干扰。 + +### 3.4 数据持久化 +* 支持跨容器重启的数据持久化。 +* 在 Docker/Podman 下通过挂载宿主机目录,在 Kubernetes 下通过 PVC 实现。 + +## 4. 还有什么可以做的?(未来展望) + +当前项目处于 **Technical Preview** 阶段,核心功能已就绪,但仍有广阔的扩展空间: + +* **增强安全性**: 进一步加固容器隔离,限制资源使用(CPU/内存配额),网络访问控制等。 +* **更多运行时支持**: 计划支持 `containerd` 等更多底层运行时。 +* **可观测性**: 增强日志、监控和审计功能,方便追踪 Agent 的行为。 +* **SDK 丰富化**: 提供更多语言的 SDK,或更高级的 Agent 框架集成(如 LangChain, AutoGen 集成)。 +* **镜像生态**: 构建预装常用数据科学或开发工具的 Ship 镜像,减少运行时安装依赖的时间。 + +## 总结 + +Shipyard 旨在成为 AI Agent 的“安全游乐场”,通过轻量、高效、隔离的设计,解决 Agent 执行代码和操作系统的安全与状态管理难题。 diff --git a/shipyard_python_sdk/shipyard/__init__.py b/shipyard_python_sdk/shipyard/__init__.py index 74483b5..71453c0 100644 --- a/shipyard_python_sdk/shipyard/__init__.py +++ b/shipyard_python_sdk/shipyard/__init__.py @@ -3,6 +3,13 @@ A Python SDK for interacting with Shipyard containerized execution environments. Provides convenient access to file system, shell, and Python execution capabilities. 
+ +Quick Start: + from shipyard import Sandbox + + async with Sandbox() as sandbox: + result = await sandbox.python.exec("print('hello')") + print(result.stdout) """ from .types import Spec, ShipInfo @@ -12,10 +19,17 @@ from .shell import ShellComponent from .python import PythonComponent from .utils import create_session_ship +from .sandbox import Sandbox, ExecResult, run_python, run_shell __version__ = "1.0.0" __all__ = [ + # New unified interface + "Sandbox", + "ExecResult", + "run_python", + "run_shell", + # Legacy interface (still supported) "Spec", "ShipInfo", "ShipyardClient", diff --git a/shipyard_python_sdk/shipyard/client.py b/shipyard_python_sdk/shipyard/client.py index 51c8ee7..94e49eb 100644 --- a/shipyard_python_sdk/shipyard/client.py +++ b/shipyard_python_sdk/shipyard/client.py @@ -5,6 +5,7 @@ import os import aiohttp from typing import Optional, Dict, Any, Union +import warnings from .types import Spec from .session import SessionShip @@ -65,21 +66,31 @@ async def create_ship( self, ttl: int, spec: Optional[Spec] = None, - max_session_num: int = 1, + max_session_num: int | None = None, session_id: Optional[str] = None, + force_create: bool = False, ) -> SessionShip: """ - Create a new ship or reuse an existing one + Create a new ship or reuse an existing one for the session. + + With 1:1 Session-Ship binding, each session gets a dedicated ship. Args: ttl: Time to live in seconds spec: Ship specifications for resource allocation - max_session_num: Maximum number of sessions that can use this ship + max_session_num: Deprecated. Ignored (Shipyard enforces 1:1 binding). 
session_id: Session ID (if not provided, a random one will be generated) + force_create: If True, skip reuse logic and always create new container Returns: SessionShip: The created or reused ship session """ + if max_session_num is not None: + warnings.warn( + "`max_session_num` is deprecated and ignored (Shipyard enforces 1:1 Session-Ship binding).", + DeprecationWarning, + stacklevel=2, + ) if session_id is None: import uuid @@ -88,7 +99,10 @@ async def create_ship( session = await self._get_session() # Prepare request payload - payload: Dict[str, Any] = {"ttl": ttl, "max_session_num": max_session_num} + payload: Dict[str, Any] = {"ttl": ttl} + + if force_create: + payload["force_create"] = True if spec: spec_dict: Dict[str, Union[float, str]] = {} @@ -113,6 +127,52 @@ async def create_ship( f"Failed to create ship: {response.status} {error_text}" ) + async def get_or_create_session( + self, + session_id: str, + ttl: int = 3600, + spec: Optional[Spec] = None, + ) -> SessionShip: + """ + Get or create a session with a dedicated ship. + + This is the recommended Session-First API. If a session already exists + with a running ship, it will be returned. Otherwise, a new ship will + be created (or allocated from the warm pool). + + Args: + session_id: The session ID to get or create + ttl: Time to live in seconds (default: 1 hour) + spec: Ship specifications for resource allocation + + Returns: + SessionShip: The session's ship + """ + return await self.create_ship(ttl=ttl, spec=spec, session_id=session_id) + + def session( + self, + session_id: str, + ttl: int = 3600, + spec: Optional[Spec] = None, + ) -> "SessionContext": + """ + Context manager for working with a session. 
+ + Usage: + async with client.session("my-session") as session: + result = await session.python.exec("print('hello')") + + Args: + session_id: The session ID + ttl: Time to live in seconds (default: 1 hour) + spec: Ship specifications for resource allocation + + Returns: + SessionContext: Context manager that yields a SessionShip + """ + return SessionContext(self, session_id, ttl, spec) + async def get_ship(self, ship_id: str) -> Optional[Dict[str, Any]]: """Get ship information by ID""" session = await self._get_session() @@ -235,3 +295,76 @@ async def download_file( raise Exception( f"Failed to download file: {response.status} {error_text}" ) + + async def get_execution_history( + self, + session_id: str, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, + ) -> Dict[str, Any]: + """ + Get execution history for a session. + + This enables agents to retrieve their successful execution paths + for skill library construction (inspired by VOYAGER). 
+ + Args: + session_id: The session ID + exec_type: Filter by type ('python' or 'shell') + success_only: If True, only return successful executions + limit: Maximum number of entries to return + offset: Number of entries to skip + + Returns: + Dict with 'entries' list and 'total' count + """ + session = await self._get_session() + + params: Dict[str, Any] = {"limit": limit, "offset": offset} + if exec_type: + params["exec_type"] = exec_type + if success_only: + params["success_only"] = "true" + + async with session.get( + f"{self.endpoint_url}/sessions/{session_id}/history", + params=params, + ) as response: + if response.status == 200: + return await response.json() + else: + error_text = await response.text() + raise Exception( + f"Failed to get execution history: {response.status} {error_text}" + ) + + +class SessionContext: + """Context manager for working with a session.""" + + def __init__( + self, + client: ShipyardClient, + session_id: str, + ttl: int, + spec: Optional[Spec], + ): + self._client = client + self._session_id = session_id + self._ttl = ttl + self._spec = spec + self._ship: Optional[SessionShip] = None + + async def __aenter__(self) -> SessionShip: + self._ship = await self._client.get_or_create_session( + session_id=self._session_id, + ttl=self._ttl, + spec=self._spec, + ) + return self._ship + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # Session resources are managed by TTL, no cleanup needed + pass diff --git a/shipyard_python_sdk/shipyard/sandbox.py b/shipyard_python_sdk/shipyard/sandbox.py new file mode 100644 index 0000000..c067286 --- /dev/null +++ b/shipyard_python_sdk/shipyard/sandbox.py @@ -0,0 +1,453 @@ +""" +Shipyard Python SDK - Sandbox Interface + +This module provides a simple interface for code execution in Shipyard sandboxes. +It connects to a Bay service which manages container lifecycle, session state, +and execution history. 
+ +Usage: + from shipyard import Sandbox + + async with Sandbox() as sandbox: + result = await sandbox.python.exec("print('hello')") + print(result.stdout) + +Environment Variables: + SHIPYARD_ENDPOINT: Bay API URL (default: http://localhost:8156) + SHIPYARD_TOKEN: Access token for authentication (required) +""" + +import os +import uuid +from dataclasses import dataclass +from typing import Any, Dict, Optional + +import aiohttp + + +@dataclass +class ExecResult: + """Result of code execution.""" + + success: bool + stdout: str = "" + stderr: str = "" + result: Any = None + exit_code: int = 0 + execution_time_ms: int = 0 + code: str = "" + execution_id: Optional[str] = None # ID for precise history lookup + + +class PythonExecutor: + """Python execution interface.""" + + def __init__(self, sandbox: "Sandbox"): + self._sandbox = sandbox + + async def exec( + self, + code: str, + timeout: int = 30, + description: Optional[str] = None, + tags: Optional[str] = None, + ) -> ExecResult: + """Execute Python code in the sandbox. 
+ + Args: + code: Python code to execute + timeout: Execution timeout in seconds + description: Human-readable description of what this code does + tags: Comma-separated tags for categorization + """ + payload: Dict[str, Any] = {"code": code, "timeout": timeout} + if description: + payload["description"] = description + if tags: + payload["tags"] = tags + + result = await self._sandbox._exec("ipython/exec", payload) + data = result.get("data", result) + return ExecResult( + success=data.get("success", True), + stdout=data.get("stdout", ""), + stderr=data.get("stderr", ""), + result=data.get("result"), + execution_time_ms=data.get("execution_time_ms", 0), + code=data.get("code", code), + execution_id=result.get("execution_id"), + ) + + +class ShellExecutor: + """Shell execution interface.""" + + def __init__(self, sandbox: "Sandbox"): + self._sandbox = sandbox + + async def exec( + self, + command: str, + cwd: Optional[str] = None, + timeout: int = 30, + description: Optional[str] = None, + tags: Optional[str] = None, + ) -> ExecResult: + """Execute shell command in the sandbox. 
+ + Args: + command: Shell command to execute + cwd: Working directory (optional) + timeout: Execution timeout in seconds + description: Human-readable description of what this command does + tags: Comma-separated tags for categorization + """ + payload: Dict[str, Any] = {"command": command, "timeout": timeout} + if cwd: + payload["cwd"] = cwd + if description: + payload["description"] = description + if tags: + payload["tags"] = tags + + result = await self._sandbox._exec("shell/exec", payload) + data = result.get("data", result) + return ExecResult( + success=data.get("exit_code", 0) == 0, + stdout=data.get("stdout", ""), + stderr=data.get("stderr", ""), + exit_code=data.get("exit_code", 0), + execution_time_ms=data.get("execution_time_ms", 0), + code=data.get("command", command), + execution_id=result.get("execution_id"), + ) + + +class FileSystem: + """File system interface.""" + + def __init__(self, sandbox: "Sandbox"): + self._sandbox = sandbox + + async def read(self, path: str) -> str: + """Read file content from the sandbox.""" + result = await self._sandbox._exec("fs/read_file", {"path": path}) + data = result.get("data", result) + return data.get("content", "") + + async def write(self, path: str, content: str) -> None: + """Write content to a file in the sandbox.""" + await self._sandbox._exec("fs/write_file", {"path": path, "content": content}) + + async def list(self, path: str = ".") -> list: + """List files in a directory.""" + result = await self._sandbox._exec("fs/list_dir", {"path": path}) + data = result.get("data", result) + return data.get("entries", []) + + +class Sandbox: + """ + Sandbox interface for code execution via Shipyard Bay. + + Requires a running Bay service for container management, + session state, and execution history. 
+ + Usage: + async with Sandbox() as sandbox: + result = await sandbox.python.exec("print('hello')") + print(result.stdout) + + # With custom configuration + async with Sandbox( + endpoint="http://bay.example.com:8156", + token="your-token", + ttl=7200, + session_id="my-session" + ) as sandbox: + result = await sandbox.shell.exec("ls -la") + """ + + def __init__( + self, + endpoint: Optional[str] = None, + token: Optional[str] = None, + ttl: int = 3600, + session_id: Optional[str] = None, + ): + """ + Initialize sandbox connection to Bay. + + Args: + endpoint: Bay API URL (or SHIPYARD_ENDPOINT env var) + token: Access token (or SHIPYARD_TOKEN env var) + ttl: Session TTL in seconds (default: 1 hour) + session_id: Session ID for state persistence (auto-generated if not provided) + """ + self.endpoint = ( + endpoint or os.getenv("SHIPYARD_ENDPOINT", "http://localhost:8156") + ).rstrip("/") + self.token = token or os.getenv("SHIPYARD_TOKEN", "") + self.ttl = ttl + self.session_id = session_id or str(uuid.uuid4()) + + self._ship_id: Optional[str] = None + self._http: Optional[aiohttp.ClientSession] = None + + # Component interfaces (initialized on start) + self.python: PythonExecutor + self.shell: ShellExecutor + self.fs: FileSystem + + async def start(self) -> "Sandbox": + """Start the sandbox session.""" + if not self.token: + raise ValueError( + "SHIPYARD_TOKEN is required. Set it via environment variable or constructor." 
+ ) + + headers = {"Authorization": f"Bearer {self.token}"} + self._http = aiohttp.ClientSession(headers=headers) + + # Create ship via Bay + payload = {"ttl": self.ttl} + req_headers = {"X-SESSION-ID": self.session_id} + + async with self._http.post( + f"{self.endpoint}/ship", json=payload, headers=req_headers + ) as resp: + if resp.status == 201: + data = await resp.json() + self._ship_id = data["id"] + else: + error = await resp.text() + await self._http.close() + raise RuntimeError(f"Failed to create sandbox: {error}") + + # Initialize component interfaces + self.python = PythonExecutor(self) + self.shell = ShellExecutor(self) + self.fs = FileSystem(self) + + return self + + async def stop(self) -> None: + """Stop the sandbox session (resources managed by TTL).""" + if self._http: + await self._http.close() + self._http = None + + async def _exec(self, op_type: str, payload: Dict[str, Any]) -> Dict[str, Any]: + """Execute operation on the sandbox.""" + if not self._http or not self._ship_id: + raise RuntimeError("Sandbox not started. 
Use 'async with Sandbox()' or call start().") + + headers = {"X-SESSION-ID": self.session_id} + async with self._http.post( + f"{self.endpoint}/ship/{self._ship_id}/exec", + json={"type": op_type, "payload": payload}, + headers=headers, + ) as resp: + if resp.status == 200: + return await resp.json() + else: + error = await resp.text() + raise RuntimeError(f"Execution failed: {error}") + + async def extend_ttl(self, ttl: int) -> None: + """Extend the sandbox TTL.""" + if not self._http or not self._ship_id: + raise RuntimeError("Sandbox not started.") + + async with self._http.post( + f"{self.endpoint}/ship/{self._ship_id}/extend-ttl", + json={"ttl": ttl}, + ) as resp: + if resp.status != 200: + error = await resp.text() + raise RuntimeError(f"Failed to extend TTL: {error}") + + async def get_execution_history( + self, + exec_type: Optional[str] = None, + success_only: bool = False, + limit: int = 100, + tags: Optional[str] = None, + has_notes: bool = False, + has_description: bool = False, + ) -> Dict[str, Any]: + """ + Get execution history for this session. + + Useful for building skill libraries (VOYAGER-style). 
+ + Args: + exec_type: Filter by 'python' or 'shell' + success_only: Only return successful executions + limit: Maximum entries to return + tags: Filter by tags (comma-separated, matches if any tag is present) + has_notes: Only return entries with notes + has_description: Only return entries with description + + Returns: + Dict with 'entries' and 'total' + """ + if not self._http: + raise RuntimeError("Sandbox not started.") + + params: Dict[str, Any] = {"limit": limit} + if exec_type: + params["exec_type"] = exec_type + if success_only: + params["success_only"] = "true" + if tags: + params["tags"] = tags + if has_notes: + params["has_notes"] = "true" + if has_description: + params["has_description"] = "true" + + async with self._http.get( + f"{self.endpoint}/sessions/{self.session_id}/history", + params=params, + ) as resp: + if resp.status == 200: + return await resp.json() + else: + error = await resp.text() + raise RuntimeError(f"Failed to get history: {error}") + + async def get_execution(self, execution_id: str) -> Dict[str, Any]: + """ + Get a specific execution record by ID. + + Args: + execution_id: The execution history ID + + Returns: + Dict with execution details including code, success, output, etc. + """ + if not self._http: + raise RuntimeError("Sandbox not started.") + + async with self._http.get( + f"{self.endpoint}/sessions/{self.session_id}/history/{execution_id}", + ) as resp: + if resp.status == 200: + return await resp.json() + elif resp.status == 404: + raise RuntimeError(f"Execution {execution_id} not found") + else: + error = await resp.text() + raise RuntimeError(f"Failed to get execution: {error}") + + async def get_last_execution( + self, + exec_type: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Get the most recent execution for this session. + + Args: + exec_type: Filter by 'python' or 'shell' (optional) + + Returns: + Dict with execution details including code, success, output, etc. 
+ """ + if not self._http: + raise RuntimeError("Sandbox not started.") + + params: Dict[str, Any] = {} + if exec_type: + params["exec_type"] = exec_type + + async with self._http.get( + f"{self.endpoint}/sessions/{self.session_id}/history/last", + params=params, + ) as resp: + if resp.status == 200: + return await resp.json() + elif resp.status == 404: + raise RuntimeError("No execution history found") + else: + error = await resp.text() + raise RuntimeError(f"Failed to get last execution: {error}") + + async def annotate_execution( + self, + execution_id: str, + description: Optional[str] = None, + tags: Optional[str] = None, + notes: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Annotate an execution record with metadata. + + Use this to add descriptions, tags, or notes to an execution after + it has been recorded. Useful for skill library construction. + + Args: + execution_id: The execution history ID + description: Human-readable description of what this execution does + tags: Comma-separated tags for categorization + notes: Agent notes/annotations about this execution + + Returns: + Dict with updated execution details + """ + if not self._http: + raise RuntimeError("Sandbox not started.") + + payload: Dict[str, Any] = {} + if description is not None: + payload["description"] = description + if tags is not None: + payload["tags"] = tags + if notes is not None: + payload["notes"] = notes + + async with self._http.patch( + f"{self.endpoint}/sessions/{self.session_id}/history/{execution_id}", + json=payload, + ) as resp: + if resp.status == 200: + return await resp.json() + elif resp.status == 404: + raise RuntimeError(f"Execution {execution_id} not found") + else: + error = await resp.text() + raise RuntimeError(f"Failed to annotate execution: {error}") + + @property + def ship_id(self) -> Optional[str]: + """Get the Ship container ID.""" + return self._ship_id + + async def __aenter__(self) -> "Sandbox": + return await self.start() + + async def 
__aexit__(self, exc_type, exc_val, exc_tb) -> None: + await self.stop() + + +# Convenience functions +async def run_python(code: str, **kwargs) -> ExecResult: + """ + Quick helper to run Python code. + + Usage: + result = await run_python("print('hello')") + """ + async with Sandbox(**kwargs) as sandbox: + return await sandbox.python.exec(code) + + +async def run_shell(command: str, **kwargs) -> ExecResult: + """ + Quick helper to run shell command. + + Usage: + result = await run_shell("ls -la") + """ + async with Sandbox(**kwargs) as sandbox: + return await sandbox.shell.exec(command) diff --git a/shipyard_python_sdk/shipyard/session.py b/shipyard_python_sdk/shipyard/session.py index 66fba5d..df0135f 100644 --- a/shipyard_python_sdk/shipyard/session.py +++ b/shipyard_python_sdk/shipyard/session.py @@ -71,3 +71,37 @@ async def download_file( await self._client.download_file( self.id, remote_file_path, self._session_id, local_file_path ) + + async def get_execution_history( + self, + exec_type: str | None = None, + success_only: bool = False, + limit: int = 100, + offset: int = 0, + ) -> Dict[str, Any]: + """Get execution history for this session. + + This enables agents to retrieve their successful execution paths + for skill library construction (inspired by VOYAGER). 
+ + Args: + exec_type: Filter by type ('python' or 'shell') + success_only: If True, only return successful executions + limit: Maximum number of entries to return + offset: Number of entries to skip + + Returns: + Dict with 'entries' list and 'total' count + """ + return await self._client.get_execution_history( + session_id=self._session_id, + exec_type=exec_type, + success_only=success_only, + limit=limit, + offset=offset, + ) + + @property + def session_id(self) -> str: + """Get the session ID.""" + return self._session_id diff --git a/shipyard_python_sdk/shipyard/types.py b/shipyard_python_sdk/shipyard/types.py index 733ca16..8fb6c2e 100644 --- a/shipyard_python_sdk/shipyard/types.py +++ b/shipyard_python_sdk/shipyard/types.py @@ -52,11 +52,8 @@ def ttl(self) -> int: return self._data["ttl"] @property - def max_session_num(self) -> int: - """Maximum number of sessions""" - return self._data["max_session_num"] - - @property - def current_session_num(self) -> int: - """Current number of sessions""" - return self._data["current_session_num"] + def expires_at(self) -> Optional[datetime]: + """Expiration timestamp""" + if "expires_at" in self._data and self._data["expires_at"]: + return datetime.fromisoformat(self._data["expires_at"].replace("Z", "+00:00")) + return None diff --git a/shipyard_python_sdk/shipyard/utils.py b/shipyard_python_sdk/shipyard/utils.py index 72e06bd..d7e4922 100644 --- a/shipyard_python_sdk/shipyard/utils.py +++ b/shipyard_python_sdk/shipyard/utils.py @@ -9,26 +9,34 @@ async def create_session_ship( - ttl: int, + ttl: int = 3600, spec: Optional[Spec] = None, - max_session_num: int = 1, + max_session_num: int | None = None, endpoint_url: Optional[str] = None, access_token: Optional[str] = None, session_id: Optional[str] = None, + force_create: bool = False, ) -> SessionShip: """ Convenience function to create a SessionShip directly Args: - ttl: Time to live in seconds + ttl: Time to live in seconds (default: 1 hour) spec: Ship specifications 
for resource allocation - max_session_num: Maximum number of sessions that can use this ship + max_session_num: Deprecated. Ignored (Shipyard enforces 1:1 binding). endpoint_url: Bay API endpoint URL (can also be set via SHIPYARD_ENDPOINT env var) access_token: Access token for authentication (can also be set via SHIPYARD_TOKEN env var) session_id: Session ID (if not provided, a random one will be generated) + force_create: If True, skip reuse logic and always create new container Returns: SessionShip: The created ship session """ client = ShipyardClient(endpoint_url, access_token) - return await client.create_ship(ttl, spec, max_session_num, session_id) + return await client.create_ship( + ttl=ttl, + spec=spec, + max_session_num=max_session_num, + session_id=session_id, + force_create=force_create, + )