diff --git a/examples/sandbox_02_fs_ops.py b/examples/sandbox_02_fs_ops.py index 5e0747e..3556caf 100644 --- a/examples/sandbox_02_fs_ops.py +++ b/examples/sandbox_02_fs_ops.py @@ -1,4 +1,6 @@ import asyncio +from pathlib import Path +from tempfile import TemporaryDirectory from dotenv import load_dotenv @@ -26,9 +28,23 @@ async def main() -> None: ) data1 = await sandbox.read_file("hello.txt") + stream = await sandbox.iter_file("hello.txt", chunk_size=5) data2 = await sandbox.read_file("/vercel/sandbox/nested/dir/note.txt") result = await sandbox.run_command("./hello.sh") + assert stream is not None + streamed_data1 = b"".join([chunk async for chunk in stream]) + + with TemporaryDirectory() as tmp_dir: + downloaded_path = Path(tmp_dir) / "downloaded-hello.txt" + saved_path = await sandbox.download_file("hello.txt", downloaded_path) + assert saved_path == str(downloaded_path.resolve()) + downloaded_data1 = downloaded_path.read_bytes() + + assert data1 == streamed_data1 + assert data1 == downloaded_data1 print("hello.txt:", data1.decode()) + print("hello.txt (streamed):", streamed_data1.decode()) + print("hello.txt (downloaded):", downloaded_data1.decode()) print("note.txt:", data2.decode()) print("hello.sh:", (await result.stdout()).strip()) diff --git a/src/vercel/_internal/fs.py b/src/vercel/_internal/fs.py new file mode 100644 index 0000000..9e5c9ab --- /dev/null +++ b/src/vercel/_internal/fs.py @@ -0,0 +1,176 @@ +"""Internal filesystem client primitives with runtime-specific platforms. + +The shared client delegates to a platform with an async-shaped interface. The +sync platform wraps explicitly synchronous filesystem operations so shared +business logic can still run through ``iter_coroutine()`` without suspension. +The async platform delegates to ``anyio``'s async filesystem primitives. 
+""" + +from __future__ import annotations + +import os +from typing import BinaryIO, Generic, Protocol, TypeVar + +import anyio + +PathInput = str | os.PathLike[str] +SyncFileHandle = BinaryIO +AsyncFileHandle = anyio.AsyncFile[bytes] +FileHandle = SyncFileHandle | AsyncFileHandle +HandleT = TypeVar("HandleT", SyncFileHandle, AsyncFileHandle) + + +def _coerce_path(path: PathInput) -> str: + return os.fspath(path) + + +def _parent_dir(path: PathInput) -> str: + return os.path.dirname(_coerce_path(path)) or "." + + +class FilesystemPlatform(Protocol[HandleT]): + async def coerce_path(self, path: PathInput) -> str: ... + + async def create_parent_directories(self, path: PathInput) -> None: ... + + async def open_binary_writer(self, path: PathInput) -> HandleT: ... + + async def write(self, handle: HandleT, data: bytes) -> None: ... + + async def close(self, handle: HandleT) -> None: ... + + async def replace(self, src: PathInput, dst: PathInput) -> None: ... + + async def remove_if_exists(self, path: PathInput) -> None: ... + + async def exists(self, path: PathInput) -> bool: ... 
+ + +class SyncFilesystemPlatform: + """Sync platform with async interface for use with ``iter_coroutine()``.""" + + async def coerce_path(self, path: PathInput) -> str: + return _coerce_path(path) + + async def create_parent_directories(self, path: PathInput) -> None: + os.makedirs(_parent_dir(path), exist_ok=True) + + async def open_binary_writer(self, path: PathInput) -> SyncFileHandle: + return open(_coerce_path(path), "wb") + + async def write(self, handle: SyncFileHandle, data: bytes) -> None: + handle.write(data) + + async def close(self, handle: SyncFileHandle) -> None: + handle.close() + + async def replace(self, src: PathInput, dst: PathInput) -> None: + os.replace(_coerce_path(src), _coerce_path(dst)) + + async def remove_if_exists(self, path: PathInput) -> None: + try: + os.remove(_coerce_path(path)) + except FileNotFoundError: + pass + + async def exists(self, path: PathInput) -> bool: + return os.path.exists(_coerce_path(path)) + + +class AsyncFilesystemPlatform: + async def coerce_path(self, path: PathInput) -> str: + return _coerce_path(path) + + async def create_parent_directories(self, path: PathInput) -> None: + await anyio.Path(_parent_dir(path)).mkdir(parents=True, exist_ok=True) + + async def open_binary_writer(self, path: PathInput) -> AsyncFileHandle: + return await anyio.open_file(_coerce_path(path), "wb") + + async def write(self, handle: AsyncFileHandle, data: bytes) -> None: + await handle.write(data) + + async def close(self, handle: AsyncFileHandle) -> None: + await handle.aclose() + + async def replace(self, src: PathInput, dst: PathInput) -> None: + await anyio.Path(_coerce_path(src)).replace(_coerce_path(dst)) + + async def remove_if_exists(self, path: PathInput) -> None: + await anyio.Path(_coerce_path(path)).unlink(missing_ok=True) + + async def exists(self, path: PathInput) -> bool: + return await anyio.Path(_coerce_path(path)).exists() + + +class FilesystemClient(Generic[HandleT]): + """Shared filesystem client with a 
transport-backed async API.""" + + def __init__(self, *, platform: FilesystemPlatform[HandleT]) -> None: + self._platform: FilesystemPlatform[HandleT] = platform + + async def coerce_path(self, path: PathInput) -> str: + return await self._platform.coerce_path(path) + + async def create_parent_directories(self, path: PathInput) -> None: + await self._platform.create_parent_directories(path) + + async def open_binary_writer(self, path: PathInput) -> HandleT: + return await self._platform.open_binary_writer(path) + + async def write(self, handle: HandleT, data: bytes) -> None: + await self._platform.write(handle, data) + + async def close(self, handle: HandleT) -> None: + await self._platform.close(handle) + + async def replace(self, src: PathInput, dst: PathInput) -> None: + await self._platform.replace(src, dst) + + async def remove_if_exists(self, path: PathInput) -> None: + await self._platform.remove_if_exists(path) + + async def exists(self, path: PathInput) -> bool: + return await self._platform.exists(path) + + +def create_filesystem_client() -> FilesystemClient[SyncFileHandle]: + """Create a sync filesystem client backed by blocking file operations.""" + + return FilesystemClient(platform=SyncFilesystemPlatform()) + + +def create_async_filesystem_client() -> FilesystemClient[AsyncFileHandle]: + """Create an async filesystem client backed by anyio filesystem primitives.""" + + return FilesystemClient(platform=AsyncFilesystemPlatform()) + + +class SyncFilesystemClient(FilesystemClient[SyncFileHandle]): + """Convenience wrapper matching the repo's sync/async client naming pattern.""" + + def __init__(self) -> None: + super().__init__(platform=SyncFilesystemPlatform()) + + +class AsyncFilesystemClient(FilesystemClient[AsyncFileHandle]): + """Convenience wrapper matching the repo's sync/async client naming pattern.""" + + def __init__(self) -> None: + super().__init__(platform=AsyncFilesystemPlatform()) + + +__all__ = [ + "AsyncFileHandle", + 
"AsyncFilesystemClient", + "AsyncFilesystemPlatform", + "FileHandle", + "FilesystemClient", + "FilesystemPlatform", + "PathInput", + "SyncFileHandle", + "SyncFilesystemClient", + "SyncFilesystemPlatform", + "create_async_filesystem_client", + "create_filesystem_client", +] diff --git a/src/vercel/_internal/sandbox/__init__.py b/src/vercel/_internal/sandbox/__init__.py index 9945823..2f1b24b 100644 --- a/src/vercel/_internal/sandbox/__init__.py +++ b/src/vercel/_internal/sandbox/__init__.py @@ -5,6 +5,7 @@ APIError, SandboxAuthError, SandboxError, + SandboxNotFoundError, SandboxPermissionError, SandboxRateLimitError, SandboxServerError, @@ -16,6 +17,7 @@ "SandboxError", "APIError", "SandboxAuthError", + "SandboxNotFoundError", "SandboxPermissionError", "SandboxRateLimitError", "SandboxServerError", diff --git a/src/vercel/_internal/sandbox/core.py b/src/vercel/_internal/sandbox/core.py index 83c8a9e..31118db 100644 --- a/src/vercel/_internal/sandbox/core.py +++ b/src/vercel/_internal/sandbox/core.py @@ -10,16 +10,24 @@ import io import json +import os import platform import posixpath import sys import tarfile -from collections.abc import AsyncGenerator, Generator +from collections.abc import AsyncGenerator, AsyncIterator, Generator +from contextlib import asynccontextmanager from importlib.metadata import version as _pkg_version from typing import Any, TypeAlias, cast import httpx +from vercel._internal.fs import ( + FileHandle, + FilesystemClient, + create_async_filesystem_client, + create_filesystem_client, +) from vercel._internal.http import ( BytesBody, JSONBody, @@ -31,6 +39,7 @@ from vercel._internal.sandbox.errors import ( APIError, SandboxAuthError, + SandboxNotFoundError, SandboxPermissionError, SandboxRateLimitError, SandboxServerError, @@ -179,6 +188,8 @@ def _build_sandbox_error( data: JSONValue | None = None, ) -> APIError: status_code = response.status_code + if status_code == 404: + return SandboxNotFoundError(response, message, data=data) if 
status_code == 401: return SandboxAuthError(response, message, data=data) if status_code == 403: @@ -250,6 +261,10 @@ class BaseSandboxOpsClient: """ _request_client: SandboxRequestClient + _filesystem_client: FilesystemClient[Any] + + def __init__(self, *, filesystem_client: FilesystemClient[Any]) -> None: + self._filesystem_client = filesystem_client async def create_sandbox( self, @@ -373,25 +388,97 @@ async def mk_dir(self, *, sandbox_id: str, path: str, cwd: str | None = None) -> body=JSONBody(body), ) - async def read_file( + async def read_file(self, *, sandbox_id: str, path: str, cwd: str | None = None) -> bytes: + body: dict[str, Any] = {"path": path} + if cwd is not None: + body["cwd"] = cwd + resp = await self._request_client.request( + "POST", + f"/v1/sandboxes/{sandbox_id}/fs/read", + body=JSONBody(body), + ) + return resp.content + + async def _open_file_stream( self, *, sandbox_id: str, path: str, cwd: str | None = None - ) -> bytes | None: + ) -> httpx.Response: body: dict[str, Any] = {"path": path} if cwd is not None: body["cwd"] = cwd + return await self._request_client.request( + "POST", + f"/v1/sandboxes/{sandbox_id}/fs/read", + body=JSONBody(body), + stream=True, + ) + + def _stream_file_chunks( + self, response: httpx.Response, *, chunk_size: int + ) -> AsyncIterator[bytes]: + raise NotImplementedError + + async def _close_file_response(self, response: httpx.Response) -> None: + raise NotImplementedError + + @asynccontextmanager + async def file_chunk_stream( + self, + *, + sandbox_id: str, + path: str, + cwd: str | None = None, + chunk_size: int = 65536, + ) -> AsyncIterator[AsyncIterator[bytes]]: + response = await self._open_file_stream(sandbox_id=sandbox_id, path=path, cwd=cwd) + try: - resp = await self._request_client.request( - "POST", - f"/v1/sandboxes/{sandbox_id}/fs/read", - body=JSONBody(body), - ) - except APIError as e: - if e.status_code == 404: - return None - raise - if resp.content is None: - return None - return resp.content 
+ yield self._stream_file_chunks(response, chunk_size=chunk_size) + finally: + await self._close_file_response(response) + + async def download_file( + self, + *, + sandbox_id: str, + remote_path: str, + local_path: str | os.PathLike, + cwd: str | None = None, + create_parents: bool = False, + chunk_size: int = 65536, + ) -> str: + if not remote_path: + raise ValueError("remote_path is required") + if not local_path: + raise ValueError("local_path is required") + + destination = os.path.abspath(await self._filesystem_client.coerce_path(local_path)) + if create_parents: + await self._filesystem_client.create_parent_directories(destination) + temp_path = destination + ".part" + + async with self.file_chunk_stream( + sandbox_id=sandbox_id, + path=remote_path, + cwd=cwd, + chunk_size=chunk_size, + ) as stream: + handle: FileHandle | None = None + try: + handle = await self._filesystem_client.open_binary_writer(temp_path) + try: + async for chunk in stream: + if chunk: + await self._filesystem_client.write(handle, chunk) + finally: + if handle is not None: + await self._filesystem_client.close(handle) + handle = None + await self._filesystem_client.replace(temp_path, destination) + except Exception: + await self._filesystem_client.remove_if_exists(temp_path) + raise + + return destination async def write_files( self, @@ -484,7 +571,15 @@ async def _get_log_stream(self, *, sandbox_id: str, cmd_id: str) -> httpx.Respon class SyncSandboxOpsClient(BaseSandboxOpsClient): - def __init__(self, *, host: str = "https://api.vercel.com", team_id: str, token: str) -> None: + def __init__( + self, + *, + host: str = "https://api.vercel.com", + team_id: str, + token: str, + filesystem_client: FilesystemClient[Any] | None = None, + ) -> None: + super().__init__(filesystem_client=filesystem_client or create_filesystem_client()) rc = create_request_client( token=token, base_headers={"user-agent": USER_AGENT}, @@ -515,6 +610,38 @@ def get_logs(self, *, sandbox_id: str, cmd_id: str) -> 
Generator[LogLine, None, finally: resp.close() + def iter_file( + self, + *, + sandbox_id: str, + path: str, + cwd: str | None = None, + chunk_size: int = 65536, + ) -> Generator[bytes, None, None]: + resp = iter_coroutine(self._open_file_stream(sandbox_id=sandbox_id, path=path, cwd=cwd)) + + def _iterate() -> Generator[bytes, None, None]: + try: + for chunk in resp.iter_bytes(chunk_size=chunk_size): + if chunk: + yield chunk + finally: + resp.close() + + return _iterate() + + def _stream_file_chunks( + self, response: httpx.Response, *, chunk_size: int + ) -> AsyncIterator[bytes]: + async def _iterate() -> AsyncIterator[bytes]: + for chunk in response.iter_bytes(chunk_size=chunk_size): + yield chunk + + return _iterate() + + async def _close_file_response(self, response: httpx.Response) -> None: + response.close() + def close(self) -> None: self._rc.close() @@ -531,7 +658,15 @@ def __exit__(self, *args: object) -> None: class AsyncSandboxOpsClient(BaseSandboxOpsClient): - def __init__(self, *, host: str = "https://api.vercel.com", team_id: str, token: str) -> None: + def __init__( + self, + *, + host: str = "https://api.vercel.com", + team_id: str, + token: str, + filesystem_client: FilesystemClient[Any] | None = None, + ) -> None: + super().__init__(filesystem_client=filesystem_client or create_async_filesystem_client()) rc = create_async_request_client( token=token, base_headers={"user-agent": USER_AGENT}, @@ -562,6 +697,38 @@ async def get_logs(self, *, sandbox_id: str, cmd_id: str) -> AsyncGenerator[LogL finally: await resp.aclose() + async def iter_file( + self, + *, + sandbox_id: str, + path: str, + cwd: str | None = None, + chunk_size: int = 65536, + ) -> AsyncGenerator[bytes, None]: + resp = await self._open_file_stream(sandbox_id=sandbox_id, path=path, cwd=cwd) + + async def _iterate() -> AsyncGenerator[bytes, None]: + try: + async for chunk in resp.aiter_bytes(chunk_size=chunk_size): + if chunk: + yield chunk + finally: + await resp.aclose() + + return 
_iterate() + + def _stream_file_chunks( + self, response: httpx.Response, *, chunk_size: int + ) -> AsyncIterator[bytes]: + async def _iterate() -> AsyncIterator[bytes]: + async for chunk in response.aiter_bytes(chunk_size=chunk_size): + yield chunk + + return _iterate() + + async def _close_file_response(self, response: httpx.Response) -> None: + await response.aclose() + async def aclose(self) -> None: await self._rc.aclose() diff --git a/src/vercel/_internal/sandbox/errors.py b/src/vercel/_internal/sandbox/errors.py index 62bb96d..416d4e4 100644 --- a/src/vercel/_internal/sandbox/errors.py +++ b/src/vercel/_internal/sandbox/errors.py @@ -36,6 +36,10 @@ class SandboxPermissionError(APIError): """Authorization failures returned by the sandbox API.""" +class SandboxNotFoundError(APIError): + """Requested sandbox resource was not found.""" + + class SandboxRateLimitError(APIError): def __init__( self, @@ -58,6 +62,7 @@ class SandboxServerError(APIError): "APIError", "SandboxAuthError", "SandboxPermissionError", + "SandboxNotFoundError", "SandboxRateLimitError", "SandboxServerError", ] diff --git a/src/vercel/sandbox/__init__.py b/src/vercel/sandbox/__init__.py index fe6bf58..4073801 100644 --- a/src/vercel/sandbox/__init__.py +++ b/src/vercel/sandbox/__init__.py @@ -2,6 +2,7 @@ APIError, SandboxAuthError, SandboxError, + SandboxNotFoundError, SandboxPermissionError, SandboxRateLimitError, SandboxServerError, @@ -29,6 +30,7 @@ "SandboxError", "APIError", "SandboxAuthError", + "SandboxNotFoundError", "SandboxPermissionError", "SandboxRateLimitError", "SandboxServerError", diff --git a/src/vercel/sandbox/command.py b/src/vercel/sandbox/command.py index aaec233..3409b79 100644 --- a/src/vercel/sandbox/command.py +++ b/src/vercel/sandbox/command.py @@ -4,7 +4,11 @@ from dataclasses import dataclass from vercel._internal.iter_coroutine import iter_coroutine -from vercel._internal.sandbox import APIError, AsyncSandboxOpsClient, SyncSandboxOpsClient +from 
vercel._internal.sandbox import ( + AsyncSandboxOpsClient, + SandboxNotFoundError, + SyncSandboxOpsClient, +) from vercel._internal.sandbox.models import ( Command as CommandModel, CommandFinishedResponse, @@ -64,11 +68,9 @@ async def kill(self, signal: int = 15) -> None: await self.client.kill_command( sandbox_id=self.sandbox_id, command_id=self.cmd.id, signal=signal ) - except APIError as e: + except SandboxNotFoundError: # Command may already have exited; ignore 404s - if e.status_code == 404: - return - raise + return @dataclass @@ -135,10 +137,8 @@ def kill(self, signal: int = 15) -> None: sandbox_id=self.sandbox_id, command_id=self.cmd.id, signal=signal ) ) - except APIError as e: - if e.status_code == 404: - return - raise + except SandboxNotFoundError: + return @dataclass diff --git a/src/vercel/sandbox/sandbox.py b/src/vercel/sandbox/sandbox.py index 3285662..5f76683 100644 --- a/src/vercel/sandbox/sandbox.py +++ b/src/vercel/sandbox/sandbox.py @@ -2,12 +2,15 @@ import builtins import time +from collections.abc import AsyncIterator, Iterator from dataclasses import dataclass from datetime import datetime +from os import PathLike from typing import Any from vercel._internal.iter_coroutine import iter_coroutine from vercel._internal.sandbox.core import AsyncSandboxOpsClient, SyncSandboxOpsClient +from vercel._internal.sandbox.errors import SandboxNotFoundError from vercel._internal.sandbox.models import ( CommandResponse, Sandbox as SandboxModel, @@ -359,8 +362,39 @@ async def run_command_detached( async def mk_dir(self, path: str, *, cwd: str | None = None) -> None: await self.client.mk_dir(sandbox_id=self.sandbox.id, path=path, cwd=cwd) + async def iter_file( + self, path: str, *, cwd: str | None = None, chunk_size: int = 65536 + ) -> AsyncIterator[bytes]: + return await self.client.iter_file( + sandbox_id=self.sandbox.id, + path=path, + cwd=cwd, + chunk_size=chunk_size, + ) + async def read_file(self, path: str, *, cwd: str | None = None) -> bytes | None: 
- return await self.client.read_file(sandbox_id=self.sandbox.id, path=path, cwd=cwd) + try: + return await self.client.read_file(sandbox_id=self.sandbox.id, path=path, cwd=cwd) + except SandboxNotFoundError: + return None + + async def download_file( + self, + remote_path: str, + local_path: str | PathLike, + *, + cwd: str | None = None, + create_parents: bool = False, + chunk_size: int = 65536, + ) -> str: + return await self.client.download_file( + sandbox_id=self.sandbox.id, + remote_path=remote_path, + local_path=local_path, + cwd=cwd, + create_parents=create_parents, + chunk_size=chunk_size, + ) async def write_files(self, files: builtins.list[WriteFile]) -> None: await self.client.write_files( @@ -721,8 +755,43 @@ def run_command_detached( def mk_dir(self, path: str, *, cwd: str | None = None) -> None: iter_coroutine(self.client.mk_dir(sandbox_id=self.sandbox.id, path=path, cwd=cwd)) + def iter_file( + self, path: str, *, cwd: str | None = None, chunk_size: int = 65536 + ) -> Iterator[bytes]: + return self.client.iter_file( + sandbox_id=self.sandbox.id, + path=path, + cwd=cwd, + chunk_size=chunk_size, + ) + def read_file(self, path: str, *, cwd: str | None = None) -> bytes | None: - return iter_coroutine(self.client.read_file(sandbox_id=self.sandbox.id, path=path, cwd=cwd)) + try: + return iter_coroutine( + self.client.read_file(sandbox_id=self.sandbox.id, path=path, cwd=cwd) + ) + except SandboxNotFoundError: + return None + + def download_file( + self, + remote_path: str, + local_path: str | PathLike, + *, + cwd: str | None = None, + create_parents: bool = False, + chunk_size: int = 65536, + ) -> str: + return iter_coroutine( + self.client.download_file( + sandbox_id=self.sandbox.id, + remote_path=remote_path, + local_path=local_path, + cwd=cwd, + create_parents=create_parents, + chunk_size=chunk_size, + ) + ) def write_files(self, files: builtins.list[WriteFile]) -> None: iter_coroutine( diff --git a/tests/integration/test_sandbox_streaming_errors.py 
b/tests/integration/test_sandbox_streaming_errors.py index 4fdd649..1eb881d 100644 --- a/tests/integration/test_sandbox_streaming_errors.py +++ b/tests/integration/test_sandbox_streaming_errors.py @@ -8,6 +8,7 @@ APIError, SandboxAuthError, SandboxError, + SandboxNotFoundError, SandboxPermissionError, SandboxRateLimitError, SandboxServerError, @@ -45,6 +46,7 @@ def _make_error_response( SYNC_CASES = [ + (404, SandboxNotFoundError, "not_found", "Missing command.", None, None), (401, SandboxAuthError, "unauthorized", "Authentication required.", None, None), (403, SandboxPermissionError, "forbidden", "Access denied.", None, None), (429, SandboxRateLimitError, "rate_limited", "Slow down.", {"retry-after": "120"}, 120), diff --git a/tests/integration/test_sandbox_sync_async.py b/tests/integration/test_sandbox_sync_async.py index 494c24c..ff5c9b0 100644 --- a/tests/integration/test_sandbox_sync_async.py +++ b/tests/integration/test_sandbox_sync_async.py @@ -5,8 +5,11 @@ import json import tarfile +from collections.abc import AsyncIterator from datetime import datetime, timezone from io import BytesIO +from pathlib import Path +from unittest.mock import AsyncMock import httpx import pytest @@ -17,6 +20,8 @@ NetworkPolicyRule, NetworkPolicySubnets, NetworkTransformer, + SandboxNotFoundError, + SandboxServerError, ) # Base URL for Vercel Sandbox API @@ -1988,9 +1993,46 @@ def test_read_file_sync( sandbox.client.close() + @respx.mock + def test_iter_file_sync( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content + ): + """Test synchronous streamed file read.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, 
content=mock_sandbox_read_file_content) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + stream = sandbox.iter_file("/etc/hosts", chunk_size=4) + + assert stream is not None + assert b"".join(stream) == mock_sandbox_read_file_content + + sandbox.client.close() + @respx.mock def test_read_file_not_found(self, mock_env_clear, mock_sandbox_get_response): - """Test file read returns None for non-existent file.""" + """Test file read returns None for non-existent file.""" from vercel.sandbox import Sandbox sandbox_id = "sbx_test123456" @@ -2018,9 +2060,299 @@ def test_read_file_not_found(self, mock_env_clear, mock_sandbox_get_response): project_id="prj_test123", ) - content = sandbox.read_file("/nonexistent/file") + assert sandbox.read_file("/nonexistent/file") is None + + sandbox.client.close() + + @respx.mock + def test_iter_file_not_found(self, mock_env_clear, mock_sandbox_get_response): + """Test streamed file read raises for non-existent file.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(404, json={"error": {"code": "not_found"}}) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + with pytest.raises(SandboxNotFoundError, match="HTTP 404"): + sandbox.iter_file("/nonexistent/file") + + sandbox.client.close() + + @respx.mock + def test_download_file_sync( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content, tmp_path + ): + """Test synchronous sandbox-to-local file download.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + 
respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "downloaded.txt" + result = sandbox.download_file("/etc/hosts", destination) + + assert result == str(destination.resolve()) + assert destination.read_bytes() == mock_sandbox_read_file_content + + sandbox.client.close() + + @respx.mock + def test_download_file_sync_creates_parents( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content, tmp_path + ): + """Test sync download can create parent directories.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "nested" / "dir" / "downloaded.txt" + result = sandbox.download_file("/etc/hosts", destination, create_parents=True) + + assert result == str(destination.resolve()) + assert destination.read_bytes() == mock_sandbox_read_file_content + + sandbox.client.close() + + @respx.mock + def test_download_file_sync_uses_filesystem_client_for_local_path_setup( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content, tmp_path + ): + """Test sync download uses the 
injected filesystem client for local setup.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + requested_destination = tmp_path / "ignored.txt" + rewritten_destination = tmp_path / "rewritten" / "downloaded.txt" + sandbox.client._filesystem_client.coerce_path = AsyncMock( + return_value=str(rewritten_destination) + ) + + async def create_parent_directories(path: str) -> None: + Path(path).parent.mkdir(parents=True, exist_ok=True) + + sandbox.client._filesystem_client.create_parent_directories = AsyncMock( + side_effect=create_parent_directories + ) + + result = sandbox.download_file("/etc/hosts", requested_destination, create_parents=True) + + assert result == str(rewritten_destination.resolve()) + assert rewritten_destination.read_bytes() == mock_sandbox_read_file_content + assert not requested_destination.exists() + sandbox.client._filesystem_client.coerce_path.assert_awaited_once_with( + requested_destination + ) + sandbox.client._filesystem_client.create_parent_directories.assert_awaited_once_with( + str(rewritten_destination.resolve()) + ) + + sandbox.client.close() + + @respx.mock + def test_download_file_sync_stream_failure_removes_part_file( + self, mock_env_clear, mock_sandbox_get_response, tmp_path + ): + """Test sync download cleans up the temp file after a stream failure.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + 
"sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=b"ignored") + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + async def fail_after_partial_chunk( + response: httpx.Response, *, chunk_size: int + ) -> AsyncIterator[bytes]: + del response, chunk_size + yield b"partial" + raise RuntimeError("stream failed") + + sandbox.client._stream_file_chunks = fail_after_partial_chunk # type: ignore[method-assign] + + destination = tmp_path / "downloaded.txt" + temp_path = destination.with_name(destination.name + ".part") + + with pytest.raises(RuntimeError, match="stream failed"): + sandbox.download_file("/etc/hosts", destination) + + assert not destination.exists() + assert not temp_path.exists() + + sandbox.client.close() + + @respx.mock + def test_download_file_sync_not_found( + self, mock_env_clear, mock_sandbox_get_response, tmp_path + ): + """Test sync download raises for non-existent file.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(404, json={"error": {"code": "not_found"}}) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "downloaded.txt" + with pytest.raises(SandboxNotFoundError, match="HTTP 404"): + sandbox.download_file("/nonexistent/file", destination) + assert not destination.exists() - assert content is None + sandbox.client.close() + + @respx.mock + def test_download_file_sync_server_error_propagates( + 
self, mock_env_clear, mock_sandbox_get_response, tmp_path + ): + """Test sync download preserves non-404 sandbox API errors.""" + from vercel.sandbox import Sandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(500, json={"message": "remote exploded"}) + ) + + sandbox = Sandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "downloaded.txt" + + with pytest.raises(SandboxServerError, match="HTTP 500: remote exploded"): + sandbox.download_file("/etc/hosts", destination) + + assert not destination.exists() sandbox.client.close() @@ -2060,6 +2392,366 @@ async def test_read_file_async( content = await sandbox.read_file("/etc/hosts") assert content is not None + assert content == mock_sandbox_read_file_content + + await sandbox.client.aclose() + + @respx.mock + @pytest.mark.asyncio + async def test_iter_file_async( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content + ): + """Test asynchronous streamed file read.""" + from vercel.sandbox import AsyncSandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = await AsyncSandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + stream = await sandbox.iter_file("/etc/hosts", chunk_size=4) + + assert stream is not None + chunks = 
[chunk async for chunk in stream]
+        assert b"".join(chunks) == mock_sandbox_read_file_content
+
+        await sandbox.client.aclose()
+
+    @respx.mock
+    @pytest.mark.asyncio
+    async def test_read_file_async_not_found(self, mock_env_clear, mock_sandbox_get_response):
+        """Test async file read returns None for non-existent file."""
+        from vercel.sandbox import AsyncSandbox
+
+        sandbox_id = "sbx_test123456"
+
+        respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock(
+            return_value=httpx.Response(
+                200,
+                json={
+                    "sandbox": mock_sandbox_get_response,
+                    "routes": [],
+                },
+            )
+        )
+
+        respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock(
+            return_value=httpx.Response(404, json={"error": {"code": "not_found"}})
+        )
+
+        sandbox = await AsyncSandbox.get(
+            sandbox_id=sandbox_id,
+            token="test_token",
+            team_id="team_test123",
+            project_id="prj_test123",
+        )
+
+        assert await sandbox.read_file("/nonexistent/file") is None
+
+        await sandbox.client.aclose()
+
+    @respx.mock
+    @pytest.mark.asyncio
+    async def test_iter_file_async_not_found(self, mock_env_clear, mock_sandbox_get_response):
+        """Test async streamed file read raises for non-existent file."""
+        from vercel.sandbox import AsyncSandbox
+
+        sandbox_id = "sbx_test123456"
+
+        respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock(
+            return_value=httpx.Response(
+                200,
+                json={
+                    "sandbox": mock_sandbox_get_response,
+                    "routes": [],
+                },
+            )
+        )
+
+        respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock(
+            return_value=httpx.Response(404, json={"error": {"code": "not_found"}})
+        )
+
+        sandbox = await AsyncSandbox.get(
+            sandbox_id=sandbox_id,
+            token="test_token",
+            team_id="team_test123",
+            project_id="prj_test123",
+        )
+
+        with pytest.raises(SandboxNotFoundError, match="HTTP 404"):
+            await sandbox.iter_file("/nonexistent/file")
+
+        await sandbox.client.aclose()
+
+    @respx.mock
+    @pytest.mark.asyncio
+    async def test_download_file_async(
+        self, mock_env_clear, 
mock_sandbox_get_response, mock_sandbox_read_file_content, tmp_path + ): + """Test async sandbox-to-local file download.""" + from vercel.sandbox import AsyncSandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = await AsyncSandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "downloaded.txt" + result = await sandbox.download_file("/etc/hosts", destination) + + assert result == str(destination.resolve()) + assert destination.read_bytes() == mock_sandbox_read_file_content + + await sandbox.client.aclose() + + @respx.mock + @pytest.mark.asyncio + async def test_download_file_async_creates_parents( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content, tmp_path + ): + """Test async download can create parent directories.""" + from vercel.sandbox import AsyncSandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = await AsyncSandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "nested" / "dir" / "downloaded.txt" + result = await sandbox.download_file("/etc/hosts", destination, create_parents=True) + + assert result == str(destination.resolve()) + assert destination.parent.is_dir() + 
assert destination.read_bytes() == mock_sandbox_read_file_content + + await sandbox.client.aclose() + + @respx.mock + @pytest.mark.asyncio + async def test_download_file_async_uses_filesystem_client_for_local_path_setup( + self, mock_env_clear, mock_sandbox_get_response, mock_sandbox_read_file_content, tmp_path + ): + """Test async download uses the injected filesystem client for local setup.""" + from vercel.sandbox import AsyncSandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=mock_sandbox_read_file_content) + ) + + sandbox = await AsyncSandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + requested_destination = tmp_path / "ignored.txt" + rewritten_destination = tmp_path / "rewritten" / "downloaded.txt" + sandbox.client._filesystem_client.coerce_path = AsyncMock( + return_value=str(rewritten_destination) + ) + + async def create_parent_directories(path: str) -> None: + Path(path).parent.mkdir(parents=True, exist_ok=True) + + sandbox.client._filesystem_client.create_parent_directories = AsyncMock( + side_effect=create_parent_directories + ) + + result = await sandbox.download_file( + "/etc/hosts", requested_destination, create_parents=True + ) + + assert result == str(rewritten_destination.resolve()) + assert rewritten_destination.read_bytes() == mock_sandbox_read_file_content + assert not requested_destination.exists() + sandbox.client._filesystem_client.coerce_path.assert_awaited_once_with( + requested_destination + ) + sandbox.client._filesystem_client.create_parent_directories.assert_awaited_once_with( + str(rewritten_destination.resolve()) + ) + + await sandbox.client.aclose() + + @respx.mock + 
@pytest.mark.asyncio + async def test_download_file_async_uses_filesystem_client_for_file_writes_and_cleanup( + self, mock_env_clear, mock_sandbox_get_response, tmp_path + ): + """Test async download routes writes through the filesystem client and cleans up.""" + from vercel.sandbox import AsyncSandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(200, content=b"ignored") + ) + + sandbox = await AsyncSandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + async def stream_chunks( + response: httpx.Response, *, chunk_size: int + ) -> AsyncIterator[bytes]: + del response, chunk_size + yield b"first" + yield b"second" + + sandbox.client._stream_file_chunks = stream_chunks # type: ignore[method-assign] + + destination = tmp_path / "downloaded.txt" + resolved_destination = destination.resolve() + temp_path = Path(str(resolved_destination) + ".part") + + original_open = sandbox.client._filesystem_client.open_binary_writer + original_close = sandbox.client._filesystem_client.close + original_remove_if_exists = sandbox.client._filesystem_client.remove_if_exists + + sandbox.client._filesystem_client.open_binary_writer = AsyncMock(side_effect=original_open) + sandbox.client._filesystem_client.close = AsyncMock(side_effect=original_close) + sandbox.client._filesystem_client.remove_if_exists = AsyncMock( + side_effect=original_remove_if_exists + ) + + original_write = sandbox.client._filesystem_client.write + + async def fail_during_write(handle: object, data: bytes) -> None: + await original_write(handle, data) + raise RuntimeError("write failed") + + sandbox.client._filesystem_client.write = AsyncMock(side_effect=fail_during_write) + 
+ with pytest.raises(RuntimeError, match="write failed"): + await sandbox.download_file("/etc/hosts", destination) + + assert not destination.exists() + assert not temp_path.exists() + sandbox.client._filesystem_client.open_binary_writer.assert_awaited_once_with( + str(temp_path) + ) + sandbox.client._filesystem_client.write.assert_awaited_once() + sandbox.client._filesystem_client.close.assert_awaited_once() + sandbox.client._filesystem_client.remove_if_exists.assert_awaited_once_with(str(temp_path)) + + await sandbox.client.aclose() + + @respx.mock + @pytest.mark.asyncio + async def test_download_file_async_not_found( + self, mock_env_clear, mock_sandbox_get_response, tmp_path + ): + """Test async download raises for non-existent file.""" + from vercel.sandbox import AsyncSandbox + + sandbox_id = "sbx_test123456" + + respx.get(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}").mock( + return_value=httpx.Response( + 200, + json={ + "sandbox": mock_sandbox_get_response, + "routes": [], + }, + ) + ) + + respx.post(f"{SANDBOX_API_BASE}/v1/sandboxes/{sandbox_id}/fs/read").mock( + return_value=httpx.Response(404, json={"error": {"code": "not_found"}}) + ) + + sandbox = await AsyncSandbox.get( + sandbox_id=sandbox_id, + token="test_token", + team_id="team_test123", + project_id="prj_test123", + ) + + destination = tmp_path / "downloaded.txt" + with pytest.raises(SandboxNotFoundError, match="HTTP 404"): + await sandbox.download_file("/nonexistent/file", destination) + assert not destination.exists() await sandbox.client.aclose() diff --git a/tests/live/test_sandbox_live.py b/tests/live/test_sandbox_live.py index 010c078..0eac442 100644 --- a/tests/live/test_sandbox_live.py +++ b/tests/live/test_sandbox_live.py @@ -5,6 +5,7 @@ Run with: pytest tests/live/test_sandbox_live.py -v """ +import os import time import pytest @@ -78,7 +79,7 @@ async def test_async_sandbox_lifecycle(self, vercel_token, vercel_team_id, clean # Context manager should have stopped the sandbox - def 
test_file_operations(self, vercel_token, vercel_team_id, cleanup_registry): + def test_file_operations(self, vercel_token, vercel_team_id, cleanup_registry, tmp_path): """Test sandbox file write and read operations.""" from vercel.sandbox import Sandbox from vercel.sandbox.models import WriteFile @@ -101,8 +102,13 @@ def test_file_operations(self, vercel_token, vercel_team_id, cleanup_registry): assert test_content in content.decode() # Read a non-existent file - missing = sandbox.read_file("/tmp/nonexistent.txt") - assert missing is None + assert sandbox.read_file("/tmp/nonexistent.txt") is None + + # Download the file locally without buffering API callers separately + local_path = tmp_path / "sandbox-download.txt" + downloaded = sandbox.download_file("/tmp/test.txt", local_path) + assert downloaded == os.fspath(local_path.resolve()) + assert local_path.read_text() == test_content finally: try: diff --git a/tests/unit/test_filesystem.py b/tests/unit/test_filesystem.py new file mode 100644 index 0000000..ae056ca --- /dev/null +++ b/tests/unit/test_filesystem.py @@ -0,0 +1,105 @@ +"""Behavioral tests for the internal filesystem client layer.""" + +from __future__ import annotations + +import os +from pathlib import Path +from typing import TypeVar + +import pytest + +from vercel._internal.fs import ( + AsyncFileHandle, + AsyncFilesystemClient, + SyncFileHandle, + SyncFilesystemClient, +) +from vercel._internal.iter_coroutine import iter_coroutine + +_T = TypeVar("_T") + + +class _PathLike: + def __init__(self, path: Path) -> None: + self._path = path + + def __fspath__(self) -> str: + return os.fspath(self._path) + + +class TestSyncFilesystemClient: + def test_sync_client_supports_iter_coroutine_temp_file_workflow(self, tmp_path: Path) -> None: + client = SyncFilesystemClient() + destination = tmp_path / "nested" / "dir" / "artifact.bin" + temp_path = tmp_path / "nested" / "dir" / "artifact.bin.part" + destination_path = 
iter_coroutine(client.coerce_path(_PathLike(destination))) + temp_file_path = iter_coroutine(client.coerce_path(temp_path)) + + assert destination_path == os.fspath(destination) + + iter_coroutine(client.create_parent_directories(temp_file_path)) + handle: SyncFileHandle = iter_coroutine(client.open_binary_writer(temp_file_path)) + iter_coroutine(client.write(handle, b"hello ")) + iter_coroutine(client.write(handle, b"world")) + iter_coroutine(client.close(handle)) + iter_coroutine(client.replace(temp_file_path, destination_path)) + + assert destination.parent.is_dir() + assert not temp_path.exists() + assert destination.read_bytes() == b"hello world" + + def test_sync_client_remove_if_exists_handles_existing_and_missing_files( + self, tmp_path: Path + ) -> None: + client = SyncFilesystemClient() + destination = tmp_path / "stale.bin" + destination.write_bytes(b"stale") + destination_path = iter_coroutine(client.coerce_path(_PathLike(destination))) + + iter_coroutine(client.remove_if_exists(destination_path)) + + assert not destination.exists() + + iter_coroutine(client.remove_if_exists(destination_path)) + + assert not destination.exists() + + +class TestAsyncFilesystemClient: + @pytest.mark.asyncio + async def test_async_client_writes_and_replaces_final_contents(self, tmp_path: Path) -> None: + client = AsyncFilesystemClient() + destination = tmp_path / "async" / "artifact.bin" + temp_path = tmp_path / "async" / "artifact.bin.part" + destination_path = await client.coerce_path(_PathLike(destination)) + temp_file_path = await client.coerce_path(temp_path) + + assert destination_path == os.fspath(destination) + + await client.create_parent_directories(temp_file_path) + handle: AsyncFileHandle = await client.open_binary_writer(temp_file_path) + await client.write(handle, b"async ") + await client.write(handle, b"contents") + await client.close(handle) + await client.replace(temp_file_path, destination_path) + + assert destination.parent.is_dir() + assert not 
temp_path.exists() + assert destination.read_bytes() == b"async contents" + + @pytest.mark.asyncio + async def test_async_client_remove_if_exists_handles_existing_and_missing_files( + self, tmp_path: Path + ) -> None: + client = AsyncFilesystemClient() + destination = tmp_path / "remove-me.bin" + destination.write_bytes(b"old") + destination_path = await client.coerce_path(_PathLike(destination)) + + await client.remove_if_exists(destination_path) + + assert not destination.exists() + + await client.remove_if_exists(destination_path) + + assert not destination.exists() diff --git a/tests/unit/test_sandbox_errors.py b/tests/unit/test_sandbox_errors.py index e13a3b6..8ad0a16 100644 --- a/tests/unit/test_sandbox_errors.py +++ b/tests/unit/test_sandbox_errors.py @@ -15,6 +15,7 @@ APIError, SandboxAuthError, SandboxError, + SandboxNotFoundError, SandboxPermissionError, SandboxRateLimitError, SandboxServerError, @@ -23,6 +24,7 @@ APIError as PublicAPIError, SandboxAuthError as PublicSandboxAuthError, SandboxError as PublicSandboxError, + SandboxNotFoundError as PublicSandboxNotFoundError, SandboxPermissionError as PublicSandboxPermissionError, SandboxRateLimitError as PublicSandboxRateLimitError, SandboxServerError as PublicSandboxServerError, @@ -72,8 +74,10 @@ def _make_request_client(response: httpx.Response) -> SandboxRequestClient: def test_public_error_hierarchy_is_exposed() -> None: + assert issubclass(SandboxNotFoundError, APIError) assert issubclass(PublicAPIError, PublicSandboxError) assert issubclass(PublicSandboxAuthError, PublicAPIError) + assert issubclass(PublicSandboxNotFoundError, PublicAPIError) assert issubclass(PublicSandboxPermissionError, PublicAPIError) assert issubclass(PublicSandboxRateLimitError, PublicAPIError) assert issubclass(PublicSandboxServerError, PublicAPIError) @@ -86,7 +90,7 @@ def test_public_error_hierarchy_is_exposed() -> None: (403, SandboxPermissionError, "forbidden", "Access denied.", None, None), (429, SandboxRateLimitError, 
"rate_limited", "Slow down.", {"retry-after": "120"}, 120), (500, SandboxServerError, "internal_server_error", "Something broke.", None, None), - (404, APIError, "not_found", "Missing file.", None, None), + (404, SandboxNotFoundError, "not_found", "Missing file.", None, None), ], ) def test_request_classifies_sandbox_http_errors(