diff --git a/parallel_web_tools/cli/commands.py b/parallel_web_tools/cli/commands.py index 0422028..a83a000 100644 --- a/parallel_web_tools/cli/commands.py +++ b/parallel_web_tools/cli/commands.py @@ -24,6 +24,7 @@ MONITOR_PROCESSORS, MONITOR_TYPES, RESEARCH_PROCESSORS, + ReauthenticationRequired, cancel_findall_run, cancel_monitor, create_findall_run, @@ -33,6 +34,7 @@ extend_findall, get_api_key, get_auth_status, + get_control_api_access_token, get_findall_result, get_findall_schema, get_findall_status, @@ -482,53 +484,174 @@ def auth(output_json: bool): print(json.dumps(status, indent=2)) return - if status["authenticated"]: - if status["method"] == "environment": - console.print("[green]Authenticated via PARALLEL_API_KEY environment variable[/green]") - else: - console.print("[green]Authenticated via OAuth[/green]") - console.print(f" Credentials: {status['token_file']}") - else: + if not status["authenticated"]: console.print("[yellow]Not authenticated[/yellow]") console.print("\n[cyan]To get started:[/cyan]") console.print(" 1. Create an account at [link=https://parallel.ai]parallel.ai[/link]") console.print(" 2. Run: parallel-cli login") console.print(" Or set PARALLEL_API_KEY environment variable") + return + # Active source — whichever resolve_api_key would return — comes first. + if status["method"] == "environment": + console.print("[green]Active: PARALLEL_API_KEY environment variable[/green]") + console.print(" [dim]This key is used for all API calls.[/dim]") + else: + console.print("[green]Active: stored credentials (OAuth)[/green]") + console.print(f" Credentials: {status['token_file']}") + if status.get("selected_org_name"): + console.print(f" Organization: {status['selected_org_name']} ({status['selected_org_id']})") + elif status.get("selected_org_id"): + console.print(f" Organization ID: {status['selected_org_id']}") + + # If the env var is overriding stored creds, make that loud and impossible to miss. + if status.get("stored_overridden_by_env"): + console.print() + console.print("[bold yellow]⚠ PARALLEL_API_KEY is set and OVERRIDES your stored login.[/bold yellow]") + console.print(f" Stored credentials: {status['token_file']}") + if status.get("selected_org_name"): + console.print( + f" Stored organization: {status['selected_org_name']} ({status['selected_org_id']}) " + "[dim](inactive)[/dim]" + ) + elif status.get("selected_org_id"): + console.print(f" Stored organization ID: {status['selected_org_id']} [dim](inactive)[/dim]") + console.print(" [dim]Unset PARALLEL_API_KEY to use the stored login instead (`unset PARALLEL_API_KEY`).[/dim]") + elif status["method"] == "environment" and not status.get("has_stored_credentials"): + # Env var set, no stored creds — informational only. + console.print(" [dim]No stored credentials. Run `parallel-cli login` to add an OAuth login.[/dim]") + + +def _build_login_hint(login_method: str | None, email: str | None) -> str | None: + """Format a platform-compatible ``login_hint`` query value. + + Scheme — the hint always names the method only; any email travels as a + separate top-level query param (see :func:`_login_extra_params`): + + - ``"email"`` → ``login=email`` (requires an email; passed as ``&email=…``) + - ``"google"`` → ``login=google`` + - ``"sso"`` → ``login=sso`` (requires an email; passed as ``&email=…``) + + Returns ``None`` when ``login_method`` is ``None`` so the caller can + skip the query param entirely. 
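+
+    A quick sketch of the mapping (the address is a placeholder)::
+
+        _build_login_hint(None, None)                # -> None
+        _build_login_hint("google", None)            # -> "login=google"
+        _build_login_hint("sso", "you@example.com")  # -> "login=sso"
+        _build_login_hint("email", None)             # raises ValueError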
+ """ + if login_method is None: + return None + if login_method in ("email", "sso"): + if not email: + raise ValueError(f"login_method={login_method!r} requires an email") + return f"login={login_method}" + if login_method == "google": + return "login=google" + raise ValueError(f"Unknown login_method: {login_method!r}") + + +def _login_extra_params(login_method: str | None, email: str | None) -> dict[str, str] | None: + """Extra query params to append alongside ``login_hint``. + + Returns ``{"email": }`` for identity-bearing methods (``email`` + and ``sso``) so the platform's login page receives the address as a + top-level param, e.g. ``...&login_hint=login=sso&email=you@example.com``. + Returns ``None`` for methods that carry no identity (``google``, or + none at all). + """ + if login_method in ("email", "sso") and email: + return {"email": email} + return None + + +def _run_login(output_json: bool, email: str | None, login_method: str | None) -> None: + """Shared body for all ``parallel-cli login`` variants. + + ``login_method`` selects the identity-provider hint and UX flavor: + + - ``None`` → plain device flow: print URL + code, open browser. + - ``"email"`` → email magic-link: POST ``/api/auth/send-magic-link``, + tell the user to check their inbox, do NOT open + the browser. Falls back to manual display on + magic-link failure. + - ``"google"`` → append ``login_hint=login=google`` to the URL + and open the browser. + - ``"sso"`` → append ``login_hint=login=sso&email=`` to + the URL (two separate query params) and open + the browser. + """ + import webbrowser + + from parallel_web_tools.core.auth import ( + build_verification_uri, + ensure_client_id, + is_headless, + send_magic_link, + ) + + login_hint = _build_login_hint(login_method, email) + extra_params = _login_extra_params(login_method, email) -@main.command() -@click.option("--json", "output_json", is_flag=True, help="Output as JSON") -@click.option("--device", is_flag=True, help="Use device authorization flow (for SSH, containers, etc.)") -def login(output_json: bool, device: bool): - """Authenticate with Parallel API.""" if not output_json: - if device: - console.print("[bold cyan]Authenticating with Parallel (device flow)...[/bold cyan]\n") - else: - console.print("[bold cyan]Authenticating with Parallel...[/bold cyan]\n") + console.print("[bold cyan]Authenticating with Parallel...[/bold cyan]\n") def _on_device_code(info): + magic_link_sent = False + magic_link_error: str | None = None + if login_method == "email" and email: + try: + send_magic_link(client_id=ensure_client_id(), email=email, user_code=info.user_code) + magic_link_sent = True + except Exception as e: + magic_link_error = str(e) + + enriched_uri = build_verification_uri(info.verification_uri_complete, login_hint, extra_params=extra_params) + if output_json: - print( - json.dumps( - { - "status": "waiting_for_authorization", - "verification_uri": info.verification_uri, - "verification_uri_complete": info.verification_uri_complete, - "user_code": info.user_code, - "expires_in": info.expires_in, - } - ), - flush=True, + payload = { + "status": "waiting_for_authorization", + "verification_uri": info.verification_uri, + "verification_uri_complete": enriched_uri, + "user_code": info.user_code, + "expires_in": info.expires_in, + } + if login_method == "email": + payload["magic_link_sent"] = magic_link_sent + if magic_link_error: + payload["magic_link_error"] = magic_link_error + print(json.dumps(payload), flush=True) + return + + if magic_link_sent: + # Email 
login succeeded: tell the user to check their inbox. + # Still print the URL + code as a fallback in case the mail is + # slow or lands in spam. Do NOT open the browser. + console.print(f"[green]Magic link sent to {email}.[/green] Check your inbox to authorize.") + console.print( + f"\nOr visit [bold cyan]{info.verification_uri}[/bold cyan] " + f"and enter code [bold yellow]{info.user_code}[/bold yellow]." ) - else: - console.print(f"Visit: [bold cyan]{info.verification_uri}[/bold cyan]") - console.print(f"Enter code: [bold yellow]{info.user_code}[/bold yellow]\n") - console.print(f"Or open: [link={info.verification_uri_complete}]{info.verification_uri_complete}[/link]\n") console.print("Waiting for authorization...") + return + + if magic_link_error: + console.print( + f"[yellow]Could not send magic link ({magic_link_error}); " + "falling back to manual authorization.[/yellow]\n" + ) + + console.print(f"Visit: [bold cyan]{info.verification_uri}[/bold cyan]") + console.print(f"Enter code: [bold yellow]{info.user_code}[/bold yellow]\n") + console.print(f"Or open: [link={enriched_uri}]{enriched_uri}[/link]\n") + console.print("Confirm the code matches what your browser shows, then authorize.") + console.print("Waiting for authorization...") + + # Providing an on_device_code callback suppresses auth.py's default + # browser-launch branch, so open it here for interactive CLI use. + if not is_headless(): + try: + webbrowser.open(enriched_uri) + except Exception: + pass try: - get_api_key(force_login=True, device=device, on_device_code=_on_device_code) + get_api_key(force_login=True, on_device_code=_on_device_code, login_hint=login_hint) if output_json: print(json.dumps({"status": "authenticated"})) else: @@ -537,6 +660,67 @@ def _on_device_code(info): _handle_error(e, output_json=output_json, exit_code=EXIT_AUTH_ERROR, prefix="Authentication failed") +@main.group(invoke_without_command=True) +@click.option("--json", "output_json", is_flag=True, help="Output as JSON") +@click.pass_context +def login(ctx: click.Context, output_json: bool): + """Authenticate with Parallel API (device authorization flow). + + \b + Examples: + parallel-cli login # opens browser for SSO + parallel-cli login email you@example.com # sends a magic-link email + parallel-cli login google # opens browser, hints Google SSO + parallel-cli login sso you@example.com # opens browser, hints SSO + email + """ + ctx.ensure_object(dict) + ctx.obj["output_json"] = output_json + if ctx.invoked_subcommand is None: + _run_login(output_json=output_json, email=None, login_method=None) + + +@login.command("email") +@click.argument("user_email") +@click.pass_context +def login_email(ctx: click.Context, user_email: str): + """Send a magic-link email to USER_EMAIL that auto-confirms the CLI's device code. + + No browser is opened — the link in the email handles authorization. If the + email can't be sent, the CLI falls back to printing the URL and code for + manual entry. + """ + output_json = ctx.obj.get("output_json", False) if ctx.obj else False + _run_login(output_json=output_json, email=user_email, login_method="email") + + +@login.command("google") +@click.pass_context +def login_google(ctx: click.Context): + """Authenticate via Google SSO. + + Opens the browser on a verification URL that hints ``login=google`` so the + landing page auto-routes to Google's SSO (and auto-submits where it can + if the user is already signed in). 
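+
+    The enriched URL comes out shaped roughly like this (host, path, and
+    code are illustrative; ``login_hint`` is URL-encoded)::
+
+        https://platform.parallel.ai/activate?user_code=ABCD-EFGH
+            &onboard_variant=agent&login_hint=login%3Dgoogle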
+ """ + output_json = ctx.obj.get("output_json", False) if ctx.obj else False + _run_login(output_json=output_json, email=None, login_method="google") + + +@login.command("sso") +@click.argument("user_email") +@click.pass_context +def login_sso(ctx: click.Context, user_email: str): + """Authenticate via enterprise SSO for USER_EMAIL. + + Opens the browser on a verification URL with ``login_hint=login=sso`` + plus a separate ``email=`` query param so the landing page + resolves the right SSO tenant for the email domain and pre-fills + the address. + """ + output_json = ctx.obj.get("output_json", False) if ctx.obj else False + _run_login(output_json=output_json, email=user_email, login_method="sso") + + @main.command(name="logout") @click.option("--json", "output_json", is_flag=True, help="Output as JSON") def logout_cmd(output_json: bool): @@ -550,6 +734,106 @@ def logout_cmd(output_json: bool): console.print("[yellow]No stored credentials found[/yellow]") +def _format_cents(cents: int | float) -> str: + """Render a cents amount as ``$X.YZ (N¢)``.""" + return f"${cents / 100:.2f} ({int(cents)}¢)" + + +def _derive_idempotency_key(amount_cents: int) -> str: + """Build a deterministic idempotency key for ``balance add``. + + Format: ``{client_id}-{amount_cents}-{five_min_bucket}``, where + ``five_min_bucket`` is the current unix time rounded down to the nearest + 300 seconds. Identical repeat requests inside the same 5-minute window + reuse the same key, so Stripe's idempotency dedupes them server-side. + """ + from parallel_web_tools.core.auth import ensure_client_id + + client_id = ensure_client_id() + five_min_bucket = int(time.time() // 300) * 300 + return f"{client_id}-{amount_cents}-{five_min_bucket}" + + +def _render_balance(resp, output_json: bool, *, prefix_lines: list[str] | None = None) -> None: + """Render a :class:`BalanceResponse` in JSON or Rich-console form.""" + if output_json: + print(json.dumps(resp.model_dump(), indent=2)) + return + + for line in prefix_lines or []: + console.print(line) + console.print(f"Organization: [cyan]{resp.org_id}[/cyan]") + console.print(f"Credit balance: [bold green]{_format_cents(resp.credit_balance_cents)}[/bold green]") + pending = resp.pending_debit_balance_cents or 0 + if pending: + console.print(f"Pending debit: [yellow]{_format_cents(pending)}[/yellow]") + if resp.will_invoice: + console.print("[dim]Billed by invoice (postpaid)[/dim]") + + +@main.group(name="balance") +@click.option("--json", "output_json", is_flag=True, help="Output as JSON") +@click.pass_context +def balance(ctx: click.Context, output_json: bool): + """Inspect or top up the org's prepaid credit balance.""" + ctx.ensure_object(dict) + ctx.obj["output_json"] = output_json + + +@balance.command("get") +@click.pass_context +def balance_get(ctx: click.Context): + """Show the current credit balance.""" + from parallel_web_tools.core import service + + output_json = ctx.obj.get("output_json", False) if ctx.obj else False + try: + token = get_control_api_access_token() + resp = service.get_balance(token) + except ReauthenticationRequired as e: + _handle_error(e, output_json=output_json, exit_code=EXIT_AUTH_ERROR, prefix="Authentication required") + return + except Exception as e: + _handle_error(e, output_json=output_json, exit_code=EXIT_API_ERROR, prefix="Balance API error") + return + + _render_balance(resp, output_json) + + +@balance.command("add") +@click.argument("amount_cents", type=int) +@click.option( + "--idempotency-key", + "idempotency_key_override", + default=None, + 
help="Override the auto-derived idempotency key. Defaults to " + "{client_id}-{amount_cents}-{5min_bucket} so repeat attempts inside " + "the same 5-minute window dedupe server-side.", +) +@click.pass_context +def balance_add(ctx: click.Context, amount_cents: int, idempotency_key_override: str | None): + """Charge and top up the prepaid balance by AMOUNT_CENTS.""" + from parallel_web_tools.core import service + + output_json = ctx.obj.get("output_json", False) if ctx.obj else False + idempotency_key = idempotency_key_override or _derive_idempotency_key(amount_cents) + try: + token = get_control_api_access_token() + resp = service.add_balance(token, amount_cents, idempotency_key) + except ReauthenticationRequired as e: + _handle_error(e, output_json=output_json, exit_code=EXIT_AUTH_ERROR, prefix="Authentication required") + return + except Exception as e: + _handle_error(e, output_json=output_json, exit_code=EXIT_API_ERROR, prefix="Balance API error") + return + + _render_balance( + resp, + output_json, + prefix_lines=[f"[green]Added {_format_cents(amount_cents)} to balance.[/green]"], + ) + + @main.command(name="update") @click.option("--check", is_flag=True, help="Check for updates without installing") @click.option("--force", is_flag=True, help="Reinstall even if already at latest version") @@ -831,12 +1115,9 @@ def search( source_policy["after_date"] = after_date try: - from parallel import Parallel - - from parallel_web_tools.core import get_default_headers + from parallel_web_tools.core.auth import get_client - api_key = get_api_key() - client = Parallel(api_key=api_key, default_headers=get_default_headers("cli")) + client = get_client(source="cli") fetch_policy: dict[str, Any] = {} if max_age_seconds is not None: @@ -999,12 +1280,9 @@ def extract( raise click.UsageError(f"--objective must be 5000 characters or fewer (got {len(objective)}).") try: - from parallel import Parallel - - from parallel_web_tools.core import get_default_headers + from parallel_web_tools.core.auth import get_client - api_key = get_api_key() - client = Parallel(api_key=api_key, default_headers=get_default_headers("cli")) + client = get_client(source="cli") fetch_policy: dict[str, Any] = {} if max_age_seconds is not None: diff --git a/parallel_web_tools/core/__init__.py b/parallel_web_tools/core/__init__.py index 0c11d39..cb54908 100644 --- a/parallel_web_tools/core/__init__.py +++ b/parallel_web_tools/core/__init__.py @@ -2,11 +2,13 @@ from parallel_web_tools.core.auth import ( DeviceCodeInfo, + ReauthenticationRequired, create_client, get_api_key, get_async_client, get_auth_status, get_client, + get_control_api_access_token, logout, poll_device_token, request_device_code, @@ -87,11 +89,13 @@ __all__ = [ # Auth "DeviceCodeInfo", + "ReauthenticationRequired", "create_client", "get_api_key", + "get_async_client", "get_auth_status", "get_client", - "get_async_client", + "get_control_api_access_token", "logout", "poll_device_token", "request_device_code", diff --git a/parallel_web_tools/core/auth.py b/parallel_web_tools/core/auth.py index f8087f2..0558b59 100644 --- a/parallel_web_tools/core/auth.py +++ b/parallel_web_tools/core/auth.py @@ -1,13 +1,21 @@ -"""OAuth Authentication for Parallel API.""" +"""Device-flow authentication for parallel-cli. + +Authentication happens exclusively via the OAuth 2.0 Device Authorization Grant +(RFC 8628) against the platform's ``/getServiceKeys/*`` endpoints. 
After a +successful device flow the CLI additionally provisions a data-API key against +the service API so that subsequent commands (search, extract, etc.) have a key +to use. + +All endpoints are built from :mod:`parallel_web_tools.core.endpoints`, so a +local dev stack can be reached via ``PARALLEL_PLATFORM_URL`` / +``PARALLEL_SERVICE_API_URL`` env vars. +""" + +from __future__ import annotations -import base64 -import hashlib -import html -import http.server import json import os -import secrets -import socketserver +import platform as _platform import sys import time import urllib.error @@ -16,20 +24,34 @@ import webbrowser from collections.abc import Callable from dataclasses import dataclass -from pathlib import Path from parallel import AsyncParallel, Parallel +from parallel_web_tools.core import credentials, service +from parallel_web_tools.core.endpoints import ( + CLIENT_ID, + DEFAULT_SCOPE, + get_api_url, + get_platform_url, +) from parallel_web_tools.core.user_agent import ClientSource, get_default_headers -# OAuth Configuration -OAUTH_PROVIDER_HOST = "platform.parallel.ai" -OAUTH_PROVIDER_PATH_PREFIX = "/getKeys" -OAUTH_SCOPE = "key:read" -TOKEN_FILE = Path.home() / ".config" / "parallel-web-tools" / "credentials.json" - -# Device flow grant type (RFC 8628) DEVICE_CODE_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:device_code" +REFRESH_TOKEN_GRANT_TYPE = "refresh_token" + +# Proactively refresh when the access token is within this many seconds of its +# absolute expiry, so callers don't get a token that dies mid-request under clock +# skew or network latency. +ACCESS_TOKEN_SKEW_SECONDS = 30 + + +class ReauthenticationRequired(Exception): + """Raised when the control-API grant can no longer be refreshed silently. + + The caller must run ``parallel-cli login`` before any control-API call + will succeed — the authorization grant, the refresh token, or both have + expired (or never existed), so no silent refresh is possible. + """ @dataclass @@ -37,221 +59,235 @@ class DeviceCodeInfo: """Response from the device authorization endpoint (RFC 8628).""" device_code: str - """Opaque code used to poll the token endpoint. Never shown to user.""" - user_code: str - """Human-readable code the user enters at the verification URL (e.g. 
BCDF-GHJK).""" - verification_uri: str - """URL the user visits to enter the code.""" - verification_uri_complete: str - """URL with user_code pre-filled as a query parameter.""" + expires_in: int + interval: int + +@dataclass +class TokenResponse: + """Response from ``/getServiceKeys/token`` (device or refresh grant).""" + + access_token: str + refresh_token: str expires_in: int - """Seconds until the device code expires (default 600).""" + refresh_token_expires_in: int + authorization_expires_in: int + org_id: str + org_name: str | None = None + scope: str = "" + token_type: str = "Bearer" - interval: int - """Minimum polling interval in seconds (default 5).""" + @property + def scopes(self) -> list[str]: + return self.scope.split() if self.scope else [] -def _generate_code_verifier() -> str: - """Generate a random code verifier for PKCE.""" - return secrets.token_urlsafe(32) +def _platform_path(path: str) -> str: + return f"{get_platform_url()}{path}" -def _generate_code_challenge(verifier: str) -> str: - """Generate code challenge from verifier using S256.""" - digest = hashlib.sha256(verifier.encode()).digest() - return base64.urlsafe_b64encode(digest).rstrip(b"=").decode() +def is_headless() -> bool: + """Detect if the environment cannot open a browser.""" + if os.environ.get("SSH_CLIENT") or os.environ.get("SSH_TTY"): + return True + if os.environ.get("CI"): + return True + if sys.platform == "linux" and not os.environ.get("DISPLAY") and not os.environ.get("WAYLAND_DISPLAY"): + return True + if os.path.exists("/.dockerenv") or os.environ.get("container"): + return True + return False -def _load_stored_token() -> str | None: - """Load stored OAuth token from file.""" - if not TOKEN_FILE.exists(): - return None - try: - with open(TOKEN_FILE) as f: - data = json.load(f) - return data.get("access_token") - except (OSError, json.JSONDecodeError): - return None - - -def _save_token(access_token: str) -> None: - """Save OAuth token to file with secure permissions.""" - TOKEN_FILE.parent.mkdir(parents=True, exist_ok=True) - with open(TOKEN_FILE, "w") as f: - json.dump({"access_token": access_token}, f) - os.chmod(TOKEN_FILE, 0o600) - - -class OAuthCallbackHandler(http.server.BaseHTTPRequestHandler): - """HTTP handler to receive OAuth callback.""" - - auth_code: str | None = None - error: str | None = None - - def log_message(self, format, *args): - pass - - def do_GET(self): - parsed = urllib.parse.urlparse(self.path) - params = urllib.parse.parse_qs(parsed.query) - - if "code" in params: - OAuthCallbackHandler.auth_code = params["code"][0] - self.send_response(200) - self.send_header("Content-Type", "text/html") - self.end_headers() - self.wfile.write( - b""" - -

-                <h1>Authentication Successful!</h1>
-                <p>You can close this window and return to the terminal.</p>

- - """ - ) - elif "error" in params: - OAuthCallbackHandler.error = params.get("error_description", params["error"])[0] - self.send_response(400) - self.send_header("Content-Type", "text/html") - self.end_headers() - self.wfile.write( - f""" - -

-                <h1>Authentication Failed</h1>
-                <p>{html.escape(OAuthCallbackHandler.error)}</p>

- - """.encode() - ) - else: - self.send_response(404) - self.end_headers() - - -def _do_oauth_flow() -> str: - """Perform OAuth authorization code flow with PKCE.""" - OAuthCallbackHandler.auth_code = None - OAuthCallbackHandler.error = None - - with socketserver.TCPServer(("127.0.0.1", 0), OAuthCallbackHandler) as httpd: - port = httpd.server_address[1] - redirect_uri = f"http://localhost:{port}/callback" - - code_verifier = _generate_code_verifier() - code_challenge = _generate_code_challenge(code_verifier) - - auth_params = { - "client_id": "localhost", - "redirect_uri": redirect_uri, - "response_type": "code", - "scope": OAUTH_SCOPE, - "resource": f"http://localhost:{port}", - "code_challenge": code_challenge, - "code_challenge_method": "S256", - } - auth_url = f"https://{OAUTH_PROVIDER_HOST}{OAUTH_PROVIDER_PATH_PREFIX}/authorize?" + urllib.parse.urlencode( - auth_params - ) +def _post_form(url: str, data: dict[str, str], headers: dict[str, str] | None = None, timeout: int = 30) -> dict: + """POST a form-encoded request, return parsed JSON body. - print("\nOpening browser for authentication...", file=sys.stderr) - print(f"If browser doesn't open, visit: {auth_url}", file=sys.stderr) + Raises ``urllib.error.HTTPError`` on HTTP error (body still readable via ``e.read()``). + """ + body = urllib.parse.urlencode(data).encode() + req_headers = {"Content-Type": "application/x-www-form-urlencoded"} + req_headers.update(_platform_bypass_headers(url)) + if headers: + req_headers.update(headers) + req = urllib.request.Request(url, data=body, headers=req_headers) + with urllib.request.urlopen(req, timeout=timeout) as response: + return json.loads(response.read().decode()) + + +def _post_json(url: str, body: dict, timeout: int = 30) -> dict: + """POST a JSON body, return parsed JSON response.""" + data = json.dumps(body).encode() + req_headers = {"Content-Type": "application/json", "Accept": "application/json"} + req_headers.update(_platform_bypass_headers(url)) + req = urllib.request.Request( + url, + data=data, + headers=req_headers, + ) + with urllib.request.urlopen(req, timeout=timeout) as response: + return json.loads(response.read().decode()) - webbrowser.open(auth_url) - httpd.timeout = 300 - while OAuthCallbackHandler.auth_code is None and OAuthCallbackHandler.error is None: - httpd.handle_request() +def _platform_bypass_headers(url: str) -> dict[str, str]: + """Add Vercel protection bypass header for platform requests when configured.""" + token = os.environ.get("VERCEL_PROTECTION_BYPASS_TOKEN") + if not token: + return {} + if not url.startswith(get_platform_url()): + return {} + return {"X-Vercel-Protection-Bypass": token} - if OAuthCallbackHandler.error: - raise Exception(f"OAuth error: {OAuthCallbackHandler.error}") - auth_code = OAuthCallbackHandler.auth_code +def _get_platform_info() -> dict[str, str]: + """Best-effort OS/arch metadata for the registration payload. - token_url = f"https://{OAUTH_PROVIDER_HOST}{OAUTH_PROVIDER_PATH_PREFIX}/token" - token_data = urllib.parse.urlencode( - { - "grant_type": "authorization_code", - "code": auth_code, - "client_id": "localhost", - "redirect_uri": redirect_uri, - "code_verifier": code_verifier, - "resource": f"http://localhost:{port}", - } - ).encode() + Mirrors the TS ``ClientPlatform`` type: every field is optional. We drop + any key whose value is falsy (e.g. ``platform.processor()`` returns ``""`` + on some Linux distros), so the payload only carries meaningful fields. 
+ """ - req = urllib.request.Request( - token_url, - data=token_data, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - ) + def safe(getter: Callable[[], str]) -> str: + try: + return getter() + except Exception: + return "" + raw = { + "system": safe(_platform.system), + "release": safe(_platform.release), + "machine": safe(_platform.machine), + "processor": safe(_platform.processor), + "version": safe(_platform.version), + "os_name": os.name, + } + return {k: v for k, v in raw.items() if v} + + +def register_client(client_name: str = "parallel-cli") -> str: + """Register this CLI install with the platform and return the new ``client_id``. + + POSTs to ``/getServiceKeys/register`` with the client name and OS platform + metadata. The platform assigns and returns a unique ``client_id`` used on + subsequent OAuth calls. + """ + url = _platform_path("/getServiceKeys/register") + body: dict = {"client_name": client_name, "platform": _get_platform_info()} try: - with urllib.request.urlopen(req, timeout=30) as response: - token_response = json.loads(response.read().decode()) - access_token = token_response.get("access_token") - if not access_token: - raise Exception("No access token in response") - return access_token + data = _post_json(url, body) except urllib.error.HTTPError as e: - error_body = e.read().decode() - raise Exception(f"Token exchange failed: {e.code} - {error_body}") from e + err_body = e.read().decode() + raise Exception(f"Client registration failed: {e.code} - {err_body}") from e + return data["client_id"] + +def send_magic_link(client_id: str, email: str, user_code: str, email_type: str = "deviceCode") -> None: + """Ask the platform to email a magic link that auto-authorizes ``user_code``. -def _is_headless() -> bool: - """Detect if the environment cannot open a browser for OAuth. + POSTs to ``/api/auth/send-magic-link`` with: - Returns True for SSH sessions, containers, CI, and other headless - environments where the authorization code flow won't work. + - ``client_id`` — the registered CLI client. + - ``email`` — recipient. + - ``emailType`` — ``"deviceCode"`` routes the template that confirms a + pending device-flow user code. + - ``queryParams.user_code`` — echoed into the magic-link URL so the + landing page can pre-confirm the CLI's device code in one click. + + Raises ``Exception`` on any HTTP error so the caller can fall back to + the manual URL-and-code flow. """ - # SSH session - if os.environ.get("SSH_CLIENT") or os.environ.get("SSH_TTY"): - return True + url = f"{get_platform_url()}/api/auth/send-magic-link" + body = { + "client_id": client_id, + "email": email, + "emailType": email_type, + "queryParams": {"user_code": user_code}, + } + try: + _post_json(url, body) + except urllib.error.HTTPError as e: + err_body = e.read().decode() + raise Exception(f"Magic link send failed: {e.code} - {err_body}") from e - # CI environments - if os.environ.get("CI"): - return True - # No display on Linux - if sys.platform == "linux" and not os.environ.get("DISPLAY") and not os.environ.get("WAYLAND_DISPLAY"): - return True +def ensure_client_id() -> str: + """Return a registered ``client_id``, registering if none is stored yet. - # Container indicators - if os.path.exists("/.dockerenv") or os.environ.get("container"): - return True + - If the credentials file already has a ``client_id``, returns it. + - Otherwise calls :func:`register_client` and persists the result. 
+ - If registration fails, emits a single-line stderr warning and falls + back to the hardcoded ``CLIENT_ID``. The stored ``client_id`` stays + unset so the next login attempt retries transparently. + """ + creds = credentials.load() or credentials.Credentials() + if creds.client_id: + return creds.client_id + try: + client_id = register_client() + except Exception as e: + print(f"Warning: client registration failed ({e}); using fallback client_id.", file=sys.stderr) + return CLIENT_ID + creds.client_id = client_id + credentials.save(creds) + return client_id - return False +def _replace_client_id(client_id: str) -> None: + """Persist a freshly registered client_id, replacing any stale value.""" + creds = credentials.load() or credentials.Credentials() + creds.client_id = client_id + credentials.save(creds) -def request_device_code() -> DeviceCodeInfo: - """Request a device code from the authorization server (RFC 8628 Step 1). - Returns a DeviceCodeInfo with the user_code, verification URL, and device_code - needed for the rest of the flow. Callers should present the verification_uri and - user_code to the user, then call poll_device_token() to wait for authorization. +def _is_unknown_client_error(error: Exception) -> bool: + """Return True when the platform rejects an unknown/stale client_id.""" + message = str(error) + return '"error":"invalid_client"' in message and "Unknown client_id" in message - Example:: - info = request_device_code() - print(f"Visit {info.verification_uri} and enter code: {info.user_code}") - token = poll_device_token(info) - """ - device_code_url = f"https://{OAUTH_PROVIDER_HOST}{OAUTH_PROVIDER_PATH_PREFIX}/device/code" +def _reregister_client_id() -> str: + """Register a new client_id and persist it, raising on failure.""" + client_id = register_client() + _replace_client_id(client_id) + return client_id - request_data = urllib.parse.urlencode({"client_id": "localhost", "scope": OAUTH_SCOPE}).encode() - req = urllib.request.Request( - device_code_url, - data=request_data, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - ) +def build_verification_uri( + base: str, + login_hint: str | None, + extra_params: dict[str, str] | None = None, +) -> str: + """Append ``agent=true``, an optional ``login_hint``, and any ``extra_params``. + + ``login_hint`` is appended verbatim as a query param. Callers format it + per the platform's scheme — e.g. ``login=email``, ``login=google``, + ``login=sso`` — so the landing page can route the user to the right + identity provider. + + ``extra_params`` appends additional top-level query params (e.g. + ``email=``) alongside ``login_hint``. Used by the email and SSO + flows where the identity lives outside the ``login_hint`` value. 
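+
+    Example (values illustrative)::
+
+        build_verification_uri(
+            "https://platform.parallel.ai/activate?user_code=ABCD",
+            login_hint="login=sso",
+            extra_params={"email": "you@example.com"},
+        )
+        # -> ".../activate?user_code=ABCD&onboard_variant=agent"
+        #    "&login_hint=login%3Dsso&email=you%40example.com"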
+ """ + parsed = urllib.parse.urlparse(base) + query = urllib.parse.parse_qsl(parsed.query, keep_blank_values=True) + query.append(("onboard_variant", "agent")) + if login_hint: + query.append(("login_hint", login_hint)) + for k, v in (extra_params or {}).items(): + query.append((k, v)) + return urllib.parse.urlunparse(parsed._replace(query=urllib.parse.urlencode(query))) + + +def request_device_code(scope: str = DEFAULT_SCOPE, client_id: str | None = None) -> DeviceCodeInfo: + """Request a device code from ``/getServiceKeys/device/code`` (RFC 8628 Step 1).""" + url = _platform_path("/getServiceKeys/device/code") try: - with urllib.request.urlopen(req, timeout=30) as response: - data = json.loads(response.read().decode()) + data = _post_form(url, {"client_id": client_id or CLIENT_ID, "scope": scope}) except urllib.error.HTTPError as e: - error_body = e.read().decode() - raise Exception(f"Device code request failed: {e.code} - {error_body}") from e + body = e.read().decode() + raise Exception(f"Device code request failed: {e.code} - {body}") from e return DeviceCodeInfo( device_code=data["device_code"], @@ -263,250 +299,409 @@ def request_device_code() -> DeviceCodeInfo: ) -def poll_device_token(info: DeviceCodeInfo) -> str: - """Poll the token endpoint until the user authorizes (RFC 8628 Step 3). +def _parse_token_response(data: dict) -> TokenResponse: + return TokenResponse( + access_token=data["access_token"], + refresh_token=data["refresh_token"], + expires_in=int(data.get("expires_in", 0)), + refresh_token_expires_in=int(data.get("refresh_token_expires_in", 0)), + authorization_expires_in=int(data.get("authorization_expires_in", 0)), + org_id=data["org_id"], + org_name=data.get("org_name"), + scope=data.get("scope", ""), + token_type=data.get("token_type", "Bearer"), + ) - Args: - info: DeviceCodeInfo from request_device_code(). - Returns: - The access token string. +def poll_device_token(info: DeviceCodeInfo, client_id: str | None = None) -> TokenResponse: + """Poll ``/getServiceKeys/token`` until the user authorizes (RFC 8628 Step 3). - Raises: - Exception: On expiry, denial, or other errors. + Polls the token endpoint immediately on entry, then waits ``interval`` + seconds between subsequent polls. RFC 8628 only requires waiting *between* + requests, so polling right away makes fast authorizations feel snappy + instead of eating a silent ``interval``-second delay. 
""" - token_url = f"https://{OAUTH_PROVIDER_HOST}{OAUTH_PROVIDER_PATH_PREFIX}/token" + url = _platform_path("/getServiceKeys/token") interval = info.interval deadline = time.monotonic() + info.expires_in while time.monotonic() < deadline: - time.sleep(interval) - - poll_data = urllib.parse.urlencode( - { - "grant_type": DEVICE_CODE_GRANT_TYPE, - "device_code": info.device_code, - "client_id": "localhost", - } - ).encode() - - poll_req = urllib.request.Request( - token_url, - data=poll_data, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - ) - try: - with urllib.request.urlopen(poll_req, timeout=30) as response: - token_response = json.loads(response.read().decode()) - access_token = token_response.get("access_token") - if access_token: - return access_token - raise Exception("No access token in response") + data = _post_form( + url, + { + "grant_type": DEVICE_CODE_GRANT_TYPE, + "device_code": info.device_code, + "client_id": client_id or CLIENT_ID, + }, + ) + return _parse_token_response(data) except urllib.error.HTTPError as e: - error_body = json.loads(e.read().decode()) - error_code = error_body.get("error", "") - + body = json.loads(e.read().decode()) + error_code = body.get("error", "") if error_code == "authorization_pending": - continue + pass elif error_code == "slow_down": interval += 5 - continue elif error_code == "expired_token": raise Exception("Device code expired. Please try again.") from e elif error_code == "access_denied": raise Exception("Authorization denied by user.") from e else: - raise Exception(f"Token exchange failed: {error_body.get('error_description', error_code)}") from e + raise Exception(f"Token exchange failed: {body.get('error_description', error_code)}") from e + time.sleep(interval) raise Exception("Device code expired (timeout). Please try again.") -def _do_device_flow(on_device_code: Callable[[DeviceCodeInfo], None] | None = None) -> str: - """Perform the full device authorization flow (request + poll). +def refresh_access_token(refresh_token: str, client_id: str | None = None) -> TokenResponse: + """Exchange a refresh token for a new access+refresh token pair.""" + url = _platform_path("/getServiceKeys/token") + try: + data = _post_form( + url, + { + "grant_type": REFRESH_TOKEN_GRANT_TYPE, + "refresh_token": refresh_token, + "client_id": client_id or CLIENT_ID, + }, + ) + except urllib.error.HTTPError as e: + body = e.read().decode() + raise Exception(f"Token refresh failed: {e.code} - {body}") from e + return _parse_token_response(data) + + +def revoke_token(refresh_token: str) -> None: + """Revoke a refresh token via form-encoded POST. + + Body shape: ``refresh_token=`` (application/x-www-form-urlencoded). + The endpoint identifies the caller from the token itself — no bearer auth. + """ + url = _platform_path("/getServiceKeys/token/revoke") + body = urllib.parse.urlencode({"refresh_token": refresh_token}).encode() + req = urllib.request.Request( + url, + data=body, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + **_platform_bypass_headers(url), + }, + method="POST", + ) + try: + with urllib.request.urlopen(req, timeout=30): + pass + except urllib.error.HTTPError as e: + err_body = e.read().decode() + raise Exception(f"Token revocation failed: {e.code} - {err_body}") from e - Args: - on_device_code: Optional callback invoked with the DeviceCodeInfo after requesting - the device code. 
Use this to present the verification URL and user code to the - user in a custom way (e.g., in a chat message). If not provided, prints - instructions to stderr and attempts to open the browser. + +def _do_device_flow( + login_hint: str | None = None, + on_device_code: Callable[[DeviceCodeInfo], None] | None = None, + client_id: str | None = None, +) -> TokenResponse: + """Run the full device authorization flow (request + poll) and return tokens. + + ``login_hint`` — if set — is appended to the verification URI so the + platform login page can pre-fill / auto-submit the right identity + provider. Only observable in the default (no-callback) fallback path; + callers that provide ``on_device_code`` build their own enriched URI. """ - info = request_device_code() + try: + info = request_device_code(client_id=client_id) + except Exception as e: + if client_id and _is_unknown_client_error(e): + client_id = _reregister_client_id() + info = request_device_code(client_id=client_id) + else: + raise + + enriched_uri = build_verification_uri(info.verification_uri_complete, login_hint) if on_device_code: on_device_code(info) else: - # Default: print to stderr and try to open browser print(f"\nTo authenticate, visit: {info.verification_uri}", file=sys.stderr) print(f"And enter code: {info.user_code}\n", file=sys.stderr) - print(f"Or open: {info.verification_uri_complete}\n", file=sys.stderr) + print(f"Or open: {enriched_uri}\n", file=sys.stderr) print(f"Waiting for authorization (expires in {info.expires_in // 60} minutes)...", file=sys.stderr) - try: - webbrowser.open(info.verification_uri_complete) - except Exception: - pass + if not is_headless(): + try: + webbrowser.open(enriched_uri) + except Exception: + pass + + return poll_device_token(info, client_id=client_id) + + +def _persist_token_response(resp: TokenResponse) -> None: + """Write a TokenResponse into credentials under its org_id, selecting it.""" + now = int(time.time()) + creds = credentials.load() or credentials.Credentials() + org = creds.orgs.get(resp.org_id) or credentials.OrgCredentials() + org.org_name = resp.org_name + org.control_api = credentials.ControlApiTokens( + access_token=resp.access_token, + access_token_expires_at=now + resp.expires_in, + access_token_scopes=resp.scopes, + refresh_token=resp.refresh_token, + refresh_token_expires_at=now + resp.refresh_token_expires_in, + authorization_expires_at=now + resp.authorization_expires_in, + ) + creds.orgs[resp.org_id] = org + creds.selected_org_id = resp.org_id + credentials.save(creds) - return poll_device_token(info) +def get_control_api_access_token() -> str: + """Return a currently-valid control-API access token for the selected org. -def resolve_api_key(api_key: str | None = None) -> str: - """Resolve API key from parameter, environment, or stored credentials. + Transparently refreshes the access token when it has expired (or is about + to expire within ``ACCESS_TOKEN_SKEW_SECONDS``), persisting the refreshed + tokens back to the credentials file. + + Raises: + ReauthenticationRequired: The caller must run ``parallel-cli login``. + Reasons: no stored credentials, no control-API tokens for the + selected org, ``authorization_expires_at`` in the past, or + ``refresh_token_expires_at`` in the past. + """ + creds = credentials.load() + org = creds.selected_org() if creds else None + if org is None: + raise ReauthenticationRequired("not logged in; run 'parallel-cli login'") - This is the non-interactive version that raises an error if no key is found. 
- Use get_api_key() if you want interactive OAuth flow as a fallback. + tokens = org.control_api + access_token = tokens.access_token + if not access_token: + raise ReauthenticationRequired("not logged in; run 'parallel-cli login'") - Args: - api_key: Optional API key. If provided, returns it directly. + now = int(time.time()) - Returns: - The resolved API key string. + if tokens.authorization_expires_at is not None and now >= tokens.authorization_expires_at: + raise ReauthenticationRequired("authorization grant has expired; run 'parallel-cli login'") - Raises: - ValueError: If no API key can be found. + # Fast path: current access token still valid beyond the skew buffer. + if tokens.access_token_expires_at is None or now < tokens.access_token_expires_at - ACCESS_TOKEN_SKEW_SECONDS: + return access_token + + # Access token is (about to be) expired. Can we refresh? + refresh_token_value = tokens.refresh_token + if not refresh_token_value: + raise ReauthenticationRequired("no refresh token available; run 'parallel-cli login'") + if tokens.refresh_token_expires_at is not None and now >= tokens.refresh_token_expires_at: + raise ReauthenticationRequired("refresh token has expired; run 'parallel-cli login'") + + new_tokens = refresh_access_token(refresh_token_value, client_id=ensure_client_id()) + _persist_token_response(new_tokens) + return new_tokens.access_token + + +def login_flow( + login_hint: str | None = None, + on_device_code: Callable[[DeviceCodeInfo], None] | None = None, +) -> str: + """Run the full CLI login: register client → device flow → persist tokens → auto-mint data API key. + + ``login_hint`` is forwarded to the device flow's URL enrichment (see + :func:`build_verification_uri`). Returns the newly-minted data API key. + """ + client_id = ensure_client_id() + token_resp = _do_device_flow(login_hint=login_hint, on_device_code=on_device_code, client_id=client_id) + _persist_token_response(token_resp) + + api_key, key_name = service.provision_cli_api_key(token_resp.access_token, client_id=client_id) + + creds = credentials.load() + assert creds is not None and creds.selected_org_id == token_resp.org_id + creds.orgs[token_resp.org_id].api_key = api_key + # Drop the v0→v1 legacy placeholder org now that the user is properly + # authenticated against a real org. It only existed for backwards compat + # during migration; keeping it around after login would be dead state. + if credentials.LEGACY_ORG_ID != token_resp.org_id: + creds.orgs.pop(credentials.LEGACY_ORG_ID, None) + credentials.save(creds) + + if not on_device_code: + print(f"Authentication successful! Provisioned data API key: {key_name}", file=sys.stderr) + + return api_key + + +def resolve_api_key(api_key: str | None = None) -> str: + """Resolve API key from parameter, environment, or stored credentials. + + Priority: explicit ``api_key`` argument → ``PARALLEL_API_KEY`` env var → + stored credentials. Env beats stored creds so operators can override a + developer's local ``parallel-cli login`` session by exporting the env var + (matches the convention used by AWS, GCP, Anthropic, Stripe SDKs). + Raises ``ValueError`` if no key is available. """ if api_key: return api_key - env_key = os.environ.get("PARALLEL_API_KEY") if env_key: return env_key - - stored_token = _load_stored_token() - if stored_token: - return stored_token - + stored = credentials.get_selected_api_key() + if stored: + return stored raise ValueError( - "Parallel API key required. 
Provide via api_key parameter, " - "PARALLEL_API_KEY environment variable, or run 'parallel-cli login'." + "Parallel API key required. Set the PARALLEL_API_KEY environment " + "variable, run 'parallel-cli login', or pass api_key explicitly." ) def get_api_key( force_login: bool = False, - device: bool = False, on_device_code: Callable[[DeviceCodeInfo], None] | None = None, + login_hint: str | None = None, ) -> str: - """Get API key/token for Parallel API with interactive OAuth fallback. - - Priority: - 1. PARALLEL_API_KEY environment variable - 2. Stored OAuth token - 3. Interactive OAuth flow (or device flow if headless/requested) - - Args: - force_login: Force a new login flow, ignoring stored credentials. - device: Force device authorization flow instead of browser-based PKCE. - on_device_code: Callback invoked with DeviceCodeInfo when using device flow. - Use this to present the verification URL and user code to the user - programmatically (e.g., in a chat message from an AI agent). If not - provided, instructions are printed to stderr. - """ - api_key = os.environ.get("PARALLEL_API_KEY") - if api_key and not force_login: - return api_key + """Get API key, triggering device-flow login + auto-mint as a fallback. + + Priority (when not ``force_login``): ``PARALLEL_API_KEY`` env var → stored + credentials → service-API key provisioning from stored control-API tokens + → interactive device flow. + ``login_hint`` is forwarded to :func:`login_flow` — see + :func:`build_verification_uri` for the supported hint format. + """ if not force_login: - stored_token = _load_stored_token() - if stored_token: - return stored_token + env_key = os.environ.get("PARALLEL_API_KEY") + if env_key: + return env_key - use_device = device or _is_headless() + stored = credentials.get_selected_api_key() + if stored: + return stored - if use_device: - if not on_device_code: - print("Starting device authorization...", file=sys.stderr) - access_token = _do_device_flow(on_device_code=on_device_code) - else: - print("Starting authentication...", file=sys.stderr) - access_token = _do_oauth_flow() + # If we still have valid control-API auth but no data API key saved, + # mint a new data key via service API before forcing an interactive + # device-authorization flow. + try: + access_token = get_control_api_access_token() + client_id = ensure_client_id() + minted_api_key, _ = service.provision_cli_api_key(access_token, client_id=client_id) + creds = credentials.load() + if creds is not None: + org = creds.selected_org() + if org is not None: + org.api_key = minted_api_key + credentials.save(creds) + return minted_api_key + except ReauthenticationRequired: + pass + except service.ServiceApiError: + pass - _save_token(access_token) if not on_device_code: - print("Authentication successful! Credentials saved.", file=sys.stderr) + print("Starting device authorization...", file=sys.stderr) + return login_flow(login_hint=login_hint, on_device_code=on_device_code) - return access_token - -def create_client( - api_key: str | None = None, - source: ClientSource = "python", -) -> Parallel: - """Create a configured Parallel client, resolving the API key if not provided. - - Unlike get_client(), this uses resolve_api_key() which raises ValueError - instead of triggering interactive OAuth if no key is found. - - Args: - api_key: Optional API key. Resolved from env/stored credentials if not provided. - source: Source identifier for User-Agent (cli, duckdb, bigquery, etc.) - - Returns: - A configured Parallel client. 
- """ +def create_client(api_key: str | None = None, source: ClientSource = "python") -> Parallel: + """Create a configured Parallel client, resolving the API key if not provided.""" return Parallel( + base_url=get_api_url(), api_key=resolve_api_key(api_key), default_headers=get_default_headers(source), ) -def get_client( - force_login: bool = False, - source: ClientSource = "python", -) -> Parallel: - """Get a configured Parallel client with interactive OAuth fallback. - - Args: - force_login: Force a new OAuth login flow. - source: Source identifier for User-Agent (cli, duckdb, bigquery, etc.) - - Returns: - A configured Parallel client. - """ - api_key = get_api_key(force_login=force_login) +def get_client(force_login: bool = False, source: ClientSource = "python") -> Parallel: + """Get a configured Parallel client with interactive device-flow fallback.""" return Parallel( - api_key=api_key, + base_url=get_api_url(), + api_key=get_api_key(force_login=force_login), default_headers=get_default_headers(source), ) -def get_async_client( - force_login: bool = False, - source: ClientSource = "python", -) -> AsyncParallel: - """Get a configured async Parallel client with User-Agent header. - - Args: - force_login: Force a new OAuth login flow. - source: Source identifier for User-Agent (cli, duckdb, bigquery, etc.) - - Returns: - A configured async Parallel client. - """ - api_key = get_api_key(force_login=force_login) +def get_async_client(force_login: bool = False, source: ClientSource = "python") -> AsyncParallel: + """Get a configured async Parallel client.""" return AsyncParallel( - base_url="https://api.parallel.ai", - api_key=api_key, + base_url=get_api_url(), + api_key=get_api_key(force_login=force_login), default_headers=get_default_headers(source), ) def logout() -> bool: - """Remove stored OAuth token.""" - if TOKEN_FILE.exists(): - TOKEN_FILE.unlink() - return True - return False - - -def get_auth_status() -> dict[str, str | bool | None]: - """Get current authentication status.""" - api_key = os.environ.get("PARALLEL_API_KEY") - if api_key: - return {"authenticated": True, "method": "environment", "token_file": None} + """Revoke all stored refresh tokens (best-effort) and remove auth files.""" + creds = credentials.load() + if creds is not None: + seen_refresh_tokens: set[str] = set() + for org in creds.orgs.values(): + refresh_token = org.control_api.refresh_token + if not refresh_token or refresh_token in seen_refresh_tokens: + continue + seen_refresh_tokens.add(refresh_token) + try: + revoke_token(refresh_token) + except Exception as e: + print( + f"Warning: refresh token revocation failed ({e}); removing local credentials anyway.", + file=sys.stderr, + ) + return credentials.delete() + + +def get_auth_status() -> dict: + """Get current authentication status. + + Reports BOTH sources independently so callers can show that ``PARALLEL_API_KEY`` + is overriding a stored login. ``method`` names the source that + :func:`resolve_api_key` would actually return — env beats stored creds. + + Returned fields: + + - ``authenticated``: at least one source has a key. + - ``method``: ``"environment"`` | ``"oauth"`` | ``None`` — which source wins. + - ``env_var_set``: whether ``PARALLEL_API_KEY`` is set (regardless of winner). + - ``has_stored_credentials``: whether ``auth.json`` has a usable key. + - ``stored_overridden_by_env``: True when the env var is set AND stored creds + exist; the stored creds will not be used until the env var is unset. 
+ - ``token_file`` / ``version`` / ``selected_org_id`` / ``selected_org_name`` + / ``has_control_api_tokens``: stored-credential metadata when present. + """ + env_key = os.environ.get("PARALLEL_API_KEY") + env_var_set = bool(env_key) + + creds = credentials.load() + stored_org = creds.selected_org() if creds is not None else None + has_stored_credentials = bool(stored_org and stored_org.api_key) + + status: dict = { + "authenticated": env_var_set or has_stored_credentials, + "method": None, + "env_var_set": env_var_set, + "has_stored_credentials": has_stored_credentials, + "stored_overridden_by_env": env_var_set and has_stored_credentials, + "token_file": None, + "version": None, + "selected_org_id": None, + "selected_org_name": None, + "has_control_api_tokens": False, + } + + if has_stored_credentials and creds is not None and stored_org is not None: + token_file = credentials.get_active_credentials_file() or credentials.CREDENTIALS_FILE + status.update( + { + "token_file": str(token_file), + "version": creds.version, + "selected_org_id": creds.selected_org_id, + "selected_org_name": stored_org.org_name, + "has_control_api_tokens": bool(stored_org.control_api.refresh_token), + } + ) - stored_token = _load_stored_token() - if stored_token: - return {"authenticated": True, "method": "oauth", "token_file": str(TOKEN_FILE)} + if env_var_set: + status["method"] = "environment" + elif has_stored_credentials: + status["method"] = "oauth" - return {"authenticated": False, "method": None, "token_file": None} + return status diff --git a/parallel_web_tools/core/credentials.py b/parallel_web_tools/core/credentials.py new file mode 100644 index 0000000..817a060 --- /dev/null +++ b/parallel_web_tools/core/credentials.py @@ -0,0 +1,215 @@ +"""Versioned credentials storage for parallel-cli. + +New structured auth state lives in ``auth.json``. The legacy flat +``credentials.json`` file is left in its old shape for backward compatibility +with older CLI releases. +""" + +from __future__ import annotations + +import json +import os +import tempfile +from dataclasses import asdict, dataclass, field +from pathlib import Path + +AUTH_FILE = Path.home() / ".config" / "parallel-web-tools" / "auth.json" +LEGACY_CREDENTIALS_FILE = Path.home() / ".config" / "parallel-web-tools" / "credentials.json" +# Backward-compatible alias used across the codebase/tests for the new auth file. +CREDENTIALS_FILE = AUTH_FILE +CURRENT_VERSION = 1 +LEGACY_ORG_ID = "legacy" + + +@dataclass +class ControlApiTokens: + access_token: str | None = None + access_token_expires_at: int | None = None + access_token_scopes: list[str] = field(default_factory=list) + refresh_token: str | None = None + refresh_token_expires_at: int | None = None + authorization_expires_at: int | None = None + + +@dataclass +class OrgCredentials: + api_key: str | None = None + org_name: str | None = None + control_api: ControlApiTokens = field(default_factory=ControlApiTokens) + + +@dataclass +class Credentials: + version: int = CURRENT_VERSION + selected_org_id: str | None = None + orgs: dict[str, OrgCredentials] = field(default_factory=dict) + # Dynamically-registered OAuth client_id returned by + # ``/getServiceKeys/register``. ``None`` means registration hasn't + # succeeded yet (first boot, migrated v0 file, or prior failure) — the + # next login attempt will retry and fall back to a hardcoded id on error. 
+ client_id: str | None = None + + def selected_org(self) -> OrgCredentials | None: + if self.selected_org_id is None: + return None + return self.orgs.get(self.selected_org_id) + + +def _migrate_v0(raw: dict) -> dict: + """Transform a v0 credentials dict into v1 shape. + + v0 shape: ``{"access_token": ""}`` — a single API key with no org context. + The token is wrapped into a placeholder ``legacy`` org so existing users keep + working without re-authenticating. + """ + legacy_token = raw.get("access_token") + org: dict = {} + if legacy_token: + org["api_key"] = legacy_token + return { + "version": CURRENT_VERSION, + "selected_org_id": LEGACY_ORG_ID if legacy_token else None, + "orgs": {LEGACY_ORG_ID: org} if legacy_token else {}, + } + + +def _credentials_from_dict(data: dict) -> Credentials: + orgs_raw = data.get("orgs") or {} + orgs: dict[str, OrgCredentials] = {} + for org_id, org_data in orgs_raw.items(): + control_raw = (org_data or {}).get("control_api") or {} + orgs[org_id] = OrgCredentials( + api_key=(org_data or {}).get("api_key"), + org_name=(org_data or {}).get("org_name"), + control_api=ControlApiTokens( + access_token=control_raw.get("access_token"), + access_token_expires_at=control_raw.get("access_token_expires_at"), + access_token_scopes=list(control_raw.get("access_token_scopes") or []), + refresh_token=control_raw.get("refresh_token"), + refresh_token_expires_at=control_raw.get("refresh_token_expires_at"), + authorization_expires_at=control_raw.get("authorization_expires_at"), + ), + ) + return Credentials( + version=data.get("version", CURRENT_VERSION), + selected_org_id=data.get("selected_org_id"), + orgs=orgs, + client_id=data.get("client_id"), + ) + + +def _load_json_file(path: Path) -> dict | None: + if not path.exists(): + return None + try: + with open(path) as f: + raw = json.load(f) + except (OSError, json.JSONDecodeError): + return None + if not isinstance(raw, dict): + return None + return raw + + +def _write_json_file(path: Path, payload: dict, temp_prefix: str) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + fd, tmp_path = tempfile.mkstemp( + prefix=temp_prefix, + suffix=".tmp", + dir=str(path.parent), + ) + try: + with os.fdopen(fd, "w") as f: + json.dump(payload, f, indent=2) + os.chmod(tmp_path, 0o600) + os.replace(tmp_path, path) + except Exception: + if os.path.exists(tmp_path): + try: + os.unlink(tmp_path) + except OSError: + pass + raise + + +def load() -> Credentials | None: + """Load credentials from disk. + + Preference order: + 1. ``auth.json`` in the new structured format. + 2. Legacy ``credentials.json`` in the flat v0 format, migrated forward into + ``auth.json`` when the new file does not exist. + + Returns ``None`` if neither file exists or both are unreadable/corrupt. + """ + raw = _load_json_file(CREDENTIALS_FILE) + if raw is not None: + if "version" not in raw: + return _credentials_from_dict(_migrate_v0(raw)) + return _credentials_from_dict(raw) + + legacy_raw = _load_json_file(LEGACY_CREDENTIALS_FILE) + if legacy_raw is None: + return None + creds = _credentials_from_dict(_migrate_v0(legacy_raw)) + save(creds) + # Remove the legacy file only after the new one is durably on disk, so a + # crash mid-migration leaves the user with the original credentials rather + # than nothing. 
+ try: + LEGACY_CREDENTIALS_FILE.unlink() + except OSError: + pass + return creds + + +def save(creds: Credentials) -> None: + """Write structured auth atomically to ``auth.json``.""" + payload = asdict(creds) + _write_json_file(CREDENTIALS_FILE, payload, ".auth.") + + +def delete() -> bool: + """Remove auth files. Returns True if any local auth file was removed.""" + removed = False + for path in (CREDENTIALS_FILE, LEGACY_CREDENTIALS_FILE): + if path.exists(): + path.unlink() + removed = True + return removed + + +def get_active_credentials_file() -> Path | None: + """Return the on-disk auth file currently backing ``load()``, if any.""" + raw = _load_json_file(CREDENTIALS_FILE) + if raw is not None: + return CREDENTIALS_FILE + legacy_raw = _load_json_file(LEGACY_CREDENTIALS_FILE) + if legacy_raw is not None: + return LEGACY_CREDENTIALS_FILE + return None + + +def get_selected_api_key() -> str | None: + """Return the API key for the currently selected org, or None.""" + creds = load() + if creds is None: + return None + org = creds.selected_org() + if org is None: + return None + return org.api_key + + +def set_api_key_for_org(org_id: str, api_key: str) -> None: + """Write an API key into the given org, creating the org if missing. + + If no org is currently selected, ``selected_org_id`` is set to ``org_id``. + """ + creds = load() or Credentials() + org = creds.orgs.get(org_id) or OrgCredentials() + org.api_key = api_key + creds.orgs[org_id] = org + if creds.selected_org_id is None: + creds.selected_org_id = org_id + save(creds) diff --git a/parallel_web_tools/core/endpoints.py b/parallel_web_tools/core/endpoints.py new file mode 100644 index 0000000..4b8a6dc --- /dev/null +++ b/parallel_web_tools/core/endpoints.py @@ -0,0 +1,45 @@ +"""Endpoint and client-identity configuration for parallel-cli. + +Three base URLs are configurable via env vars so the CLI can be pointed at a +local dev stack: + +- ``PARALLEL_PLATFORM_URL`` — the platform that serves ``/getServiceKeys/*`` + (device authorization, token exchange, revocation). Default + ``https://platform.parallel.ai``; for local dev set to ``http://localhost:3000``. + +- ``PARALLEL_SERVICE_API_URL`` — the service/account API that serves + ``/service/v1/*`` (apps, API-key management). Default + ``https://api.parallel.ai/account``; for local dev set to + ``http://localhost:8090``. + +- ``PARALLEL_API_URL`` — the data API (search, extract, research, enrich, + findall, monitor). Default ``https://api.parallel.ai``. 
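+
+For example, to point the CLI at a local dev stack (values repeated from the
+entries above; illustrative only):
+
+    export PARALLEL_PLATFORM_URL=http://localhost:3000
+    export PARALLEL_SERVICE_API_URL=http://localhost:8090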
+""" + +from __future__ import annotations + +import os + +DEFAULT_PLATFORM_URL = "https://platform.parallel.ai" +DEFAULT_SERVICE_API_URL = "https://api.parallel.ai/account" +DEFAULT_API_URL = "https://api.parallel.ai" + +CLIENT_ID = "parallel-cli" +DEFAULT_SCOPE = "keys:read keys:create keys:delete apps:read apps:create apps:delete balance:read balance:add" + +PARALLEL_CLI_APP_NAME = "parallel-cli Users" + + +def get_platform_url() -> str: + """Return the platform base URL (no trailing slash).""" + return os.environ.get("PARALLEL_PLATFORM_URL", DEFAULT_PLATFORM_URL).rstrip("/") + + +def get_service_api_url() -> str: + """Return the service API base URL (no trailing slash).""" + return os.environ.get("PARALLEL_SERVICE_API_URL", DEFAULT_SERVICE_API_URL).rstrip("/") + + +def get_api_url() -> str: + """Return the data API base URL (no trailing slash).""" + return os.environ.get("PARALLEL_API_URL", DEFAULT_API_URL).rstrip("/") diff --git a/parallel_web_tools/core/service.py b/parallel_web_tools/core/service.py new file mode 100644 index 0000000..bb2d4bb --- /dev/null +++ b/parallel_web_tools/core/service.py @@ -0,0 +1,138 @@ +"""Service API client for parallel-cli. + +Wraps the subset of ``/service/v1/*`` endpoints the CLI consumes: + +- ``GET /service/v1/apps`` — list apps for the caller's org +- ``POST /service/v1/apps/{app_id}/keys`` — create an API key on an app +- ``GET /service/v1/balance`` — read the org's prepaid balance +- ``POST /service/v1/balance/add`` — charge Stripe and top up balance + +Request and response shapes are parsed with the Pydantic models in +:mod:`parallel_web_tools.core.service_types` (auto-generated from the OpenAPI +spec; regenerate with ``scripts/generate_service_types.py``). +""" + +from __future__ import annotations + +import json +import time +import urllib.error +import urllib.request +from typing import Any + +from pydantic import ValidationError + +from parallel_web_tools.core.endpoints import PARALLEL_CLI_APP_NAME, get_service_api_url +from parallel_web_tools.core.service_types import ( + AddBalanceRequest, + AppItem, + BalanceResponse, + CreateApiKeyRequestModel, + CreateKeyResponse, + GetAppsForOrgResponseModel, +) + + +class ServiceApiError(Exception): + """Raised when the service API returns an error or an unexpected payload.""" + + +def _request( + method: str, + path: str, + access_token: str, + body: dict | None = None, + timeout: int = 30, +) -> Any: + url = f"{get_service_api_url()}{path}" + data = json.dumps(body).encode() if body is not None else None + headers = { + "Authorization": f"Bearer {access_token}", + "Accept": "application/json", + } + if data is not None: + headers["Content-Type"] = "application/json" + req = urllib.request.Request(url, data=data, headers=headers, method=method) + try: + with urllib.request.urlopen(req, timeout=timeout) as response: + raw = response.read().decode() + return json.loads(raw) if raw else None + except urllib.error.HTTPError as e: + body_text = e.read().decode() + raise ServiceApiError(f"{method} {path} failed: {e.code} - {body_text}") from e + + +def list_apps(access_token: str) -> list[AppItem]: + """Return all apps for the caller's org.""" + data = _request("GET", "/service/v1/apps", access_token) + try: + resp = GetAppsForOrgResponseModel.model_validate(data or {}) + except ValidationError as e: + raise ServiceApiError(f"Unexpected /service/v1/apps response: {e}") from e + return resp.apps or [] + + +def create_api_key(access_token: str, app_id: str, api_key_name: str) -> CreateKeyResponse: + 
"""Create an API key on the given app and return the typed result.""" + body = CreateApiKeyRequestModel(api_key_name=api_key_name).model_dump() + data = _request("POST", f"/service/v1/apps/{app_id}/keys", access_token, body=body) + try: + return CreateKeyResponse.model_validate(data) + except ValidationError as e: + raise ServiceApiError(f"Unexpected create_api_key response: {e}") from e + + +def _build_key_name(client_id: str | None = None, now: float | None = None) -> str: + """Return a CLI-minted key name. + + Uses the registered OAuth ``client_id`` as the high-entropy prefix, with a + ``YYYY-MM-DD-HHMM`` suffix so the same client can mint multiple keys and + still distinguish them. Falls back to the plain ``parallel-cli`` prefix + when no ``client_id`` is available (e.g. registration failed earlier). + """ + prefix = client_id or "parallel-cli" + return f"{prefix}-{time.strftime('%Y-%m-%d-%H%M', time.localtime(now))}" + + +def get_balance(access_token: str) -> BalanceResponse: + """Return the caller's current org balance.""" + data = _request("GET", "/service/v1/balance", access_token) + try: + return BalanceResponse.model_validate(data) + except ValidationError as e: + raise ServiceApiError(f"Unexpected /service/v1/balance response: {e}") from e + + +def add_balance(access_token: str, amount_cents: int, idempotency_key: str) -> BalanceResponse: + """Charge the org's payment method and top up the prepaid balance. + + Returns the updated :class:`BalanceResponse`. ``idempotency_key`` must be + high-entropy; the server dedupes repeat charges for at least 24h when the + same key is submitted. + """ + body = AddBalanceRequest(amount_cents=amount_cents, idempotency_key=idempotency_key).model_dump() + data = _request("POST", "/service/v1/balance/add", access_token, body=body) + try: + return BalanceResponse.model_validate(data) + except ValidationError as e: + raise ServiceApiError(f"Unexpected /service/v1/balance/add response: {e}") from e + + +def provision_cli_api_key(access_token: str, client_id: str | None = None) -> tuple[str, str]: + """Find the ``parallel-cli Users`` app and mint a fresh API key on it. + + Returns ``(raw_api_key, key_name)``. The raw key is only returned once by + the server — at creation time — so the caller must persist it immediately. + """ + apps = list_apps(access_token) + match = next((a for a in apps if a.app_name == PARALLEL_CLI_APP_NAME), None) + if match is None: + raise ServiceApiError( + f"No app named {PARALLEL_CLI_APP_NAME!r} found for this org. " + "It should be auto-created during device authorization; contact support if missing." 
+ ) + key_name = _build_key_name(client_id) + created = create_api_key(access_token, match.app_id, key_name) + if not created.raw_api_key: + raise ServiceApiError("Server returned no raw_api_key on key creation; cannot persist a usable key without it.") + return created.raw_api_key, key_name diff --git a/parallel_web_tools/core/service_types.py b/parallel_web_tools/core/service_types.py new file mode 100644 index 0000000..30c778d --- /dev/null +++ b/parallel_web_tools/core/service_types.py @@ -0,0 +1,100 @@ +# generated by datamodel-codegen: +# filename: http://localhost:8090/service/openapi.json +# timestamp: 2026-05-08T19:39:42+00:00 + +from __future__ import annotations + +from typing import Annotated + +from pydantic import BaseModel, Field + + +class CreateAppResponseModel(BaseModel): + app_id: Annotated[str, Field(description="App ID", title="App Id")] + + +class CreateAppRequestModel(BaseModel): + app_name: Annotated[str, Field(description="App name", title="App Name")] + + +class BalanceResponse(BaseModel): + org_id: Annotated[str, Field(description="Organization ID", title="Org Id")] + credit_balance_cents: Annotated[ + float, + Field( + description="Total available prepaid balance in cents (credits + prepaid commits). Always 0 when will_invoice is true.", + title="Credit Balance Cents", + ), + ] + pending_debit_balance_cents: Annotated[ + float | None, + Field( + description="Balance in cents currently held for inflight tasks plus charges not yet synced to the billing provider. Always 0 when will_invoice is true.", + title="Pending Debit Balance Cents", + ), + ] = 0 + will_invoice: Annotated[ + bool | None, + Field( + description="True if this organization is billed by invoice (postpaid) rather than from a prepaid credit balance. Invoice-only orgs cannot add balance via this API.", + title="Will Invoice", + ), + ] = False + + +class AppItem(BaseModel): + app_name: Annotated[str, Field(description="App name", title="App Name")] + org_name: Annotated[str | None, Field(description="Organization name", title="Org Name")] + app_id: Annotated[str, Field(description="App ID", title="App Id")] + org_id: Annotated[str, Field(description="Organization ID", title="Org Id")] + + +class AddBalanceRequest(BaseModel): + amount_cents: Annotated[ + int, + Field( + description="Amount in cents to charge and add to the balance. Must be between 1 and 10000 cents ($100.00).", + gt=0, + le=10000, + title="Amount Cents", + ), + ] + idempotency_key: Annotated[ + str, + Field( + description="Required idempotency key. Stripe dedupes the charge server-side for at least 24h when the same key is submitted again for the same org (see https://docs.stripe.com/api/idempotent_requests). Pick a high-entropy value (e.g. 
a UUID) so distinct agent attempts do not collide.", + max_length=128, + min_length=8, + title="Idempotency Key", + ), + ] + + +class CreateKeyResponse(BaseModel): + api_key_id: Annotated[str, Field(description="API Key ID", title="Api Key Id")] + api_key_name: Annotated[str, Field(description="API Key Name", title="Api Key Name")] + app_id: Annotated[str, Field(description="App ID", title="App Id")] + app_name: Annotated[str, Field(description="App Name", title="App Name")] + created_by_user_id: Annotated[str, Field(description="Created by User ID", title="Created By User Id")] + created_by_user_email: Annotated[str, Field(description="Created by User Email", title="Created By User Email")] + display_value: Annotated[str, Field(description="Display Value", title="Display Value")] + raw_api_key: Annotated[str | None, Field(description="Raw API Key", title="Raw Api Key")] = None + created_at: Annotated[int, Field(description="Created At", title="Created At")] + + +class CreateApiKeyRequestModel(BaseModel): + api_key_name: Annotated[str, Field(description="API Key Name", title="Api Key Name")] + + +class ValidationError(BaseModel): + loc: Annotated[list[str | int], Field(title="Location")] + msg: Annotated[str, Field(title="Message")] + type: Annotated[str, Field(title="Error Type")] + + +class GetAppsForOrgResponseModel(BaseModel): + apps: Annotated[list[AppItem] | None, Field(description="List of apps", title="Apps")] = None + + +class HTTPValidationError(BaseModel): + detail: Annotated[list[ValidationError] | None, Field(title="Detail")] = None diff --git a/parallel_web_tools/integrations/duckdb/udf.py b/parallel_web_tools/integrations/duckdb/udf.py index d110400..f929a55 100644 --- a/parallel_web_tools/integrations/duckdb/udf.py +++ b/parallel_web_tools/integrations/duckdb/udf.py @@ -62,7 +62,13 @@ async def _enrich_all_async( from parallel import AsyncParallel from parallel.types import JsonSchemaParam, TaskSpecParam - client = AsyncParallel(api_key=api_key, default_headers=get_default_headers("duckdb")) + from parallel_web_tools.core.endpoints import get_api_url + + client = AsyncParallel( + base_url=get_api_url(), + api_key=api_key, + default_headers=get_default_headers("duckdb"), + ) output_schema = build_output_schema(output_columns) task_spec = TaskSpecParam(output_schema=JsonSchemaParam(type="json", json_schema=output_schema)) diff --git a/parallel_web_tools/integrations/spark/streaming.py b/parallel_web_tools/integrations/spark/streaming.py index 38affe7..5ee4ffb 100644 --- a/parallel_web_tools/integrations/spark/streaming.py +++ b/parallel_web_tools/integrations/spark/streaming.py @@ -178,7 +178,10 @@ def enrich_streaming_batch( try: from parallel import Parallel + from parallel_web_tools.core.endpoints import get_api_url + client = Parallel( + base_url=get_api_url(), api_key=resolve_api_key(api_key), default_headers=get_default_headers("spark"), ) diff --git a/pyproject.toml b/pyproject.toml index 37c9bc1..717452c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,6 +98,7 @@ dev = [ "pre-commit>=4.6.0", "ruff>=0.15.0", "ty>=0.0.33", + "datamodel-code-generator[ruff]>=0.26.0", ] [tool.hatch.build.targets.wheel] diff --git a/scripts/generate_service_types.py b/scripts/generate_service_types.py new file mode 100644 index 0000000..193035e --- /dev/null +++ b/scripts/generate_service_types.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 +"""Generate Pydantic models from the service API OpenAPI spec. 
+
+Mirrors the npm workflow used elsewhere:
+
+    openapi-typescript http://127.0.0.1:8090/openapi.json -o ./app/api/account-service-types.ts
+
+Usage:
+
+    uv run python scripts/generate_service_types.py            # prod (default)
+    uv run python scripts/generate_service_types.py --env dev  # localhost:8090
+    uv run python scripts/generate_service_types.py --url <URL>
+
+Output is written to ``parallel_web_tools/core/service_types.py``.
+Requires ``datamodel-code-generator`` (installed via the ``dev`` extra).
+"""
+
+from __future__ import annotations
+
+import argparse
+import subprocess
+import sys
+from pathlib import Path
+
+ENV_URLS = {
+    "prod": "https://api.parallel.ai/account/service/openapi.json",
+    "dev": "http://localhost:8090/service/openapi.json",
+}
+
+OUTPUT_PATH = Path(__file__).resolve().parent.parent / "parallel_web_tools" / "core" / "service_types.py"
+
+
+def main() -> int:
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument(
+        "--env",
+        choices=("prod", "dev"),
+        default="prod",
+        help="Which environment's OpenAPI spec to fetch (default: prod).",
+    )
+    parser.add_argument(
+        "--url",
+        help="Custom OpenAPI URL; overrides --env.",
+    )
+    parser.add_argument(
+        "--output",
+        type=Path,
+        default=OUTPUT_PATH,
+        help=f"Output file (default: {OUTPUT_PATH.relative_to(Path.cwd()) if OUTPUT_PATH.is_relative_to(Path.cwd()) else OUTPUT_PATH}).",
+    )
+    args = parser.parse_args()
+
+    url = args.url or ENV_URLS[args.env]
+    print(f"Generating service types from {url} → {args.output}", file=sys.stderr)
+
+    args.output.parent.mkdir(parents=True, exist_ok=True)
+    cmd = [
+        sys.executable,
+        "-m",
+        "datamodel_code_generator",
+        "--url",
+        url,
+        "--input-file-type",
+        "openapi",
+        "--output",
+        str(args.output),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--target-python-version",
+        "3.10",
+        "--use-standard-collections",
+        "--use-union-operator",
+        "--use-annotated",
+        "--snake-case-field",
+        "--formatters",
+        "ruff-format",
+        "ruff-check",
+    ]
+    result = subprocess.run(cmd)
+    return result.returncode
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/tests/conftest.py b/tests/conftest.py
index 5d521ba..ea743ad 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,8 +1,22 @@
 """Shared pytest fixtures for the parallel-web-tools test suite."""
 
+import asyncio
+import socket
+import subprocess
+import urllib.request
+import webbrowser
+
+import httpx
 import pytest
 
 
+def _blocked_external_io(kind: str):
+    def fail(*args, **kwargs):
+        raise AssertionError(f"{kind} is disabled in tests; mock the request instead.")
+
+    return fail
+
+
 @pytest.fixture(autouse=True)
 def _isolate_cwd(tmp_path, monkeypatch):
     """Run every test in a fresh tmp dir.
@@ -14,3 +28,54 @@
     drop the per-test `monkeypatch.chdir(tmp_path)` boilerplate.
     """
     monkeypatch.chdir(tmp_path)
+
+
+@pytest.fixture(autouse=True)
+def _block_network(monkeypatch):
+    """Prevent accidental outbound network calls during tests.
+
+    Tests should mock the specific transport layer they exercise. If something
+    reaches the real socket layer, fail fast instead of hanging on live auth or
+    API calls.
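+
+    Tests that do need an HTTP response should patch the transport they call,
+    e.g. ``urllib.request.urlopen`` (see the ``_patch_auth_urlopen`` helper in
+    tests/test_auth.py).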
+ """ + + fail = _blocked_external_io("Network access") + + monkeypatch.setattr(socket, "create_connection", fail) + monkeypatch.setattr(socket, "getaddrinfo", fail) + monkeypatch.setattr(socket, "gethostbyname", fail) + monkeypatch.setattr(socket, "gethostbyname_ex", fail) + monkeypatch.setattr(socket, "gethostbyaddr", fail) + monkeypatch.setattr(socket, "getnameinfo", fail) + monkeypatch.setattr(socket.socket, "connect", fail) + monkeypatch.setattr(socket.socket, "connect_ex", fail) + monkeypatch.setattr(asyncio, "open_connection", fail) + monkeypatch.setattr(urllib.request, "urlopen", fail) + monkeypatch.setattr(httpx, "get", fail) + monkeypatch.setattr(httpx, "post", fail) + monkeypatch.setattr(httpx, "request", fail) + monkeypatch.setattr(httpx, "stream", fail) + monkeypatch.setattr(httpx.Client, "send", fail) + monkeypatch.setattr(httpx.AsyncClient, "send", fail) + + +@pytest.fixture(autouse=True) +def _block_subprocess(monkeypatch): + """Prevent subprocesses from escaping the in-process test harness.""" + + fail = _blocked_external_io("Subprocess execution") + + monkeypatch.setattr(subprocess, "Popen", fail) + monkeypatch.setattr(subprocess, "run", fail) + monkeypatch.setattr(subprocess, "call", fail) + monkeypatch.setattr(subprocess, "check_call", fail) + monkeypatch.setattr(subprocess, "check_output", fail) + + +@pytest.fixture(autouse=True) +def _block_browser_launch(monkeypatch): + """Prevent tests from opening a real browser window.""" + + monkeypatch.setattr(webbrowser, "open", lambda *args, **kwargs: True) + monkeypatch.setattr(webbrowser, "open_new", lambda *args, **kwargs: True) + monkeypatch.setattr(webbrowser, "open_new_tab", lambda *args, **kwargs: True) diff --git a/tests/test_auth.py b/tests/test_auth.py index f5f18c9..9172f2c 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,584 +1,1137 @@ -"""Tests for the auth module.""" +"""Tests for the auth module (device flow against /getServiceKeys).""" +import io import json import os import urllib.error +from contextlib import contextmanager +from dataclasses import replace +from email.message import Message from unittest import mock import pytest +from parallel_web_tools.core import credentials from parallel_web_tools.core.auth import ( + ACCESS_TOKEN_SKEW_SECONDS, DeviceCodeInfo, + ReauthenticationRequired, + TokenResponse, _do_device_flow, - _generate_code_challenge, - _generate_code_verifier, - _is_headless, - _load_stored_token, - _save_token, + _persist_token_response, + build_verification_uri, create_client, + ensure_client_id, get_api_key, get_auth_status, + get_control_api_access_token, + is_headless, + login_flow, logout, poll_device_token, + refresh_access_token, + register_client, request_device_code, resolve_api_key, + revoke_token, + send_magic_link, ) +# --------------------------------------------------------------------------- +# Shared helpers / fixtures +# --------------------------------------------------------------------------- -class TestPKCE: - """Tests for PKCE code generation.""" - def test_generate_code_verifier_length(self): - """Code verifier should be URL-safe base64.""" - verifier = _generate_code_verifier() - assert len(verifier) >= 43 # Base64 encoded 32 bytes - assert verifier.replace("-", "").replace("_", "").isalnum() +@pytest.fixture +def creds_file(tmp_path, monkeypatch): + """Patch auth.json path to a tmp path for isolation.""" + path = tmp_path / "auth.json" + monkeypatch.setattr(credentials, "AUTH_FILE", path) + monkeypatch.setattr(credentials, "CREDENTIALS_FILE", path) + 
monkeypatch.setattr(credentials, "LEGACY_CREDENTIALS_FILE", tmp_path / "credentials.json") + return path - def test_generate_code_verifier_unique(self): - """Each code verifier should be unique.""" - verifiers = [_generate_code_verifier() for _ in range(10)] - assert len(set(verifiers)) == 10 - def test_generate_code_challenge(self): - """Code challenge should be SHA256 of verifier, base64 encoded.""" - verifier = "test_verifier_12345" - challenge = _generate_code_challenge(verifier) +@pytest.fixture +def legacy_file(tmp_path): + return tmp_path / "credentials.json" - # Challenge should be URL-safe base64 without padding - assert "=" not in challenge - assert challenge.replace("-", "").replace("_", "").isalnum() +@pytest.fixture +def no_sleep(monkeypatch): + """Skip real sleeps in the device-code poll loop.""" + monkeypatch.setattr("parallel_web_tools.core.auth.time.sleep", mock.MagicMock()) -class TestTokenStorage: - """Tests for token storage functions.""" - def test_save_and_load_token(self, tmp_path): - """Token should be saveable and loadable.""" - test_token = "test_token_12345" - token_file = tmp_path / "tokens.json" +@pytest.fixture +def mock_ensure_client_id(monkeypatch): + """Stub out ensure_client_id to avoid real /getServiceKeys/register calls. - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - _save_token(test_token) - - # File should exist with correct permissions - assert token_file.exists() - assert oct(token_file.stat().st_mode)[-3:] == "600" - - # Token should be loadable - loaded = _load_stored_token() - assert loaded == test_token - - def test_load_nonexistent_token(self, tmp_path): - """Loading from nonexistent file should return None.""" - token_file = tmp_path / "nonexistent.json" - - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - loaded = _load_stored_token() - assert loaded is None - - def test_load_corrupted_token(self, tmp_path): - """Loading corrupted JSON should return None.""" - token_file = tmp_path / "corrupted.json" - token_file.write_text("not valid json {{{") - - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - loaded = _load_stored_token() - assert loaded is None - - -class TestGetApiKey: - """Tests for get_api_key function.""" - - def test_env_var_priority(self, tmp_path): - """Environment variable should take priority.""" - env_key = "test_env_key_12345" - token_file = tmp_path / "tokens.json" - - with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": env_key}): - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - result = get_api_key() - assert result == env_key - - def test_stored_token_second_priority(self, tmp_path): - """Stored token should be used if no env var.""" - stored_token = "stored_token_12345" - token_file = tmp_path / "tokens.json" - token_file.parent.mkdir(parents=True, exist_ok=True) - token_file.write_text(json.dumps({"access_token": stored_token})) + Returns the value the stub will produce so tests can assert on it. 
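+
+    Example (hypothetical test body):
+
+        def test_uses_registered_client_id(mock_ensure_client_id):
+            ...
+            assert mock_flow.call_args.kwargs["client_id"] == mock_ensure_client_id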
+ """ + value = "cid_test" + monkeypatch.setattr("parallel_web_tools.core.auth.ensure_client_id", lambda: value) + return value - with mock.patch.dict(os.environ, {}, clear=True): - # Remove PARALLEL_API_KEY if it exists - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - result = get_api_key() - assert result == stored_token +def _http_error(status: int, body: dict) -> urllib.error.HTTPError: + return urllib.error.HTTPError( + url="https://example.com", + code=status, + msg="Error", + hdrs=Message(), + fp=io.BytesIO(json.dumps(body).encode()), + ) - def test_force_login_ignores_env_var(self, tmp_path): - """force_login should skip env var and stored token.""" - env_key = "test_env_key" - token_file = tmp_path / "tokens.json" - with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": env_key}): - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - with mock.patch("parallel_web_tools.core.auth._do_oauth_flow") as mock_oauth: - with mock.patch("parallel_web_tools.core.auth._do_device_flow") as mock_device: - mock_oauth.return_value = "new_oauth_token" - mock_device.return_value = "new_device_token" +def _urlopen_stub(responses, capture: dict | None = None): + """Build a urlopen side_effect that yields each response in order. - result = get_api_key(force_login=True) + Each entry in ``responses`` is a dict (JSON-encoded body), bytes (raw body), + or pre-built HTTPError. A single value may be passed directly. When + ``capture`` is provided it is populated on each call with url/body/headers/method. + """ + if not isinstance(responses, list): + responses = [responses] + idx = [0] - # Either flow may be chosen depending on environment - assert result in ("new_oauth_token", "new_device_token") + @contextmanager + def impl(req, timeout=None): + if capture is not None: + capture["url"] = req.full_url + capture["body"] = req.data.decode() if req.data else "" + capture["headers"] = dict(req.header_items()) + capture["method"] = req.get_method() + i = min(idx[0], len(responses) - 1) + idx[0] += 1 + r = responses[i] + if isinstance(r, urllib.error.HTTPError): + raise r + payload = r if isinstance(r, (bytes, bytearray)) else json.dumps(r).encode() + yield io.BytesIO(bytes(payload)) + + return impl + + +def _patch_auth_urlopen(responses, capture: dict | None = None): + return mock.patch( + "parallel_web_tools.core.auth.urllib.request.urlopen", + side_effect=_urlopen_stub(responses, capture), + ) -class TestAuthStatus: - """Tests for get_auth_status function.""" +DEVICE_RESPONSE = { + "device_code": "a" * 48, + "user_code": "BCDF-GHJK", + "verification_uri": "http://localhost:3000/getServiceKeys/device", + "verification_uri_complete": "http://localhost:3000/getServiceKeys/device?user_code=BCDF-GHJK", + "expires_in": 600, + "interval": 5, +} - def test_status_with_env_var(self): - """Status should show environment method when env var set.""" - with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": "test_key"}): - status = get_auth_status() - assert status["authenticated"] is True - assert status["method"] == "environment" - def test_status_with_stored_token(self, tmp_path): - """Status should show oauth method when token stored.""" - token_file = tmp_path / "tokens.json" - token_file.parent.mkdir(parents=True, exist_ok=True) - token_file.write_text(json.dumps({"access_token": "stored_token"})) +TOKEN_RESPONSE_JSON = { + "access_token": "at_123", + "refresh_token": "rt_123", + "expires_in": 600, + "refresh_token_expires_in": 
604800, + "authorization_expires_in": 7776000, + "org_id": "org_abc", + "org_name": "Acme Org", + "scope": "keys:read balance:write", + "token_type": "Bearer", +} - with mock.patch.dict(os.environ, {}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - status = get_auth_status() - assert status["authenticated"] is True - assert status["method"] == "oauth" - assert status["token_file"] == str(token_file) +SAMPLE_DEVICE_CODE_INFO = DeviceCodeInfo( + device_code="a" * 48, + user_code="BCDF-GHJK", + verification_uri="http://localhost:3000/getServiceKeys/device", + verification_uri_complete="http://localhost:3000/getServiceKeys/device?user_code=BCDF-GHJK", + expires_in=600, + interval=5, +) - def test_status_not_authenticated(self, tmp_path): - """Status should show not authenticated when nothing configured.""" - token_file = tmp_path / "nonexistent.json" - with mock.patch.dict(os.environ, {}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) +_TOKEN_RESPONSE_DEFAULT = TokenResponse( + access_token="at_123", + refresh_token="rt_123", + expires_in=600, + refresh_token_expires_in=604800, + authorization_expires_in=7776000, + org_id="org_abc", + org_name="Acme Org", + scope="keys:read balance:write", +) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - status = get_auth_status() - assert status["authenticated"] is False - assert status["method"] is None +def _token_response(**overrides) -> TokenResponse: + """Build a TokenResponse with test defaults.""" + return replace(_TOKEN_RESPONSE_DEFAULT, **overrides) -class TestLogout: - """Tests for logout function.""" - def test_logout_removes_token(self, tmp_path): - """Logout should remove stored token file.""" - token_file = tmp_path / "tokens.json" - token_file.parent.mkdir(parents=True, exist_ok=True) - token_file.write_text(json.dumps({"access_token": "test"})) +# --------------------------------------------------------------------------- +# build_verification_uri +# --------------------------------------------------------------------------- - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - result = logout() - assert result is True - assert not token_file.exists() - def test_logout_no_token(self, tmp_path): - """Logout should return False if no token exists.""" - token_file = tmp_path / "nonexistent.json" +class TestBuildVerificationUri: + def test_appends_agent_true(self): + url = build_verification_uri("http://localhost:3000/getServiceKeys/device?user_code=ABCD", None) + assert "user_code=ABCD" in url - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - result = logout() - assert result is False + def test_passes_login_hint_through_url_encoded(self): + url = build_verification_uri( + "http://localhost:3000/getServiceKeys/device?user_code=ABCD", + "login=email,e=user@example.com", + ) + # The hint value is URL-encoded: ','→'%2C', '='→'%3D', '@'→'%40' + assert "login_hint=login%3Demail%2Ce%3Duser%40example.com" in url + + def test_supports_non_email_methods(self): + # google: no email needed + google_url = build_verification_uri("http://localhost:3000/d", "login=google") + assert "login_hint=login%3Dgoogle" in google_url + # sso: email carried as ,e=… + sso_url = build_verification_uri("http://localhost:3000/d", "login=sso,e=u@example.com") + assert "login_hint=login%3Dsso%2Ce%3Du%40example.com" in sso_url + + def test_no_hint_omits_login_hint(self): + url = 
build_verification_uri("http://localhost:3000/getServiceKeys/device", None) + assert "login_hint" not in url + + +# --------------------------------------------------------------------------- +# register_client / ensure_client_id +# --------------------------------------------------------------------------- + + +class TestRegisterClient: + def test_returns_client_id_from_response(self): + with _patch_auth_urlopen({"client_id": "cid_xyz"}): + assert register_client() == "cid_xyz" + + def test_posts_json_with_expected_payload(self): + captured: dict = {} + with _patch_auth_urlopen({"client_id": "cid_xyz"}, capture=captured): + register_client() + + assert "/getServiceKeys/register" in captured["url"] + assert captured["method"] == "POST" + body = json.loads(captured["body"]) + assert body["client_name"] == "parallel-cli" + # Per user request, no redirect_uris field is sent. + assert "redirect_uris" not in body + # Platform block present with at least system/machine (always populated + # by the stdlib platform module). + assert "system" in body["platform"] + assert "machine" in body["platform"] + assert body["platform"]["os_name"] == os.name + def test_raises_on_http_error(self): + with _patch_auth_urlopen(_http_error(500, {"error": "internal"})): + with pytest.raises(Exception, match="Client registration failed"): + register_client() + + def test_ignores_platform_processor_failures(self): + with ( + mock.patch("parallel_web_tools.core.auth._platform.processor", side_effect=AssertionError("blocked")), + _patch_auth_urlopen({"client_id": "cid_xyz"}), + ): + assert register_client() == "cid_xyz" + + +class TestEnsureClientId: + def test_returns_stored_client_id_without_registering(self, creds_file): + credentials.save(credentials.Credentials(client_id="cid_stored")) + with mock.patch("parallel_web_tools.core.auth.register_client") as mock_reg: + assert ensure_client_id() == "cid_stored" + mock_reg.assert_not_called() + + def test_registers_and_persists_when_missing(self, creds_file): + with mock.patch("parallel_web_tools.core.auth.register_client", return_value="cid_fresh"): + assert ensure_client_id() == "cid_fresh" + + creds = credentials.load() + assert creds is not None + assert creds.client_id == "cid_fresh" + + def test_registers_again_when_stored_client_id_is_none(self, creds_file): + # Simulate a prior registration failure: file exists but client_id is None. + credentials.save(credentials.Credentials(selected_org_id="x", orgs={"x": credentials.OrgCredentials()})) + with mock.patch("parallel_web_tools.core.auth.register_client", return_value="cid_new") as mock_reg: + assert ensure_client_id() == "cid_new" + mock_reg.assert_called_once() + + creds = credentials.load() + assert creds is not None + assert creds.client_id == "cid_new" + + def test_falls_back_to_hardcoded_on_registration_failure(self, creds_file, capsys): + with mock.patch( + "parallel_web_tools.core.auth.register_client", + side_effect=Exception("server down"), + ): + assert ensure_client_id() == "parallel-cli" + + # Failure leaves client_id unset so the next call retries. 
+ creds = credentials.load() + assert creds is None or creds.client_id is None + err = capsys.readouterr().err + assert "client registration failed" in err + + +# --------------------------------------------------------------------------- +# send_magic_link +# --------------------------------------------------------------------------- + + +class TestSendMagicLink: + def test_happy_path(self): + with _patch_auth_urlopen({"ok": True}): + # No return value; success = no exception. + send_magic_link(client_id="cid_xyz", email="u@example.com", user_code="ABCD-1234") + + def test_posts_expected_payload(self): + captured: dict = {} + with _patch_auth_urlopen({"ok": True}, capture=captured): + send_magic_link(client_id="cid_xyz", email="u@example.com", user_code="ABCD-1234") + + assert captured["method"] == "POST" + assert captured["url"].endswith("/api/auth/send-magic-link") + body = json.loads(captured["body"]) + assert body == { + "client_id": "cid_xyz", + "email": "u@example.com", + "emailType": "deviceCode", + "queryParams": {"user_code": "ABCD-1234"}, + } + assert any(v == "application/json" for v in captured["headers"].values()) + + def test_custom_email_type(self): + captured: dict = {} + with _patch_auth_urlopen({"ok": True}, capture=captured): + send_magic_link( + client_id="cid_xyz", + email="u@example.com", + user_code="ABCD-1234", + email_type="customType", + ) + assert json.loads(captured["body"])["emailType"] == "customType" -class TestCreateClient: - """Tests for create_client function.""" + def test_raises_on_http_error(self): + with _patch_auth_urlopen(_http_error(422, {"error": "invalid_email"})): + with pytest.raises(Exception, match="Magic link send failed: 422"): + send_magic_link(client_id="cid_xyz", email="bad@x", user_code="ABCD-1234") - def test_creates_client_with_explicit_key(self): - """Should create Parallel client with explicit API key.""" - with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: - create_client(api_key="test-key-123", source="cli") - mock_parallel.assert_called_once() - call_kwargs = mock_parallel.call_args.kwargs - assert call_kwargs["api_key"] == "test-key-123" - assert "User-Agent" in call_kwargs["default_headers"] - assert "(cli)" in call_kwargs["default_headers"]["User-Agent"] +# --------------------------------------------------------------------------- +# request_device_code +# --------------------------------------------------------------------------- - def test_creates_client_with_env_key(self): - """Should resolve API key from environment when not explicit.""" - with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": "env-key"}): - with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: - create_client(source="duckdb") - call_kwargs = mock_parallel.call_args.kwargs - assert call_kwargs["api_key"] == "env-key" +class TestRequestDeviceCode: + def test_returns_device_code_info(self): + with _patch_auth_urlopen(DEVICE_RESPONSE): + info = request_device_code() + assert isinstance(info, DeviceCodeInfo) + assert info.user_code == "BCDF-GHJK" + assert info.expires_in == 600 - def test_raises_without_key(self, tmp_path): - """Should raise ValueError when no API key is available.""" - token_file = tmp_path / "nonexistent.json" + def test_hits_get_service_keys_endpoint(self): + from parallel_web_tools.core.endpoints import DEFAULT_SCOPE - with mock.patch.dict(os.environ, {}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - with 
pytest.raises(ValueError, match="Parallel API key required"): - create_client() + captured: dict = {} + with _patch_auth_urlopen(DEVICE_RESPONSE, capture=captured): + request_device_code() - def test_default_source_is_python(self): - """Should default to python source.""" - with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": "test-key"}): - with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: - create_client() + assert "/getServiceKeys/device/code" in captured["url"] + assert "client_id=parallel-cli" in captured["body"] + # Scope must be present and URL-form-encoded — check for its head and a colon-encoded marker. + first_scope = DEFAULT_SCOPE.split()[0] # e.g. "keys:read" + assert first_scope.replace(":", "%3A") in captured["body"] or first_scope in captured["body"] - call_kwargs = mock_parallel.call_args.kwargs - assert "(python)" in call_kwargs["default_headers"]["User-Agent"] + def test_respects_platform_url_env_var(self, monkeypatch): + monkeypatch.setenv("PARALLEL_PLATFORM_URL", "http://localhost:3000") + captured: dict = {} + with _patch_auth_urlopen(DEVICE_RESPONSE, capture=captured): + request_device_code() + assert captured["url"].startswith("http://localhost:3000/") + def test_raises_on_http_error(self): + with _patch_auth_urlopen(_http_error(500, {"error": "internal"})): + with pytest.raises(Exception, match="Device code request failed"): + request_device_code() -class TestResolveApiKeyInAuth: - """Additional tests for resolve_api_key edge cases.""" - def test_empty_string_key_is_falsy(self): - """Empty string api_key should fall through to env var.""" - with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": "env-key"}): - result = resolve_api_key(api_key="") - assert result == "env-key" +# --------------------------------------------------------------------------- +# poll_device_token +# --------------------------------------------------------------------------- - def test_stored_token_used_as_fallback(self, tmp_path): - """Should use stored OAuth token when no env var.""" - token_file = tmp_path / "creds.json" - token_file.write_text(json.dumps({"access_token": "stored-token"})) - with mock.patch.dict(os.environ, {}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - result = resolve_api_key() - assert result == "stored-token" +class TestPollDeviceToken: + def test_returns_token_response_on_success(self, no_sleep): + with _patch_auth_urlopen(TOKEN_RESPONSE_JSON): + resp = poll_device_token(SAMPLE_DEVICE_CODE_INFO) + assert isinstance(resp, TokenResponse) + assert resp.access_token == "at_123" + assert resp.refresh_token == "rt_123" + assert resp.org_id == "org_abc" + assert resp.scopes == ["keys:read", "balance:write"] + + def test_polls_through_pending(self, monkeypatch): + sleep_mock = mock.MagicMock() + monkeypatch.setattr("parallel_web_tools.core.auth.time.sleep", sleep_mock) + responses = [ + _http_error(400, {"error": "authorization_pending"}), + _http_error(400, {"error": "authorization_pending"}), + TOKEN_RESPONSE_JSON, + ] + with _patch_auth_urlopen(responses): + resp = poll_device_token(SAMPLE_DEVICE_CODE_INFO) + assert resp.access_token == "at_123" + # Polls first, then sleeps between polls — 3 polls means 2 sleeps. 
+ assert sleep_mock.call_count == 2 + + def test_slow_down_increases_interval(self, monkeypatch): + sleep_mock = mock.MagicMock() + monkeypatch.setattr("parallel_web_tools.core.auth.time.sleep", sleep_mock) + responses = [_http_error(400, {"error": "slow_down"}), TOKEN_RESPONSE_JSON] + with _patch_auth_urlopen(responses): + poll_device_token(SAMPLE_DEVICE_CODE_INFO) + # First poll returns slow_down; we bump interval to 10, sleep 10, poll again. + assert sleep_mock.call_args_list == [mock.call(10)] + + def test_polls_immediately_without_initial_sleep(self, monkeypatch): + """Happy path: auth already granted at entry → return on first poll, zero sleeps.""" + sleep_mock = mock.MagicMock() + monkeypatch.setattr("parallel_web_tools.core.auth.time.sleep", sleep_mock) + with _patch_auth_urlopen(TOKEN_RESPONSE_JSON): + poll_device_token(SAMPLE_DEVICE_CODE_INFO) + sleep_mock.assert_not_called() + def test_raises_on_access_denied(self, no_sleep): + with _patch_auth_urlopen(_http_error(400, {"error": "access_denied"})): + with pytest.raises(Exception, match="Authorization denied"): + poll_device_token(SAMPLE_DEVICE_CODE_INFO) -class TestIsHeadless: - """Tests for headless environment detection.""" + def test_raises_on_expired_token(self, no_sleep): + with _patch_auth_urlopen(_http_error(400, {"error": "expired_token"})): + with pytest.raises(Exception, match="expired"): + poll_device_token(SAMPLE_DEVICE_CODE_INFO) - def test_ssh_client_detected(self): - with mock.patch.dict(os.environ, {"SSH_CLIENT": "1.2.3.4 54321 22"}): - assert _is_headless() is True - def test_ssh_tty_detected(self): - with mock.patch.dict(os.environ, {"SSH_TTY": "/dev/pts/0"}): - assert _is_headless() is True +# --------------------------------------------------------------------------- +# refresh_access_token / revoke_token +# --------------------------------------------------------------------------- - def test_ci_detected(self): - with mock.patch.dict(os.environ, {"CI": "true"}): - assert _is_headless() is True - def test_docker_detected(self): - with mock.patch("os.path.exists", return_value=True): - assert _is_headless() is True +class TestRefreshAccessToken: + def test_returns_new_token_response(self): + with _patch_auth_urlopen(TOKEN_RESPONSE_JSON): + resp = refresh_access_token("rt_old") + assert resp.access_token == "at_123" - def test_container_env_detected(self): - with mock.patch.dict(os.environ, {"container": "podman"}): - with mock.patch("os.path.exists", return_value=False): - assert _is_headless() is True + def test_hits_token_endpoint_with_refresh_grant(self): + captured: dict = {} + with _patch_auth_urlopen(TOKEN_RESPONSE_JSON, capture=captured): + refresh_access_token("rt_old") - def test_normal_env_not_headless(self): - env = {k: v for k, v in os.environ.items() if k not in ("SSH_CLIENT", "SSH_TTY", "CI", "container")} - env["DISPLAY"] = ":0" # Ensure Linux display check passes - with mock.patch.dict(os.environ, env, clear=True): - with mock.patch("os.path.exists", return_value=False): - assert _is_headless() is False + assert "/getServiceKeys/token" in captured["url"] + assert "grant_type=refresh_token" in captured["body"] + assert "refresh_token=rt_old" in captured["body"] -def _make_http_error(status, body): - """Helper to create a urllib HTTPError with a JSON body.""" - import io - from email.message import Message +class TestRevokeToken: + def test_sends_form_encoded_refresh_token(self): + captured: dict = {} + with _patch_auth_urlopen(b"", capture=captured): + revoke_token("rt_xyz") - resp = 
io.BytesIO(json.dumps(body).encode())
-    return urllib.error.HTTPError(
-        url="https://example.com",
-        code=status,
-        msg="Bad Request",
-        hdrs=Message(),
-        fp=resp,
-    )
+        assert "/getServiceKeys/token/revoke" in captured["url"]
+        assert captured["method"] == "POST"
+        # Body is form-encoded refresh_token=<token>; no bearer header.
+        assert captured["body"] == "refresh_token=rt_xyz"
+        assert any(v == "application/x-www-form-urlencoded" for v in captured["headers"].values())
+        assert not any(k.lower() == "authorization" for k in captured["headers"])
+
+    def test_raises_on_http_error(self):
+        with _patch_auth_urlopen(_http_error(400, {"error": "invalid_request"})):
+            with pytest.raises(Exception, match="Token revocation failed: 400"):
+                revoke_token("rt_bad")
-
-
-SAMPLE_DEVICE_CODE_INFO = DeviceCodeInfo(
-    device_code="a" * 48,
-    user_code="BCDF-GHJK",
-    verification_uri="https://platform.parallel.ai/getKeys/device",
-    verification_uri_complete="https://platform.parallel.ai/getKeys/device?user_code=BCDF-GHJK",
-    expires_in=600,
-    interval=5,
-)
+
+
+# ---------------------------------------------------------------------------
+# _do_device_flow
+# ---------------------------------------------------------------------------
-
-
-def _mock_urlopen_sequence(responses):
-    """Create a mock urlopen that returns a sequence of responses.
-
-    Each response is either a dict (success) or an HTTPError (error).
-    """
-    import io
-    from contextlib import contextmanager
+
+
+class TestDoDeviceFlow:
+    @mock.patch("parallel_web_tools.core.auth.webbrowser.open")
+    @mock.patch("parallel_web_tools.core.auth.is_headless", return_value=False)
+    def test_opens_browser_when_not_headless(self, _headless, mock_browser_open, no_sleep):
+        with _patch_auth_urlopen([DEVICE_RESPONSE, TOKEN_RESPONSE_JSON]):
+            resp = _do_device_flow()
+        assert isinstance(resp, TokenResponse)
+        mock_browser_open.assert_called_once()
-
-    call_count = 0
+
+    @mock.patch("parallel_web_tools.core.auth.webbrowser.open")
+    @mock.patch("parallel_web_tools.core.auth.is_headless", return_value=True)
+    def test_skips_browser_when_headless(self, _headless, mock_browser_open, no_sleep):
+        with _patch_auth_urlopen([DEVICE_RESPONSE, TOKEN_RESPONSE_JSON]):
+            _do_device_flow()
+        mock_browser_open.assert_not_called()
-
-    @contextmanager
-    def mock_urlopen(req, timeout=None):
-        nonlocal call_count
-        idx = min(call_count, len(responses) - 1)
-        resp = responses[idx]
-        call_count += 1
+
+    @mock.patch("parallel_web_tools.core.auth.webbrowser.open")
+    @mock.patch("parallel_web_tools.core.auth.is_headless", return_value=False)
+    def test_opens_browser_with_login_hint(self, _headless, mock_browser_open, no_sleep):
+        with _patch_auth_urlopen([DEVICE_RESPONSE, TOKEN_RESPONSE_JSON]):
+            _do_device_flow(login_hint="login=email,e=user@example.com")
+        opened_url = mock_browser_open.call_args.args[0]
+        assert "login_hint=login%3Demail%2Ce%3Duser%40example.com" in opened_url
-
-        if isinstance(resp, urllib.error.HTTPError):
-            raise resp
+
+    @mock.patch("parallel_web_tools.core.auth.webbrowser.open")
+    def test_callback_receives_device_code_info(self, mock_browser_open, no_sleep):
+        captured = []
+        with _patch_auth_urlopen([DEVICE_RESPONSE, TOKEN_RESPONSE_JSON]):
+            _do_device_flow(on_device_code=lambda info: captured.append(info))
+        assert len(captured) == 1
+        assert isinstance(captured[0], DeviceCodeInfo)
+        # Browser should NOT be opened when callback is provided.
+ mock_browser_open.assert_not_called() - body = json.dumps(resp).encode() - fp = io.BytesIO(body) - yield fp + def test_reregisters_and_retries_when_client_id_is_unknown(self, creds_file, no_sleep): + invalid_client = Exception( + 'Device code request failed: 401 - {"error":"invalid_client","error_description":"Unknown client_id. Register the client first."}' + ) + credentials.save(credentials.Credentials(client_id="cid_stale")) + + with ( + mock.patch( + "parallel_web_tools.core.auth.request_device_code", + side_effect=[invalid_client, SAMPLE_DEVICE_CODE_INFO], + ) as mock_request, + mock.patch("parallel_web_tools.core.auth.register_client", return_value="cid_fresh") as mock_register, + mock.patch("parallel_web_tools.core.auth.poll_device_token", return_value=_token_response()) as mock_poll, + ): + _do_device_flow(client_id="cid_stale") + + assert mock_request.call_args_list[0].kwargs["client_id"] == "cid_stale" + assert mock_request.call_args_list[1].kwargs["client_id"] == "cid_fresh" + mock_register.assert_called_once_with() + mock_poll.assert_called_once_with(SAMPLE_DEVICE_CODE_INFO, client_id="cid_fresh") + creds = credentials.load() + assert creds is not None + assert creds.client_id == "cid_fresh" + + +# --------------------------------------------------------------------------- +# _persist_token_response +# --------------------------------------------------------------------------- + + +class TestPersistTokenResponse: + def test_writes_control_api_tokens_to_selected_org(self, creds_file): + _persist_token_response(_token_response(access_token="at_new", refresh_token="rt_new", org_id="org_real")) + creds = credentials.load() + assert creds is not None + assert creds.selected_org_id == "org_real" + assert creds.orgs["org_real"].org_name == "Acme Org" + control = creds.orgs["org_real"].control_api + assert control.access_token == "at_new" + assert control.refresh_token == "rt_new" + assert control.access_token_scopes == ["keys:read", "balance:write"] + # Expiries are absolute timestamps ordered access < refresh ≤ authorization. + assert control.access_token_expires_at is not None + assert control.refresh_token_expires_at is not None + assert control.authorization_expires_at is not None + assert control.access_token_expires_at > 0 + assert control.refresh_token_expires_at > control.access_token_expires_at + assert control.authorization_expires_at >= control.refresh_token_expires_at + + +# --------------------------------------------------------------------------- +# login_flow + get_api_key +# --------------------------------------------------------------------------- + + +class TestLoginFlow: + def test_provisions_api_key_and_stores(self, creds_file, mock_ensure_client_id): + token_resp = _token_response(access_token="at_x", refresh_token="rt_x", org_id="org_real") + with ( + mock.patch("parallel_web_tools.core.auth._do_device_flow", return_value=token_resp) as mock_flow, + mock.patch( + "parallel_web_tools.core.auth.service.provision_cli_api_key", + return_value=("sk_minted", "cid_test-2026-04-21-1432"), + ) as mock_provision, + ): + api_key = login_flow(login_hint="login=email,e=user@example.com") + + assert api_key == "sk_minted" + # The registered client_id must be threaded into both the device flow + # and the data-API key provisioning call. 
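+        # (The client_id also becomes the minted key-name prefix via
+        # service._build_key_name, so dropping it here would mint keys with
+        # the generic fallback name.)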
+ assert mock_flow.call_args.kwargs.get("client_id") == mock_ensure_client_id + mock_provision.assert_called_once_with("at_x", client_id=mock_ensure_client_id) + + creds = credentials.load() + assert creds is not None + assert creds.selected_org_id == "org_real" + assert creds.orgs["org_real"].api_key == "sk_minted" + assert creds.orgs["org_real"].org_name == "Acme Org" + assert creds.orgs["org_real"].control_api.access_token == "at_x" + + def test_removes_legacy_org_after_successful_login(self, creds_file, mock_ensure_client_id): + # Seed credentials with a v0-style legacy entry, as if the user was upgraded + # from an older credentials file before running their first real login. + credentials.save( + credentials.Credentials( + selected_org_id=credentials.LEGACY_ORG_ID, + orgs={credentials.LEGACY_ORG_ID: credentials.OrgCredentials(api_key="legacy_key")}, + ) + ) - return mock_urlopen + token_resp = _token_response(access_token="at_new", refresh_token="rt_new", org_id="org_real") + with ( + mock.patch("parallel_web_tools.core.auth._do_device_flow", return_value=token_resp), + mock.patch( + "parallel_web_tools.core.auth.service.provision_cli_api_key", + return_value=("sk_minted", "name"), + ), + ): + login_flow() + + creds = credentials.load() + assert creds is not None + assert creds.selected_org_id == "org_real" + assert "org_real" in creds.orgs + # Legacy placeholder must be purged after a successful login. + assert credentials.LEGACY_ORG_ID not in creds.orgs + + def test_registers_client_when_missing(self, creds_file): + """First-boot login triggers /getServiceKeys/register and persists the id.""" + token_resp = _token_response(org_id="org_real") + with ( + mock.patch("parallel_web_tools.core.auth.register_client", return_value="cid_fresh") as mock_reg, + mock.patch("parallel_web_tools.core.auth._do_device_flow", return_value=token_resp) as mock_flow, + mock.patch( + "parallel_web_tools.core.auth.service.provision_cli_api_key", + return_value=("sk_minted", "name"), + ), + ): + login_flow() + + mock_reg.assert_called_once() + assert mock_flow.call_args.kwargs.get("client_id") == "cid_fresh" + + creds = credentials.load() + assert creds is not None + assert creds.client_id == "cid_fresh" + + def test_skips_registration_when_client_id_already_stored(self, creds_file): + credentials.save(credentials.Credentials(client_id="cid_existing")) + token_resp = _token_response(org_id="org_real") + with ( + mock.patch("parallel_web_tools.core.auth.register_client") as mock_reg, + mock.patch("parallel_web_tools.core.auth._do_device_flow", return_value=token_resp) as mock_flow, + mock.patch( + "parallel_web_tools.core.auth.service.provision_cli_api_key", + return_value=("sk_minted", "name"), + ), + ): + login_flow() + + mock_reg.assert_not_called() + assert mock_flow.call_args.kwargs.get("client_id") == "cid_existing" -class TestRequestDeviceCode: - """Tests for the request_device_code public function.""" +class TestGetApiKey: + def test_env_var_first_priority(self, creds_file, monkeypatch): + monkeypatch.setenv("PARALLEL_API_KEY", "env_key") + credentials.set_api_key_for_org("org_a", "stored_key") + # Env var must win over stored credentials (operator override). 
+ assert get_api_key() == "env_key" + + def test_env_var_used_when_no_stored_key(self, creds_file, monkeypatch): + monkeypatch.setenv("PARALLEL_API_KEY", "env_key") + assert get_api_key() == "env_key" + + def test_env_var_short_circuits_service_api_mint(self, creds_file, monkeypatch): + # When the env var is set, control-API minting must NOT be triggered; + # the env var is authoritative and the mint path is unnecessary work. + monkeypatch.setenv("PARALLEL_API_KEY", "env_key") + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={"org_a": credentials.OrgCredentials()}, + ) + ) - DEVICE_RESPONSE = { - "device_code": "a" * 48, - "user_code": "BCDF-GHJK", - "verification_uri": "https://platform.parallel.ai/getKeys/device", - "verification_uri_complete": "https://platform.parallel.ai/getKeys/device?user_code=BCDF-GHJK", - "expires_in": 600, - "interval": 5, - } + with ( + mock.patch("parallel_web_tools.core.auth.get_control_api_access_token") as mock_at, + mock.patch("parallel_web_tools.core.auth.service.provision_cli_api_key") as mock_provision, + ): + assert get_api_key() == "env_key" + + mock_at.assert_not_called() + mock_provision.assert_not_called() + + def test_stored_only_without_env(self, creds_file, monkeypatch): + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + credentials.set_api_key_for_org("org_a", "stored_key") + assert get_api_key() == "stored_key" + + def test_force_login_runs_login_flow(self, creds_file, monkeypatch): + monkeypatch.setenv("PARALLEL_API_KEY", "env_key") # should still be ignored with force_login + with mock.patch("parallel_web_tools.core.auth.login_flow", return_value="minted_key") as mock_flow: + result = get_api_key(force_login=True, login_hint="login=google") + assert result == "minted_key" + assert mock_flow.call_args.kwargs.get("login_hint") == "login=google" + + def test_provisions_via_service_api_when_stored_api_key_missing(self, creds_file, monkeypatch): + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={"org_a": credentials.OrgCredentials()}, + ) + ) - def test_returns_device_code_info(self): - """Should return a DeviceCodeInfo dataclass.""" - mock_urlopen = _mock_urlopen_sequence([self.DEVICE_RESPONSE]) + with ( + mock.patch( + "parallel_web_tools.core.auth.get_control_api_access_token", return_value="at_existing" + ) as mock_at, + mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_existing") as mock_client_id, + mock.patch( + "parallel_web_tools.core.auth.service.provision_cli_api_key", + return_value=("sk_minted", "cid_existing-2026-04-23-1212"), + ) as mock_provision, + mock.patch("parallel_web_tools.core.auth.login_flow") as mock_login, + ): + assert get_api_key() == "sk_minted" + + mock_at.assert_called_once_with() + mock_client_id.assert_called_once_with() + mock_provision.assert_called_once_with("at_existing", client_id="cid_existing") + mock_login.assert_not_called() + + creds = credentials.load() + assert creds is not None + assert creds.orgs["org_a"].api_key == "sk_minted" + + def test_falls_back_to_login_when_control_api_requires_reauth(self, creds_file, monkeypatch): + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + with ( + mock.patch( + "parallel_web_tools.core.auth.get_control_api_access_token", + side_effect=ReauthenticationRequired("not logged in; run 'parallel-cli login'"), + ), + mock.patch("parallel_web_tools.core.auth.login_flow", return_value="sk_from_login") as mock_login, 
+ ): + assert get_api_key(login_hint="user@example.com") == "sk_from_login" + + assert mock_login.call_args.kwargs.get("login_hint") == "user@example.com" + + def test_env_var_returned_without_touching_control_api(self, creds_file, monkeypatch): + # With the new env-first priority, control-API state is irrelevant + # when PARALLEL_API_KEY is set: no reauth check, no login flow. + monkeypatch.setenv("PARALLEL_API_KEY", "env_key") + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={"org_a": credentials.OrgCredentials()}, + ) + ) - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - info = request_device_code() + with ( + mock.patch("parallel_web_tools.core.auth.get_control_api_access_token") as mock_at, + mock.patch("parallel_web_tools.core.auth.login_flow") as mock_login, + ): + assert get_api_key() == "env_key" - assert isinstance(info, DeviceCodeInfo) - assert info.device_code == "a" * 48 - assert info.user_code == "BCDF-GHJK" - assert info.verification_uri == "https://platform.parallel.ai/getKeys/device" - assert info.expires_in == 600 - assert info.interval == 5 + mock_at.assert_not_called() + mock_login.assert_not_called() - def test_raises_on_http_error(self): - """Should raise on server error.""" - error = _make_http_error(500, {"error": "internal"}) - mock_urlopen = _mock_urlopen_sequence([error]) - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - with pytest.raises(Exception, match="Device code request failed"): - request_device_code() +# --------------------------------------------------------------------------- +# get_auth_status / logout +# --------------------------------------------------------------------------- -class TestPollDeviceToken: - """Tests for the poll_device_token public function.""" - - TOKEN_RESPONSE = { - "access_token": "test-api-key-from-device", - "token_type": "bearer", - "scope": "key:read", - } - - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_returns_token_on_success(self, mock_sleep): - """Should return access token when approved.""" - mock_urlopen = _mock_urlopen_sequence([self.TOKEN_RESPONSE]) - - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - token = poll_device_token(SAMPLE_DEVICE_CODE_INFO) - - assert token == "test-api-key-from-device" - - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_polls_through_pending(self, mock_sleep): - """Should keep polling on authorization_pending.""" - mock_urlopen = _mock_urlopen_sequence( - [ - _make_http_error(400, {"error": "authorization_pending"}), - _make_http_error(400, {"error": "authorization_pending"}), - self.TOKEN_RESPONSE, - ] +class TestAuthStatus: + def test_status_with_env_var(self, creds_file, monkeypatch): + monkeypatch.setenv("PARALLEL_API_KEY", "test_key") + status = get_auth_status() + assert status["authenticated"] is True + assert status["method"] == "environment" + + def test_env_var_overrides_stored_in_status(self, creds_file, monkeypatch): + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={"org_a": credentials.OrgCredentials(api_key="stored_key", org_name="Acme Org")}, + ) ) + monkeypatch.setenv("PARALLEL_API_KEY", "env_key") + status = get_auth_status() + assert status["authenticated"] is True + # Env var wins; stored creds are reported but flagged as overridden. 
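+ # (stored_overridden_by_env is the flag `parallel-cli auth` keys its override warning on.)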
+ assert status["method"] == "environment" + assert status["env_var_set"] is True + assert status["has_stored_credentials"] is True + assert status["stored_overridden_by_env"] is True + assert status["selected_org_name"] == "Acme Org" + + def test_status_with_stored_token(self, creds_file, legacy_file, monkeypatch): + legacy_file.parent.mkdir(parents=True, exist_ok=True) + legacy_file.write_text(json.dumps({"access_token": "stored_token"})) + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + + status = get_auth_status() + assert status["authenticated"] is True + assert status["method"] == "oauth" + assert status["version"] == 1 + assert status["selected_org_id"] == "legacy" + assert status["selected_org_name"] is None + assert status["has_control_api_tokens"] is False + assert status["token_file"] == str(creds_file) + assert creds_file.exists() + + def test_status_not_authenticated(self, creds_file, monkeypatch): + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + status = get_auth_status() + assert status["authenticated"] is False + assert status["method"] is None - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - token = poll_device_token(SAMPLE_DEVICE_CODE_INFO) - assert token == "test-api-key-from-device" - assert mock_sleep.call_count == 3 - - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_slow_down_increases_interval(self, mock_sleep): - """slow_down should increase polling interval by 5 seconds.""" - mock_urlopen = _mock_urlopen_sequence( - [ - _make_http_error(400, {"error": "slow_down"}), - self.TOKEN_RESPONSE, - ] +class TestLogout: + def test_logout_removes_token_no_revoke_when_missing(self, creds_file, legacy_file): + legacy_file.parent.mkdir(parents=True, exist_ok=True) + legacy_file.write_text(json.dumps({"access_token": "test"})) + + with mock.patch("parallel_web_tools.core.auth.revoke_token") as mock_revoke: + assert logout() is True + # Legacy tokens have no refresh_token — revoke should be skipped. + mock_revoke.assert_not_called() + assert not legacy_file.exists() + + def test_login_flow_writes_structured_auth_and_removes_legacy_file( + self, creds_file, legacy_file, mock_ensure_client_id + ): + legacy_file.write_text(json.dumps({"access_token": "legacy_key"})) + token_resp = _token_response(access_token="at_x", refresh_token="rt_x", org_id="org_real") + + with ( + mock.patch("parallel_web_tools.core.auth._do_device_flow", return_value=token_resp), + mock.patch( + "parallel_web_tools.core.auth.service.provision_cli_api_key", + return_value=("sk_minted", "cid_test-2026-04-21-1432"), + ), + ): + login_flow() + + auth_disk = json.loads(creds_file.read_text()) + assert auth_disk["version"] == 1 + assert auth_disk["selected_org_id"] == "org_real" + # Migration removes the legacy file once auth.json is durably written. 
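+ # (Presumably write-new-then-delete-old, so an interrupted migration can
+ # still fall back to the legacy file rather than losing both.)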
+ assert not legacy_file.exists() + + def test_logout_revokes_refresh_token_when_present(self, creds_file): + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={ + "org_a": credentials.OrgCredentials( + api_key="sk", + control_api=credentials.ControlApiTokens(refresh_token="rt_keep"), + ) + }, + ) ) - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - poll_device_token(SAMPLE_DEVICE_CODE_INFO) - - assert mock_sleep.call_args_list[0] == mock.call(5) - assert mock_sleep.call_args_list[1] == mock.call(10) - - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_raises_on_access_denied(self, mock_sleep): - mock_urlopen = _mock_urlopen_sequence( - [ - _make_http_error(400, {"error": "access_denied"}), - ] + with mock.patch("parallel_web_tools.core.auth.revoke_token") as mock_revoke: + assert logout() is True + mock_revoke.assert_called_once_with("rt_keep") + assert not creds_file.exists() + + def test_logout_revokes_refresh_tokens_for_all_orgs(self, creds_file): + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={ + "org_a": credentials.OrgCredentials( + control_api=credentials.ControlApiTokens(refresh_token="rt_a"), + ), + "org_b": credentials.OrgCredentials( + control_api=credentials.ControlApiTokens(refresh_token="rt_b"), + ), + "org_c": credentials.OrgCredentials( + control_api=credentials.ControlApiTokens(refresh_token=None), + ), + }, + ) ) - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - with pytest.raises(Exception, match="Authorization denied"): - poll_device_token(SAMPLE_DEVICE_CODE_INFO) - - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_raises_on_expired_token(self, mock_sleep): - mock_urlopen = _mock_urlopen_sequence( - [ - _make_http_error(400, {"error": "expired_token"}), - ] + with mock.patch("parallel_web_tools.core.auth.revoke_token") as mock_revoke: + assert logout() is True + assert mock_revoke.call_count == 2 + mock_revoke.assert_any_call("rt_a") + mock_revoke.assert_any_call("rt_b") + assert not creds_file.exists() + + def test_logout_deduplicates_shared_refresh_tokens(self, creds_file): + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={ + "org_a": credentials.OrgCredentials( + control_api=credentials.ControlApiTokens(refresh_token="rt_shared"), + ), + "org_b": credentials.OrgCredentials( + control_api=credentials.ControlApiTokens(refresh_token="rt_shared"), + ), + }, + ) ) - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - with pytest.raises(Exception, match="expired"): - poll_device_token(SAMPLE_DEVICE_CODE_INFO) - - -class TestDoDeviceFlow: - """Tests for the _do_device_flow convenience wrapper.""" - - DEVICE_RESPONSE = { - "device_code": "a" * 48, - "user_code": "BCDF-GHJK", - "verification_uri": "https://platform.parallel.ai/getKeys/device", - "verification_uri_complete": "https://platform.parallel.ai/getKeys/device?user_code=BCDF-GHJK", - "expires_in": 600, - "interval": 5, - } - - TOKEN_RESPONSE = { - "access_token": "test-api-key-from-device", - "token_type": "bearer", - "scope": "key:read", - } - - @mock.patch("parallel_web_tools.core.auth.webbrowser.open") - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_default_prints_to_stderr(self, mock_sleep, mock_browser_open): - """Without callback, should print instructions to stderr.""" - mock_urlopen = _mock_urlopen_sequence( - [ - 
self.DEVICE_RESPONSE, - self.TOKEN_RESPONSE, - ] + with mock.patch("parallel_web_tools.core.auth.revoke_token") as mock_revoke: + assert logout() is True + mock_revoke.assert_called_once_with("rt_shared") + + def test_logout_best_effort_on_revoke_failure(self, creds_file): + credentials.save( + credentials.Credentials( + selected_org_id="org_a", + orgs={ + "org_a": credentials.OrgCredentials( + control_api=credentials.ControlApiTokens(refresh_token="rt_bad"), + ) + }, + ) ) - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - token = _do_device_flow() + with mock.patch( + "parallel_web_tools.core.auth.revoke_token", + side_effect=Exception("server down"), + ): + assert logout() is True + assert not creds_file.exists() - assert token == "test-api-key-from-device" - mock_browser_open.assert_called_once() + def test_logout_no_token(self, creds_file): + assert logout() is False - @mock.patch("parallel_web_tools.core.auth.webbrowser.open") - @mock.patch("parallel_web_tools.core.auth.time.sleep") - def test_callback_receives_device_code_info(self, mock_sleep, mock_browser_open): - """on_device_code callback should receive DeviceCodeInfo.""" - mock_urlopen = _mock_urlopen_sequence( - [ - self.DEVICE_RESPONSE, - self.TOKEN_RESPONSE, - ] - ) - captured = [] +# --------------------------------------------------------------------------- +# Client creation +# --------------------------------------------------------------------------- - with mock.patch("parallel_web_tools.core.auth.urllib.request.urlopen", side_effect=mock_urlopen): - token = _do_device_flow(on_device_code=lambda info: captured.append(info)) - assert token == "test-api-key-from-device" - assert len(captured) == 1 - assert isinstance(captured[0], DeviceCodeInfo) - assert captured[0].user_code == "BCDF-GHJK" - # Browser should NOT be opened when callback is provided - mock_browser_open.assert_not_called() +class TestCreateClient: + def test_creates_client_with_explicit_key(self): + with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: + create_client(api_key="test-key-123", source="cli") + mock_parallel.assert_called_once() + kwargs = mock_parallel.call_args.kwargs + assert kwargs["api_key"] == "test-key-123" + assert "(cli)" in kwargs["default_headers"]["User-Agent"] + def test_creates_client_with_env_key(self, creds_file, monkeypatch): + monkeypatch.setenv("PARALLEL_API_KEY", "env-key") + with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: + create_client(source="duckdb") + assert mock_parallel.call_args.kwargs["api_key"] == "env-key" -class TestGetApiKeyDeviceFlow: - """Tests for get_api_key with device flow integration.""" + def test_raises_without_key(self, creds_file, monkeypatch): + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + with pytest.raises(ValueError, match="Parallel API key required"): + create_client() - def test_device_flag_uses_device_flow(self, tmp_path): - """device=True should use device flow instead of browser OAuth.""" - token_file = tmp_path / "tokens.json" + def test_passes_default_base_url(self): + with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: + create_client(api_key="k", source="cli") + assert mock_parallel.call_args.kwargs["base_url"] == "https://api.parallel.ai" - with mock.patch.dict(os.environ, {}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - with 
mock.patch("parallel_web_tools.core.auth._do_device_flow") as mock_device: - mock_device.return_value = "device-token" + def test_respects_parallel_api_url_env(self, monkeypatch): + monkeypatch.setenv("PARALLEL_API_URL", "http://localhost:9000") + with mock.patch("parallel_web_tools.core.auth.Parallel") as mock_parallel: + create_client(api_key="k", source="cli") + assert mock_parallel.call_args.kwargs["base_url"] == "http://localhost:9000" - result = get_api_key(force_login=True, device=True) - assert result == "device-token" - mock_device.assert_called_once() +class TestResolveApiKey: + def test_empty_string_key_is_falsy(self, creds_file, monkeypatch): + monkeypatch.setenv("PARALLEL_API_KEY", "env-key") + assert resolve_api_key(api_key="") == "env-key" - def test_headless_auto_selects_device_flow(self, tmp_path): - """Headless environment should auto-select device flow.""" - token_file = tmp_path / "tokens.json" + def test_stored_token_used_as_fallback(self, creds_file, legacy_file, monkeypatch): + legacy_file.write_text(json.dumps({"access_token": "stored-token"})) + monkeypatch.delenv("PARALLEL_API_KEY", raising=False) + assert resolve_api_key() == "stored-token" + assert creds_file.exists() - with mock.patch.dict(os.environ, {"SSH_CLIENT": "1.2.3.4 54321 22"}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - with mock.patch("parallel_web_tools.core.auth._do_device_flow") as mock_device: - mock_device.return_value = "ssh-device-token" + def test_env_var_beats_stored(self, creds_file, monkeypatch): + credentials.set_api_key_for_org("org_a", "stored-key") + monkeypatch.setenv("PARALLEL_API_KEY", "env-key") + assert resolve_api_key() == "env-key" - result = get_api_key(force_login=True) - assert result == "ssh-device-token" - mock_device.assert_called_once() +# --------------------------------------------------------------------------- +# is_headless +# --------------------------------------------------------------------------- - def test_non_headless_uses_browser_flow(self, tmp_path): - """Non-headless environment should use browser-based OAuth.""" - token_file = tmp_path / "tokens.json" - env = { - k: v - for k, v in os.environ.items() - if k not in ("SSH_CLIENT", "SSH_TTY", "CI", "container", "PARALLEL_API_KEY") - } - env["DISPLAY"] = ":0" # Ensure Linux display check passes - with mock.patch.dict(os.environ, env, clear=True): - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - with mock.patch("os.path.exists", return_value=False): - with mock.patch("parallel_web_tools.core.auth._do_oauth_flow") as mock_oauth: - mock_oauth.return_value = "browser-token" +class TestIsHeadless: + def test_ssh_client_detected(self): + with mock.patch.dict(os.environ, {"SSH_CLIENT": "1.2.3.4 54321 22"}): + assert is_headless() is True + + def test_ssh_tty_detected(self): + with mock.patch.dict(os.environ, {"SSH_TTY": "/dev/pts/0"}): + assert is_headless() is True - result = get_api_key(force_login=True) + def test_ci_detected(self): + with mock.patch.dict(os.environ, {"CI": "true"}): + assert is_headless() is True - assert result == "browser-token" - mock_oauth.assert_called_once() + def test_docker_detected(self): + with mock.patch("os.path.exists", return_value=True): + assert is_headless() is True - def test_on_device_code_callback_passed_through(self, tmp_path): - """on_device_code callback should be passed to _do_device_flow.""" - token_file = tmp_path / "tokens.json" - callback = mock.Mock() + 
def test_container_env_detected(self): + with mock.patch.dict(os.environ, {"container": "podman"}): + with mock.patch("os.path.exists", return_value=False): + assert is_headless() is True - with mock.patch.dict(os.environ, {}, clear=True): - os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): - with mock.patch("parallel_web_tools.core.auth._do_device_flow") as mock_device: - mock_device.return_value = "callback-token" + def test_normal_env_not_headless(self): + env = {k: v for k, v in os.environ.items() if k not in ("SSH_CLIENT", "SSH_TTY", "CI", "container")} + env["DISPLAY"] = ":0" + with mock.patch.dict(os.environ, env, clear=True): + with mock.patch("os.path.exists", return_value=False): + assert is_headless() is False + + +# --------------------------------------------------------------------------- +# get_control_api_access_token +# --------------------------------------------------------------------------- + + +NOW_FIXED = 1_800_000_000 # arbitrary "now" for clock-controlled tests + + +def _seed_control_api( + creds_file, + *, + access_token: str | None = "at_current", + access_token_expires_at: int | None = NOW_FIXED + 600, + refresh_token: str | None = "rt_current", + refresh_token_expires_at: int | None = NOW_FIXED + 604800, + authorization_expires_at: int | None = NOW_FIXED + 7776000, + org_id: str = "org_abc", +) -> None: + """Write a credentials file with a specific control_api state for testing.""" + credentials.save( + credentials.Credentials( + selected_org_id=org_id, + orgs={ + org_id: credentials.OrgCredentials( + api_key="sk_data", + control_api=credentials.ControlApiTokens( + access_token=access_token, + access_token_expires_at=access_token_expires_at, + access_token_scopes=["keys:read", "balance:write"], + refresh_token=refresh_token, + refresh_token_expires_at=refresh_token_expires_at, + authorization_expires_at=authorization_expires_at, + ), + ) + }, + ) + ) - result = get_api_key(force_login=True, device=True, on_device_code=callback) - assert result == "callback-token" - mock_device.assert_called_once_with(on_device_code=callback) +@pytest.fixture +def frozen_now(monkeypatch): + """Freeze auth.time.time() to NOW_FIXED.""" + monkeypatch.setattr("parallel_web_tools.core.auth.time.time", lambda: NOW_FIXED) + + +class TestGetControlApiAccessToken: + def test_returns_cached_when_valid(self, creds_file, frozen_now): + _seed_control_api(creds_file) + with mock.patch("parallel_web_tools.core.auth.refresh_access_token") as mock_refresh: + assert get_control_api_access_token() == "at_current" + mock_refresh.assert_not_called() + + def test_refreshes_when_access_token_expired(self, creds_file, frozen_now, mock_ensure_client_id): + _seed_control_api(creds_file, access_token_expires_at=NOW_FIXED - 10) + with mock.patch( + "parallel_web_tools.core.auth.refresh_access_token", + return_value=_token_response(access_token="at_refreshed", refresh_token="rt_new"), + ) as mock_refresh: + assert get_control_api_access_token() == "at_refreshed" + mock_refresh.assert_called_once_with("rt_current", client_id=mock_ensure_client_id) + + # Refreshed tokens must be persisted — loaded file reflects new state. + creds = credentials.load() + assert creds is not None + org = creds.selected_org() + assert org is not None + assert org.control_api.access_token == "at_refreshed" + assert org.control_api.refresh_token == "rt_new" + # New expiries are computed relative to the mocked "now". 
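+ # (frozen_now pins time.time() to NOW_FIXED; an expires_in of 600 in the
+ # refresh response should therefore land exactly at NOW_FIXED + 600.)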
+ assert org.control_api.access_token_expires_at == NOW_FIXED + 600 + + def test_skew_buffer_triggers_early_refresh(self, creds_file, frozen_now, mock_ensure_client_id): + # Access token technically valid but within the skew buffer — refresh. + _seed_control_api(creds_file, access_token_expires_at=NOW_FIXED + ACCESS_TOKEN_SKEW_SECONDS - 1) + with mock.patch( + "parallel_web_tools.core.auth.refresh_access_token", + return_value=_token_response(access_token="at_refreshed"), + ) as mock_refresh: + assert get_control_api_access_token() == "at_refreshed" + mock_refresh.assert_called_once() + + def test_raises_reauth_when_no_credentials(self, creds_file): + with pytest.raises(ReauthenticationRequired, match="not logged in"): + get_control_api_access_token() + + def test_raises_reauth_when_no_control_api_tokens(self, creds_file): + # Org exists but has no control_api.access_token (e.g. legacy-migrated org). + _seed_control_api( + creds_file, + access_token=None, + access_token_expires_at=None, + refresh_token=None, + refresh_token_expires_at=None, + authorization_expires_at=None, + ) + with mock.patch("parallel_web_tools.core.auth.refresh_access_token") as mock_refresh: + with pytest.raises(ReauthenticationRequired, match="not logged in"): + get_control_api_access_token() + mock_refresh.assert_not_called() + + def test_raises_reauth_when_authorization_expired(self, creds_file, frozen_now): + _seed_control_api(creds_file, authorization_expires_at=NOW_FIXED - 1) + with mock.patch("parallel_web_tools.core.auth.refresh_access_token") as mock_refresh: + with pytest.raises(ReauthenticationRequired, match="authorization grant"): + get_control_api_access_token() + mock_refresh.assert_not_called() + + def test_raises_reauth_when_refresh_token_expired(self, creds_file, frozen_now): + _seed_control_api( + creds_file, + access_token_expires_at=NOW_FIXED - 10, + refresh_token_expires_at=NOW_FIXED - 1, + ) + with mock.patch("parallel_web_tools.core.auth.refresh_access_token") as mock_refresh: + with pytest.raises(ReauthenticationRequired, match="refresh token"): + get_control_api_access_token() + mock_refresh.assert_not_called() + + def test_bubbles_up_refresh_http_error(self, creds_file, frozen_now): + _seed_control_api(creds_file, access_token_expires_at=NOW_FIXED - 10) + with mock.patch( + "parallel_web_tools.core.auth.refresh_access_token", + side_effect=Exception("500 Internal Server Error"), + ): + with pytest.raises(Exception, match="500 Internal Server Error"): + get_control_api_access_token() diff --git a/tests/test_cli.py b/tests/test_cli.py index 0218460..03b992c 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -2,7 +2,6 @@ import json import os -import sys from unittest import mock import pytest @@ -34,6 +33,14 @@ def runner(): return CliRunner() +@pytest.fixture +def mock_cli_client(): + """Patch CLI client creation and return the injected mock client.""" + client = mock.MagicMock() + with mock.patch("parallel_web_tools.core.auth.get_client", return_value=client): + yield client + + class TestParseCommaSeparated: """Tests for parse_comma_separated helper function.""" @@ -245,12 +252,17 @@ def test_version(self, runner): class TestAuthCommand: """Tests for the auth command.""" - def test_auth_with_env_var(self, runner): - """Should show authenticated via environment.""" + def test_auth_with_env_var(self, runner, tmp_path): + """Should show authenticated via environment when no stored credentials.""" + token_file = tmp_path / "nonexistent.json" with mock.patch.dict(os.environ, 
{"PARALLEL_API_KEY": "test-key"}): - result = runner.invoke(main, ["auth"]) - assert result.exit_code == 0 - assert "PARALLEL_API_KEY" in result.output or "environment" in result.output + with ( + mock.patch("parallel_web_tools.core.credentials.CREDENTIALS_FILE", token_file), + mock.patch("parallel_web_tools.core.credentials.LEGACY_CREDENTIALS_FILE", token_file), + ): + result = runner.invoke(main, ["auth"]) + assert result.exit_code == 0 + assert "PARALLEL_API_KEY" in result.output or "environment" in result.output def test_auth_not_authenticated(self, runner, tmp_path): """Should show not authenticated when no credentials.""" @@ -258,11 +270,59 @@ def test_auth_not_authenticated(self, runner, tmp_path): with mock.patch.dict(os.environ, {}, clear=True): os.environ.pop("PARALLEL_API_KEY", None) - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): + with ( + mock.patch("parallel_web_tools.core.credentials.CREDENTIALS_FILE", token_file), + mock.patch("parallel_web_tools.core.credentials.LEGACY_CREDENTIALS_FILE", token_file), + ): result = runner.invoke(main, ["auth"]) assert result.exit_code == 0 assert "Not authenticated" in result.output or "not" in result.output.lower() + def test_auth_warns_when_env_var_overrides_stored(self, runner): + """When PARALLEL_API_KEY is set AND stored creds exist, the override must be obvious.""" + status = { + "authenticated": True, + "method": "environment", + "env_var_set": True, + "has_stored_credentials": True, + "stored_overridden_by_env": True, + "token_file": "/tmp/auth.json", + "version": 1, + "selected_org_id": "org_123", + "selected_org_name": "Acme Org", + "has_control_api_tokens": True, + } + with mock.patch("parallel_web_tools.cli.commands.get_auth_status", return_value=status): + result = runner.invoke(main, ["auth"]) + + assert result.exit_code == 0 + # Active source labelled. + assert "PARALLEL_API_KEY" in result.output + # Override is loud. + assert "OVERRIDES" in result.output + # Stored org is shown so the user knows what's being shadowed. 
+ assert "Acme Org" in result.output + assert "inactive" in result.output + + def test_auth_json_includes_selected_org_name(self, runner): + """Should include selected org name in JSON output for OAuth auth.""" + status = { + "authenticated": True, + "method": "oauth", + "token_file": "/tmp/auth.json", + "version": 1, + "selected_org_id": "org_123", + "selected_org_name": "Acme Org", + "has_control_api_tokens": True, + } + + with mock.patch("parallel_web_tools.cli.commands.get_auth_status", return_value=status): + result = runner.invoke(main, ["auth", "--json"]) + + assert result.exit_code == 0 + output = json.loads(result.output) + assert output["selected_org_name"] == "Acme Org" + class TestLogoutCommand: """Tests for the logout command.""" @@ -271,7 +331,10 @@ def test_logout_no_credentials(self, runner, tmp_path): """Should handle logout when no credentials exist.""" token_file = tmp_path / "nonexistent.json" - with mock.patch("parallel_web_tools.core.auth.TOKEN_FILE", token_file): + with ( + mock.patch("parallel_web_tools.core.credentials.CREDENTIALS_FILE", token_file), + mock.patch("parallel_web_tools.core.credentials.LEGACY_CREDENTIALS_FILE", token_file), + ): result = runner.invoke(main, ["logout"]) assert result.exit_code == 0 assert "No stored credentials" in result.output or "no" in result.output.lower() @@ -976,7 +1039,7 @@ def test_all_none_does_not_raise(self): class TestSearchCommandMocked: """Tests for the search command with mocked Parallel SDK.""" - def test_search_successful_json_output(self, runner): + def test_search_successful_json_output(self, runner, mock_cli_client): """Should output JSON for successful search.""" mock_search_result = mock.MagicMock() mock_search_result.search_id = "search_123" @@ -992,17 +1055,8 @@ def test_search_successful_json_output(self, runner): mock_search_result.usage = None mock_search_result.warnings = [] - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.search.return_value = mock_search_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["search", "test query", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.search.return_value = mock_search_result + result = runner.invoke(main, ["search", "test query", "--json"]) assert result.exit_code == 0 output = json.loads(result.output) @@ -1011,7 +1065,7 @@ def test_search_successful_json_output(self, runner): assert len(output["results"]) == 1 assert output["results"][0]["url"] == "https://example.com" - def test_search_warnings_serialized_in_json_output(self, runner): + def test_search_warnings_serialized_in_json_output(self, runner, mock_cli_client): """Should serialize SDK Warning objects as dicts in JSON output.""" mock_search_result = mock.MagicMock() mock_search_result.search_id = "search_456" @@ -1032,17 +1086,8 @@ def test_search_warnings_serialized_in_json_output(self, runner): mock_search_result.usage = None mock_search_result.warnings = [warning_obj] - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.search.return_value = mock_search_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = 
runner.invoke(main, ["search", "test query", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.search.return_value = mock_search_result + result = runner.invoke(main, ["search", "test query", "--json"]) assert result.exit_code == 0 output = json.loads(result.output) @@ -1052,38 +1097,20 @@ def test_search_warnings_serialized_in_json_output(self, runner): assert warning["message"] == "Excerpts truncated to 500 characters" assert warning["detail"] == {"max_chars_total": 500} - def test_search_api_error_json_mode(self, runner): + def test_search_api_error_json_mode(self, runner, mock_cli_client): """Should output JSON error when API fails in --json mode.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.search.side_effect = RuntimeError("API unavailable") - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["search", "test query", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.search.side_effect = RuntimeError("API unavailable") + result = runner.invoke(main, ["search", "test query", "--json"]) assert result.exit_code == EXIT_API_ERROR output = json.loads(result.output) assert output["error"]["message"] == "API unavailable" assert output["error"]["type"] == "RuntimeError" - def test_search_api_error_console_mode(self, runner): + def test_search_api_error_console_mode(self, runner, mock_cli_client): """Should output formatted error when API fails in console mode.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.search.side_effect = RuntimeError("API unavailable") - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["search", "test query"]) - - del sys.modules["parallel"] + mock_cli_client.search.side_effect = RuntimeError("API unavailable") + result = runner.invoke(main, ["search", "test query"]) assert result.exit_code == EXIT_API_ERROR assert "API unavailable" in result.output @@ -1436,27 +1463,18 @@ def test_extract_rejects_more_than_20_urls(self, runner): assert result.exit_code != 0 assert "20 URLs" in result.output - def test_extract_accepts_exactly_20_urls(self, runner): + def test_extract_accepts_exactly_20_urls(self, runner, mock_cli_client): """At-the-limit case should not be blocked by the check.""" urls = [f"https://example.com/{i}" for i in range(20)] - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_result = mock.MagicMock() - mock_result.extract_id = "ext_20" - mock_result.session_id = None - mock_result.results = [] - mock_result.errors = [] - mock_result.usage = None - mock_result.warnings = None - mock_client.extract.return_value = mock_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["extract", *urls, "--json"]) - - del sys.modules["parallel"] + mock_result = mock.MagicMock() + mock_result.extract_id = "ext_20" + mock_result.session_id = None + mock_result.results = [] + mock_result.errors = [] + 
mock_result.usage = None + mock_result.warnings = None + mock_cli_client.extract.return_value = mock_result + result = runner.invoke(main, ["extract", *urls, "--json"]) assert result.exit_code == 0 def test_extract_rejects_objective_over_5000_chars(self, runner): @@ -1489,7 +1507,7 @@ def test_search_rejects_combined_domain_count_over_200(self, runner): class TestV1ResponseFieldsSurfaced: """Tests that V1 response fields (session_id, usage) are surfaced in output.""" - def test_search_output_includes_session_id_and_usage(self, runner): + def test_search_output_includes_session_id_and_usage(self, runner, mock_cli_client): """Search output should include session_id and usage fields when present.""" mock_result = mock.MagicMock() mock_result.search_id = "search_v1" @@ -1501,24 +1519,15 @@ def test_search_output_includes_session_id_and_usage(self, runner): mock_result.usage = [usage_item] mock_result.warnings = [] - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.search.return_value = mock_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["search", "test", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.search.return_value = mock_result + result = runner.invoke(main, ["search", "test", "--json"]) assert result.exit_code == 0 output = json.loads(result.stdout) assert output["session_id"] == "sess_xyz" assert output["usage"] == [{"name": "search_basic", "count": 1}] - def test_extract_output_includes_session_id_and_usage(self, runner): + def test_extract_output_includes_session_id_and_usage(self, runner, mock_cli_client): """Extract output should include session_id and usage fields when present.""" mock_result = mock.MagicMock() mock_result.extract_id = "ext_v1" @@ -1537,17 +1546,8 @@ def test_extract_output_includes_session_id_and_usage(self, runner): mock_result.usage = [usage_item] mock_result.warnings = None - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.extract.return_value = mock_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["extract", "https://example.com", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.extract.return_value = mock_result + result = runner.invoke(main, ["extract", "https://example.com", "--json"]) assert result.exit_code == 0 output = json.loads(result.stdout) @@ -1575,22 +1575,13 @@ def _setup_mock_search(self, mock_client): ("agentic", "advanced"), ], ) - def test_deprecated_modes_emit_warning_to_stderr(self, runner, deprecated_mode, expected_new): + def test_deprecated_modes_emit_warning_to_stderr(self, runner, mock_cli_client, deprecated_mode, expected_new): """Should warn on deprecated mode values and translate them.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - self._setup_mock_search(mock_client) - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke( - main, - ["search", "test", "--mode", 
deprecated_mode, "--json"], - ) - - del sys.modules["parallel"] + self._setup_mock_search(mock_cli_client) + result = runner.invoke( + main, + ["search", "test", "--mode", deprecated_mode, "--json"], + ) assert result.exit_code == 0 assert "[deprecated]" in result.stderr @@ -1599,26 +1590,17 @@ def test_deprecated_modes_emit_warning_to_stderr(self, runner, deprecated_mode, # JSON stdout must remain clean json.loads(result.stdout) # SDK call uses translated mode - call_kwargs = mock_client.search.call_args.kwargs + call_kwargs = mock_cli_client.search.call_args.kwargs assert call_kwargs["mode"] == expected_new @pytest.mark.parametrize("new_mode", ["basic", "advanced"]) - def test_new_modes_do_not_emit_warning(self, runner, new_mode): + def test_new_modes_do_not_emit_warning(self, runner, mock_cli_client, new_mode): """Should not warn when V1-native mode values are used.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - self._setup_mock_search(mock_client) - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke( - main, - ["search", "test", "--mode", new_mode, "--json"], - ) - - del sys.modules["parallel"] + self._setup_mock_search(mock_cli_client) + result = runner.invoke( + main, + ["search", "test", "--mode", new_mode, "--json"], + ) assert result.exit_code == 0 assert "[deprecated]" not in result.stderr @@ -1643,22 +1625,13 @@ def _setup_mock_extract(self, mock_client, with_excerpts=True): mock_result.warnings = None mock_client.extract.return_value = mock_result - def test_no_excerpts_emits_warning_and_strips_excerpts_from_output(self, runner): + def test_no_excerpts_emits_warning_and_strips_excerpts_from_output(self, runner, mock_cli_client): """--no-excerpts should warn (semantics changed) and strip excerpts client-side.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - self._setup_mock_extract(mock_client) - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke( - main, - ["extract", "https://example.com", "--no-excerpts", "--json"], - ) - - del sys.modules["parallel"] + self._setup_mock_extract(mock_cli_client) + result = runner.invoke( + main, + ["extract", "https://example.com", "--no-excerpts", "--json"], + ) assert result.exit_code == 0 assert "[deprecated]" in result.stderr @@ -1667,22 +1640,13 @@ def test_no_excerpts_emits_warning_and_strips_excerpts_from_output(self, runner) # Excerpts should be stripped from the CLI output assert "excerpts" not in output["results"][0] - def test_no_no_excerpts_keeps_excerpts_and_no_warning(self, runner): + def test_no_no_excerpts_keeps_excerpts_and_no_warning(self, runner, mock_cli_client): """Default extract should not warn and should include excerpts.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - self._setup_mock_extract(mock_client) - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke( - main, - ["extract", "https://example.com", "--json"], - ) - - del 
sys.modules["parallel"] + self._setup_mock_extract(mock_cli_client) + result = runner.invoke( + main, + ["extract", "https://example.com", "--json"], + ) assert result.exit_code == 0 assert "[deprecated]" not in result.stderr @@ -1693,26 +1657,17 @@ def test_no_no_excerpts_keeps_excerpts_and_no_warning(self, runner): class TestExtractCommandMocked: """Tests for the extract command with mocked Parallel SDK.""" - def test_extract_api_error_json_mode(self, runner): + def test_extract_api_error_json_mode(self, runner, mock_cli_client): """Should output JSON error when extract API fails in --json mode.""" - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.extract.side_effect = ConnectionError("Network error") - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["extract", "https://example.com", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.extract.side_effect = ConnectionError("Network error") + result = runner.invoke(main, ["extract", "https://example.com", "--json"]) assert result.exit_code == EXIT_API_ERROR output = json.loads(result.output) assert output["error"]["type"] == "ConnectionError" assert "Network error" in output["error"]["message"] - def test_extract_successful_json_output(self, runner): + def test_extract_successful_json_output(self, runner, mock_cli_client): """Should output structured JSON for successful extraction.""" mock_extract_result = mock.MagicMock() mock_extract_result.extract_id = "ext_123" @@ -1728,17 +1683,8 @@ def test_extract_successful_json_output(self, runner): mock_extract_result.usage = None mock_extract_result.warnings = None - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.extract.return_value = mock_extract_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["extract", "https://example.com", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.extract.return_value = mock_extract_result + result = runner.invoke(main, ["extract", "https://example.com", "--json"]) assert result.exit_code == 0 output = json.loads(result.output) @@ -1749,7 +1695,7 @@ def test_extract_successful_json_output(self, runner): assert output["results"][0]["publish_date"] == "2025-01-15" assert output["warnings"] == [] - def test_extract_warnings_serialized_in_json_output(self, runner): + def test_extract_warnings_serialized_in_json_output(self, runner, mock_cli_client): """Should serialize SDK Warning objects as dicts in JSON output.""" mock_extract_result = mock.MagicMock() mock_extract_result.extract_id = "ext_456" @@ -1769,17 +1715,8 @@ def test_extract_warnings_serialized_in_json_output(self, runner): mock_extract_result.usage = None mock_extract_result.warnings = [warning_obj] - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.extract.return_value = mock_extract_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = 
runner.invoke(main, ["extract", "https://example.com", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.extract.return_value = mock_extract_result + result = runner.invoke(main, ["extract", "https://example.com", "--json"]) assert result.exit_code == 0 output = json.loads(result.output) @@ -1789,7 +1726,7 @@ def test_extract_warnings_serialized_in_json_output(self, runner): assert warning["message"] == "Excerpts truncated" assert warning["detail"] == {"max_chars_total": 500} - def test_extract_errors_serialized_in_json_output(self, runner): + def test_extract_errors_serialized_in_json_output(self, runner, mock_cli_client): """Should serialize extract errors with correct API field names.""" mock_extract_result = mock.MagicMock() mock_extract_result.extract_id = "ext_789" @@ -1804,17 +1741,8 @@ def test_extract_errors_serialized_in_json_output(self, runner): mock_extract_result.usage = None mock_extract_result.warnings = None - with mock.patch("parallel_web_tools.cli.commands.get_api_key", return_value="test-key"): - with mock.patch.dict("sys.modules"): - mock_parallel_mod = mock.MagicMock() - mock_client = mock.MagicMock() - mock_client.extract.return_value = mock_extract_result - mock_parallel_mod.Parallel.return_value = mock_client - sys.modules["parallel"] = mock_parallel_mod - - result = runner.invoke(main, ["extract", "https://example.com/broken", "--json"]) - - del sys.modules["parallel"] + mock_cli_client.extract.return_value = mock_extract_result + result = runner.invoke(main, ["extract", "https://example.com/broken", "--json"]) assert result.exit_code == 0 output = json.loads(result.output) @@ -2685,3 +2613,496 @@ def test_completion_install_standalone_rejected(self, runner): with mock.patch("parallel_web_tools.cli.commands._STANDALONE_MODE", True): result = runner.invoke(main, ["completion", "install", "--shell", "bash"]) assert result.exit_code != 0 + + +# --------------------------------------------------------------------------- +# login email → magic link +# --------------------------------------------------------------------------- + + +def _device_info(): + from parallel_web_tools.core.auth import DeviceCodeInfo + + return DeviceCodeInfo( + device_code="dc_xyz", + user_code="ABCD-1234", + verification_uri="http://verif.example", + verification_uri_complete="http://verif.example?user_code=ABCD-1234", + expires_in=600, + interval=5, + ) + + +def _fake_get_api_key(info): + """Factory: a get_api_key stub that invokes on_device_code(info) then returns.""" + + def fake(force_login=False, on_device_code=None, login_hint=None, **_): + # Match auth.get_api_key's signature loosely so both kwargs- and args-based calls work. 
+ assert on_device_code is not None + on_device_code(info) + return "sk_fake" + + return fake + + +class TestLoginEmailCommand: + def test_sends_magic_link_and_skips_browser(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch("parallel_web_tools.core.auth.send_magic_link") as mock_send, + mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"), + mock.patch("webbrowser.open") as mock_browser, + ): + result = runner.invoke(main, ["login", "email", "u@example.com"]) + + assert result.exit_code == 0 + mock_send.assert_called_once_with(client_id="cid_xyz", email="u@example.com", user_code="ABCD-1234") + mock_browser.assert_not_called() + assert "Magic link sent to u@example.com" in result.output + # Still shows the code as a fallback path. + assert "ABCD-1234" in result.output + + def test_json_mode_reports_magic_link_sent(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch("parallel_web_tools.core.auth.send_magic_link"), + mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"), + mock.patch("webbrowser.open") as mock_browser, + ): + result = runner.invoke(main, ["login", "--json", "email", "u@example.com"]) + + assert result.exit_code == 0 + mock_browser.assert_not_called() + # First line is the waiting_for_authorization payload; the trailing + # "authenticated" line is appended by _run_login. + first_line = result.output.splitlines()[0] + payload = json.loads(first_line) + assert payload["status"] == "waiting_for_authorization" + assert payload["magic_link_sent"] is True + assert payload["user_code"] == "ABCD-1234" + + def test_falls_back_when_magic_link_fails(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch( + "parallel_web_tools.core.auth.send_magic_link", + side_effect=Exception("SMTP unavailable"), + ), + mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"), + mock.patch( + "parallel_web_tools.core.auth.is_headless", + return_value=True, # keep the test hermetic: don't attempt real browser open + ), + mock.patch("webbrowser.open") as mock_browser, + ): + result = runner.invoke(main, ["login", "email", "u@example.com"]) + + assert result.exit_code == 0 + # Magic-link failure path falls through to the manual-flow display. + assert "Could not send magic link" in result.output + assert "SMTP unavailable" in result.output + assert "ABCD-1234" in result.output + # Headless env: browser must not open even in the fallback path. 
+ mock_browser.assert_not_called() + + def test_json_mode_reports_magic_link_error(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch( + "parallel_web_tools.core.auth.send_magic_link", + side_effect=Exception("SMTP unavailable"), + ), + mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"), + ): + result = runner.invoke(main, ["login", "--json", "email", "u@example.com"]) + + assert result.exit_code == 0 + first_line = result.output.splitlines()[0] + payload = json.loads(first_line) + assert payload["magic_link_sent"] is False + assert "SMTP unavailable" in payload["magic_link_error"] + + +class TestLoginWithoutEmailUnchanged: + def test_no_email_still_opens_browser(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch("parallel_web_tools.core.auth.send_magic_link") as mock_send, + mock.patch("parallel_web_tools.core.auth.is_headless", return_value=False), + mock.patch("webbrowser.open") as mock_browser, + ): + result = runner.invoke(main, ["login"]) + + assert result.exit_code == 0 + # No email → no magic-link call. + mock_send.assert_not_called() + # Browser still opens in the plain `login` flow. + mock_browser.assert_called_once() + + +class TestLoginGoogleCommand: + def test_opens_browser_with_google_login_hint(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch("parallel_web_tools.core.auth.send_magic_link") as mock_send, + mock.patch("parallel_web_tools.core.auth.is_headless", return_value=False), + mock.patch("webbrowser.open") as mock_browser, + ): + result = runner.invoke(main, ["login", "google"]) + + assert result.exit_code == 0 + # No magic-link send on google login. + mock_send.assert_not_called() + # Browser opens with the google hint. + mock_browser.assert_called_once() + opened_url = mock_browser.call_args.args[0] + assert "login_hint=login%3Dgoogle" in opened_url + + +class TestLoginSsoCommand: + def test_opens_browser_with_sso_hint_and_separate_email_param(self, runner): + info = _device_info() + with ( + mock.patch( + "parallel_web_tools.cli.commands.get_api_key", + side_effect=_fake_get_api_key(info), + ), + mock.patch("parallel_web_tools.core.auth.send_magic_link") as mock_send, + mock.patch("parallel_web_tools.core.auth.is_headless", return_value=False), + mock.patch("webbrowser.open") as mock_browser, + ): + result = runner.invoke(main, ["login", "sso", "u@example.com"]) + + assert result.exit_code == 0 + # SSO still uses browser-based auth, no magic link. + mock_send.assert_not_called() + mock_browser.assert_called_once() + opened_url = mock_browser.call_args.args[0] + # URL-encoded login=sso (no comma-email inside the hint). + assert "login_hint=login%3Dsso" in opened_url + # Email is a separate top-level query param. + assert "email=u%40example.com" in opened_url + # And the old bundled form must not leak through. + assert "login%3Dsso%2Ce" not in opened_url + + +class TestBuildLoginHint: + def test_email_hint_does_not_include_email(self): + # Email travels as a separate `email=…` query param via _login_extra_params. 
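+ # (Illustrative, mirroring the sso assertions above: the enriched URL is
+ # expected to carry login_hint=login%3Demail&email=u%40example.com.)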
+ from parallel_web_tools.cli.commands import _build_login_hint + + assert _build_login_hint("email", "u@example.com") == "login=email" + + def test_login_extra_params_carries_email_for_email_and_sso(self): + from parallel_web_tools.cli.commands import _login_extra_params + + assert _login_extra_params("email", "u@example.com") == {"email": "u@example.com"} + assert _login_extra_params("sso", "u@example.com") == {"email": "u@example.com"} + # google / plain carry no identity → no extra param. + assert _login_extra_params("google", None) is None + assert _login_extra_params(None, None) is None + + def test_google_ignores_email(self): + from parallel_web_tools.cli.commands import _build_login_hint + + assert _build_login_hint("google", None) == "login=google" + + def test_sso_hint_does_not_include_email(self): + # SSO email travels as a separate `email=…` query param (see _login_extra_params), + # NOT embedded in the hint value. + from parallel_web_tools.cli.commands import _build_login_hint + + assert _build_login_hint("sso", "u@example.com") == "login=sso" + + def test_none_method_returns_none(self): + from parallel_web_tools.cli.commands import _build_login_hint + + assert _build_login_hint(None, None) is None + assert _build_login_hint(None, "u@example.com") is None + + def test_sso_without_email_errors(self): + from parallel_web_tools.cli.commands import _build_login_hint + + with pytest.raises(ValueError, match="requires an email"): + _build_login_hint("sso", None) + + def test_email_without_email_errors(self): + from parallel_web_tools.cli.commands import _build_login_hint + + with pytest.raises(ValueError, match="requires an email"): + _build_login_hint("email", None) + + def test_unknown_method_errors(self): + from parallel_web_tools.cli.commands import _build_login_hint + + with pytest.raises(ValueError, match="Unknown login_method"): + _build_login_hint("saml", None) + + +# --------------------------------------------------------------------------- +# balance get / balance add +# --------------------------------------------------------------------------- + + +def _balance_model(**overrides): + """Build a BalanceResponse pydantic instance for CLI-level mocking.""" + from parallel_web_tools.core.service_types import BalanceResponse + + base = BalanceResponse( + org_id="org_abc", + credit_balance_cents=1500, + pending_debit_balance_cents=0, + will_invoice=False, + ) + return base.model_copy(update=overrides) if overrides else base + + +class TestBalanceGroup: + def test_group_help_lists_subcommands(self, runner): + result = runner.invoke(main, ["balance", "--help"]) + assert result.exit_code == 0 + assert "get" in result.output + assert "add" in result.output + + def test_get_help(self, runner): + result = runner.invoke(main, ["balance", "get", "--help"]) + assert result.exit_code == 0 + assert "credit balance" in result.output.lower() + + def test_add_help(self, runner): + result = runner.invoke(main, ["balance", "add", "--help"]) + assert result.exit_code == 0 + assert "AMOUNT_CENTS" in result.output + assert "--idempotency-key" in result.output + + +class TestBalanceGetCommand: + def test_json_output(self, runner): + balance = _balance_model(credit_balance_cents=1234, pending_debit_balance_cents=56) + with ( + mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"), + mock.patch("parallel_web_tools.core.service.get_balance", return_value=balance) as mock_get, + ): + result = runner.invoke(main, ["balance", "--json", "get"]) + + assert 
result.exit_code == 0
+        mock_get.assert_called_once_with("atk")
+        output = json.loads(result.output)
+        assert output["org_id"] == "org_abc"
+        assert output["credit_balance_cents"] == 1234
+        assert output["pending_debit_balance_cents"] == 56
+
+    def test_console_output(self, runner):
+        balance = _balance_model(credit_balance_cents=250)
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.get_balance", return_value=balance),
+        ):
+            result = runner.invoke(main, ["balance", "get"])
+
+        assert result.exit_code == 0
+        assert "org_abc" in result.output
+        # $2.50 with a cents-in-parens suffix.
+        assert "$2.50" in result.output
+        assert "250" in result.output
+
+    def test_will_invoice_flag_shown(self, runner):
+        balance = _balance_model(credit_balance_cents=0, will_invoice=True)
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.get_balance", return_value=balance),
+        ):
+            result = runner.invoke(main, ["balance", "get"])
+        assert result.exit_code == 0
+        assert "invoice" in result.output.lower()
+
+    def test_reauth_required_exits_auth_error(self, runner):
+        from parallel_web_tools.core.auth import ReauthenticationRequired
+
+        with mock.patch(
+            "parallel_web_tools.cli.commands.get_control_api_access_token",
+            side_effect=ReauthenticationRequired("not logged in"),
+        ):
+            result = runner.invoke(main, ["balance", "get"])
+
+        assert result.exit_code == EXIT_AUTH_ERROR
+        assert "Authentication required" in result.output
+
+    def test_service_api_error_exits_api_error(self, runner):
+        from parallel_web_tools.core.service import ServiceApiError
+
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.get_balance", side_effect=ServiceApiError("boom")),
+        ):
+            result = runner.invoke(main, ["balance", "get"])
+
+        assert result.exit_code == EXIT_API_ERROR
+        assert "Balance API error" in result.output
+
+
+class TestBalanceAddCommand:
+    def test_json_output_derives_idempotency_key(self, runner):
+        balance = _balance_model(credit_balance_cents=1600)
+        captured_key: dict = {}
+
+        def fake_add(token, amount_cents, idempotency_key):
+            captured_key["key"] = idempotency_key
+            return balance
+
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", side_effect=fake_add),
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+            mock.patch("parallel_web_tools.cli.commands.time.time", return_value=1_700_000_123.0),
+        ):
+            result = runner.invoke(main, ["balance", "--json", "add", "100"])
+
+        assert result.exit_code == 0
+        # five_min_bucket = floor(1_700_000_123 / 300) * 300 = 1_700_000_100
+        assert captured_key["key"] == "cid_xyz-100-1700000100"
+        output = json.loads(result.output)
+        assert output["credit_balance_cents"] == 1600
+
+    def test_console_output_shows_charge_and_new_balance(self, runner):
+        balance = _balance_model(credit_balance_cents=1600)
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", return_value=balance),
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+        ):
+            result = runner.invoke(main, ["balance", "add", "100"])
+
+        assert result.exit_code == 0
+        assert "$1.00" in result.output  # charge amount
+        assert "$16.00" in result.output  # new balance
+
+    def test_explicit_idempotency_key_overrides_derivation(self, runner):
+        balance = _balance_model()
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", return_value=balance) as mock_add,
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id") as mock_ensure,
+        ):
+            result = runner.invoke(main, ["balance", "add", "100", "--idempotency-key", "fixed-key"])
+
+        assert result.exit_code == 0
+        # ensure_client_id must NOT be called when an explicit key was provided.
+        mock_ensure.assert_not_called()
+        assert mock_add.call_args.args[2] == "fixed-key"
+
+    def test_same_bucket_produces_same_key(self, runner):
+        """Two invocations inside the same 5-min bucket must derive the same key."""
+        keys: list[str] = []
+
+        def capture_key(token, amount_cents, idempotency_key):
+            keys.append(idempotency_key)
+            return _balance_model()
+
+        # 1_700_000_100 is 300-aligned (5_666_667 * 300). Both timestamps fall
+        # inside the [1_700_000_100, 1_700_000_400) bucket.
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", side_effect=capture_key),
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+            mock.patch("parallel_web_tools.cli.commands.time.time", side_effect=[1_700_000_100, 1_700_000_399]),
+        ):
+            assert runner.invoke(main, ["balance", "add", "100"]).exit_code == 0
+            assert runner.invoke(main, ["balance", "add", "100"]).exit_code == 0
+
+        assert keys[0] == keys[1] == "cid_xyz-100-1700000100"
+
+    def test_next_bucket_produces_different_key(self, runner):
+        keys: list[str] = []
+
+        def capture_key(token, amount_cents, idempotency_key):
+            keys.append(idempotency_key)
+            return _balance_model()
+
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", side_effect=capture_key),
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+            mock.patch("parallel_web_tools.cli.commands.time.time", side_effect=[1_700_000_100, 1_700_000_400]),
+        ):
+            runner.invoke(main, ["balance", "add", "100"])
+            runner.invoke(main, ["balance", "add", "100"])
+
+        assert keys[0] == "cid_xyz-100-1700000100"
+        assert keys[1] == "cid_xyz-100-1700000400"
+
+    def test_zero_amount_passes_through_to_service(self, runner):
+        balance = _balance_model()
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", return_value=balance) as mock_add,
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+        ):
+            result = runner.invoke(main, ["balance", "add", "0"])
+
+        assert result.exit_code == 0
+        assert mock_add.call_args.args[1] == 0
+
+    def test_large_amount_passes_through_to_service(self, runner):
+        balance = _balance_model()
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", return_value=balance) as mock_add,
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+        ):
+            result = runner.invoke(main, ["balance", "add", "1001"])
+
+        assert result.exit_code == 0
+        assert mock_add.call_args.args[1] == 1001
+
+    def test_reauth_required_exits_auth_error(self, runner):
+        from parallel_web_tools.core.auth import ReauthenticationRequired
+
+        with (
+            mock.patch(
+                "parallel_web_tools.cli.commands.get_control_api_access_token",
+                side_effect=ReauthenticationRequired("not logged in"),
+            ),
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+        ):
+            result = runner.invoke(main, ["balance", "add", "100"])
+
+        assert result.exit_code == EXIT_AUTH_ERROR
+        assert "Authentication required" in result.output
+
+    def test_service_api_error_exits_api_error(self, runner):
+        from parallel_web_tools.core.service import ServiceApiError
+
+        with (
+            mock.patch("parallel_web_tools.cli.commands.get_control_api_access_token", return_value="atk"),
+            mock.patch("parallel_web_tools.core.service.add_balance", side_effect=ServiceApiError("card declined")),
+            mock.patch("parallel_web_tools.core.auth.ensure_client_id", return_value="cid_xyz"),
+        ):
+            result = runner.invoke(main, ["balance", "add", "100"])
+
+        assert result.exit_code == EXIT_API_ERROR
+        assert "Balance API error" in result.output
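The bucketing scheme these tests pin down is easy to state on its own. A minimal sketch of the derivation, using a hypothetical free function `derive_idempotency_key` (the real logic lives inside the `balance add` command in `parallel_web_tools/cli/commands.py`):

```python
import time


def derive_idempotency_key(client_id: str, amount_cents: int, now: float | None = None) -> str:
    """Stable for 5 minutes per (client_id, amount) pair, so an accidental
    double-submit inside one bucket cannot double-charge."""
    ts = time.time() if now is None else now
    five_min_bucket = int(ts // 300) * 300  # floor to a 300-second boundary
    return f"{client_id}-{amount_cents}-{five_min_bucket}"


# Matches the expectation in test_json_output_derives_idempotency_key:
assert derive_idempotency_key("cid_xyz", 100, now=1_700_000_123.0) == "cid_xyz-100-1700000100"
```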
diff --git a/tests/test_credentials.py b/tests/test_credentials.py
new file mode 100644
index 0000000..097b532
--- /dev/null
+++ b/tests/test_credentials.py
@@ -0,0 +1,259 @@
+"""Tests for the versioned credentials storage module."""
+
+import json
+import os
+
+import pytest
+
+from parallel_web_tools.core import credentials
+from parallel_web_tools.core.credentials import (
+    CURRENT_VERSION,
+    LEGACY_ORG_ID,
+    ControlApiTokens,
+    Credentials,
+    OrgCredentials,
+    _migrate_v0,
+    delete,
+    get_selected_api_key,
+    load,
+    save,
+    set_api_key_for_org,
+)
+
+
+@pytest.fixture
+def creds_file(tmp_path, monkeypatch):
+    """Patch auth.json path to a tmp path for isolation."""
+    path = tmp_path / "auth.json"
+    monkeypatch.setattr(credentials, "AUTH_FILE", path)
+    monkeypatch.setattr(credentials, "CREDENTIALS_FILE", path)
+    monkeypatch.setattr(credentials, "LEGACY_CREDENTIALS_FILE", tmp_path / "credentials.json")
+    return path
+
+
+@pytest.fixture
+def legacy_file(tmp_path):
+    return tmp_path / "credentials.json"
+
+
+class TestMigrationV0:
+    def test_migrate_v0_with_access_token(self):
+        result = _migrate_v0({"access_token": "abc123"})
+        assert result == {
+            "version": CURRENT_VERSION,
+            "selected_org_id": LEGACY_ORG_ID,
+            "orgs": {LEGACY_ORG_ID: {"api_key": "abc123"}},
+        }
+
+    def test_migrate_v0_empty(self):
+        result = _migrate_v0({})
+        assert result["version"] == CURRENT_VERSION
+        assert result["selected_org_id"] is None
+        assert result["orgs"] == {}
+
+
+class TestLoad:
+    def test_load_nonexistent_returns_none(self, creds_file):
+        assert load() is None
+
+    def test_load_corrupted_returns_none(self, creds_file):
+        creds_file.parent.mkdir(parents=True, exist_ok=True)
+        creds_file.write_text("not valid json {{{")
+        assert load() is None
+
+    def test_load_non_dict_returns_none(self, creds_file):
+        creds_file.parent.mkdir(parents=True, exist_ok=True)
+        creds_file.write_text(json.dumps(["a", "b"]))
+        assert load() is None
+
+    def test_load_v0_migrates_forward_and_removes_legacy_file(self, creds_file, legacy_file):
+        legacy_file.parent.mkdir(parents=True, exist_ok=True)
+        legacy_file.write_text(json.dumps({"access_token": "tok_v0"}))
+
+        creds = load()
+
+        assert creds is not None
+        assert creds.version == CURRENT_VERSION
+        assert creds.selected_org_id == LEGACY_ORG_ID
+        assert creds.orgs[LEGACY_ORG_ID].api_key == "tok_v0"
+
+        assert creds_file.exists()
+        auth_disk = json.loads(creds_file.read_text())
+        assert auth_disk["version"] == CURRENT_VERSION
+        assert auth_disk["selected_org_id"] == LEGACY_ORG_ID
+        assert auth_disk["orgs"][LEGACY_ORG_ID]["api_key"] == "tok_v0"
+        # The legacy file should have been removed once migration succeeded.
+        assert not legacy_file.exists()
+
+    def test_load_v1_roundtrip(self, creds_file):
+        original = Credentials(
+            selected_org_id="org_abc",
+            client_id="cid_registered",
+            orgs={
+                "org_abc": OrgCredentials(
+                    api_key="sk_test",
+                    org_name="Acme Org",
+                    control_api=ControlApiTokens(
+                        access_token="atk",
+                        access_token_expires_at=1710000600,
+                        access_token_scopes=["keys:write", "balance:read"],
+                        refresh_token="rtk",
+                        refresh_token_expires_at=1710604800,
+                        authorization_expires_at=1717776000,
+                    ),
+                )
+            },
+        )
+        save(original)
+
+        loaded = load()
+        assert loaded == original
+        assert loaded is not None and loaded.client_id == "cid_registered"
+
+    def test_load_prefers_existing_auth_file_over_legacy_file(self, creds_file, legacy_file):
+        creds_file.parent.mkdir(parents=True, exist_ok=True)
+        creds_file.write_text(
+            json.dumps({"version": 1, "selected_org_id": None, "orgs": {}, "client_id": "cid_registered"})
+        )
+        legacy_file.write_text(json.dumps({"access_token": "tok_v0"}))
+
+        loaded = load()
+
+        assert loaded is not None
+        assert loaded.client_id == "cid_registered"
+        assert loaded.selected_org_id is None
+        assert loaded.orgs == {}
+
+    def test_migrated_v0_has_no_client_id(self, creds_file, legacy_file):
+        # v0 files never carried a client_id — migration must leave it unset
+        # so _ensure_client_id knows to register on the next login.
+        legacy_file.parent.mkdir(parents=True, exist_ok=True)
+        legacy_file.write_text(json.dumps({"access_token": "tok_v0"}))
+        loaded = load()
+        assert loaded is not None
+        assert loaded.client_id is None
+        assert loaded.orgs[LEGACY_ORG_ID].org_name is None
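For reference, the v0 -> v1 shape change that TestMigrationV0 encodes can be sketched as below; the constant values are assumptions (only their names come from the imports above), and the real `_migrate_v0` lives in `parallel_web_tools.core.credentials`:

```python
CURRENT_VERSION = 1  # assumed value
LEGACY_ORG_ID = "legacy"  # assumed placeholder; only the name is known here


def migrate_v0(data: dict) -> dict:
    """Lift a v0 blob ({"access_token": ...}) into the versioned v1 layout."""
    token = data.get("access_token")
    return {
        "version": CURRENT_VERSION,
        "selected_org_id": LEGACY_ORG_ID if token else None,
        "orgs": {LEGACY_ORG_ID: {"api_key": token}} if token else {},
    }
```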
+
+
+class TestSave:
+    def test_save_creates_parent_dir(self, tmp_path, monkeypatch):
+        path = tmp_path / "nested" / "subdir" / "auth.json"
+        monkeypatch.setattr(credentials, "CREDENTIALS_FILE", path)
+        monkeypatch.setattr(credentials, "AUTH_FILE", path)
+        monkeypatch.setattr(credentials, "LEGACY_CREDENTIALS_FILE", tmp_path / "nested" / "subdir" / "credentials.json")
+        save(Credentials())
+        assert path.exists()
+
+    def test_save_sets_0600_permissions(self, creds_file):
+        save(Credentials(selected_org_id="x", orgs={"x": OrgCredentials(api_key="k")}))
+        mode = oct(creds_file.stat().st_mode)[-3:]
+        assert mode == "600"
+
+    def test_save_does_not_write_legacy_file(self, creds_file, legacy_file):
+        save(Credentials(selected_org_id="x", orgs={"x": OrgCredentials(api_key="k")}))
+        assert not legacy_file.exists()
+
+    def test_atomic_write_preserves_existing_on_failure(self, creds_file, monkeypatch):
+        # Write an initial valid file.
+        save(Credentials(selected_org_id="orig", orgs={"orig": OrgCredentials(api_key="original")}))
+        original_contents = creds_file.read_text()
+
+        # Make os.replace blow up during the next save.
+        def boom(src, dst):
+            raise OSError("simulated failure")
+
+        monkeypatch.setattr(os, "replace", boom)
+
+        with pytest.raises(OSError, match="simulated failure"):
+            save(Credentials(selected_org_id="new", orgs={"new": OrgCredentials(api_key="new")}))
+
+        # Original file should be untouched.
+        assert creds_file.read_text() == original_contents
+
+        # And there should be no leftover temp files in the parent dir.
+        leftovers = [p for p in creds_file.parent.iterdir() if p.name.startswith(".auth.")]
+        assert leftovers == []
+
+
+class TestDelete:
+    def test_delete_existing(self, creds_file):
+        save(Credentials())
+        assert delete() is True
+        assert not creds_file.exists()
+
+    def test_delete_removes_legacy_file_too(self, creds_file, legacy_file):
+        save(Credentials(selected_org_id="x", orgs={"x": OrgCredentials(api_key="k")}))
+        assert delete() is True
+        assert not creds_file.exists()
+        assert not legacy_file.exists()
+
+    def test_delete_nonexistent(self, creds_file):
+        assert delete() is False
+
+
+class TestHelpers:
+    def test_get_selected_api_key_none_when_empty(self, creds_file):
+        assert get_selected_api_key() is None
+
+    def test_get_selected_api_key_returns_selected(self, creds_file):
+        save(
+            Credentials(
+                selected_org_id="a",
+                orgs={
+                    "a": OrgCredentials(api_key="key_a"),
+                    "b": OrgCredentials(api_key="key_b"),
+                },
+            )
+        )
+        assert get_selected_api_key() == "key_a"
+
+    def test_get_selected_api_key_no_selection(self, creds_file):
+        save(Credentials(orgs={"a": OrgCredentials(api_key="key_a")}))
+        assert get_selected_api_key() is None
+
+    def test_set_api_key_for_org_creates_and_selects(self, creds_file):
+        set_api_key_for_org("org_new", "sk_xyz")
+        creds = load()
+        assert creds is not None
+        assert creds.selected_org_id == "org_new"
+        assert creds.orgs["org_new"].api_key == "sk_xyz"
+
+    def test_set_api_key_for_org_preserves_selection(self, creds_file):
+        set_api_key_for_org("org_a", "sk_a")  # becomes selected
+        set_api_key_for_org("org_b", "sk_b")  # should NOT change selection
+        creds = load()
+        assert creds is not None
+        assert creds.selected_org_id == "org_a"
+        assert creds.orgs["org_a"].api_key == "sk_a"
+        assert creds.orgs["org_b"].api_key == "sk_b"
+
+    def test_set_api_key_for_org_updates_existing(self, creds_file):
+        set_api_key_for_org("org_a", "old_key")
+        set_api_key_for_org("org_a", "new_key")
+        creds = load()
+        assert creds is not None
+        assert creds.orgs["org_a"].api_key == "new_key"
+
+    def test_set_api_key_for_org_preserves_control_api(self, creds_file):
+        # Seed with a control_api block.
+        save(
+            Credentials(
+                selected_org_id="org_a",
+                orgs={
+                    "org_a": OrgCredentials(
+                        api_key="old",
+                        control_api=ControlApiTokens(
+                            access_token="atk",
+                            refresh_token="rtk",
+                        ),
+                    )
+                },
+            )
+        )
+        set_api_key_for_org("org_a", "new")
+
+        creds = load()
+        assert creds is not None
+        assert creds.orgs["org_a"].api_key == "new"
+        assert creds.orgs["org_a"].control_api.access_token == "atk"
+        assert creds.orgs["org_a"].control_api.refresh_token == "rtk"
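TestSave fixes the atomic-write contract: write to a temp file in the same directory, chmod it to 0600, then `os.replace` it over the target, so a crash can never leave a half-written credentials file. A sketch of a `save()` satisfying those tests, under the assumption that temp files are named `.auth.*` as the leftover check implies:

```python
import json
import os
import tempfile
from pathlib import Path


def save_atomic(path: Path, payload: dict) -> None:
    path.parent.mkdir(parents=True, exist_ok=True)
    fd, tmp = tempfile.mkstemp(prefix=".auth.", dir=path.parent)
    try:
        with os.fdopen(fd, "w") as f:
            json.dump(payload, f, indent=2)
        os.chmod(tmp, 0o600)  # owner read/write only
        os.replace(tmp, path)  # atomic; the old file survives any failure up to here
    except BaseException:
        os.unlink(tmp)  # no ".auth.*" leftovers, per the test above
        raise
```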
diff --git a/tests/test_enrichment.py b/tests/test_enrichment.py
index e6e3f84..32da38a 100644
--- a/tests/test_enrichment.py
+++ b/tests/test_enrichment.py
@@ -29,17 +29,18 @@ def test_explicit_api_key(self):
         assert result == "test-key-123"

     def test_env_var_fallback(self):
-        """Should use PARALLEL_API_KEY env var when no explicit key."""
+        """Should use PARALLEL_API_KEY env var when no explicit key. Env beats stored creds."""
         with mock.patch.dict(os.environ, {"PARALLEL_API_KEY": "env-key-456"}):
-            result = resolve_api_key()
-            assert result == "env-key-456"
+            with mock.patch("parallel_web_tools.core.credentials.get_selected_api_key", return_value="stored-key"):
+                result = resolve_api_key()
+                assert result == "env-key-456"

     def test_oauth_fallback(self):
         """Should use stored OAuth credentials when no env var."""
         with mock.patch.dict(os.environ, {}, clear=True):
             os.environ.pop("PARALLEL_API_KEY", None)

-            with mock.patch("parallel_web_tools.core.auth._load_stored_token") as mock_load:
+            with mock.patch("parallel_web_tools.core.credentials.get_selected_api_key") as mock_load:
                 mock_load.return_value = "oauth-key-789"
                 result = resolve_api_key()
                 assert result == "oauth-key-789"
@@ -49,7 +50,7 @@ def test_no_key_raises_error(self):
         with mock.patch.dict(os.environ, {}, clear=True):
             os.environ.pop("PARALLEL_API_KEY", None)

-            with mock.patch("parallel_web_tools.core.auth._load_stored_token") as mock_load:
+            with mock.patch("parallel_web_tools.core.credentials.get_selected_api_key") as mock_load:
                 mock_load.return_value = None

                 with pytest.raises(ValueError) as exc_info:
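The updated tests encode a strict precedence order for `resolve_api_key`: explicit argument, then `PARALLEL_API_KEY`, then the stored credential for the selected org. A sketch of that resolution logic (illustrative only; the real function and its exact error message live in the enrichment module):

```python
import os

from parallel_web_tools.core import credentials


def resolve_api_key_sketch(explicit_key: str | None = None) -> str:
    if explicit_key:
        return explicit_key
    env_key = os.environ.get("PARALLEL_API_KEY")
    if env_key:
        return env_key  # env var wins over stored credentials
    stored = credentials.get_selected_api_key()
    if stored:
        return stored
    raise ValueError("No API key: pass one explicitly, set PARALLEL_API_KEY, or run parallel-cli login")
```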
+ """ + if not isinstance(responses, list): + responses = [responses] + idx = [0] + + @contextmanager + def impl(req, timeout=None): + if capture is not None: + capture["url"] = req.full_url + capture["body"] = req.data.decode() if req.data else "" + capture["headers"] = dict(req.header_items()) + capture["method"] = req.get_method() + i = min(idx[0], len(responses) - 1) + idx[0] += 1 + r = responses[i] + if callable(r): + r = r(req) + if isinstance(r, urllib.error.HTTPError): + raise r + payload = r if isinstance(r, (bytes, bytearray)) else json.dumps(r).encode() + yield io.BytesIO(bytes(payload)) + + return mock.patch("parallel_web_tools.core.service.urllib.request.urlopen", side_effect=impl) + + +def _app_item(app_name: str, app_id: str = "app_x", org_id: str = "org_x") -> dict: + return {"app_name": app_name, "org_name": None, "app_id": app_id, "org_id": org_id} + + +def _api_key_response(raw_api_key: str | None = "sk_minted", name: str = "parallel-cli-2026-04-21-1432") -> dict: + """Build a full CreateKeyResponse payload (flat — no ``api_key`` wrapper).""" + return { + "api_key_id": "key_1", + "api_key_name": name, + "app_id": "app_cli", + "app_name": service.PARALLEL_CLI_APP_NAME, + "created_by_user_id": "user_1", + "created_by_user_email": "user@example.com", + "display_value": "sk_***1234", + "raw_api_key": raw_api_key, + "created_at": 1776800731, + } + + +def _apps(*names_and_ids: tuple[str, str]) -> dict: + """Shorthand for a GetAppsForOrgResponseModel payload.""" + return {"apps": [_app_item(name, app_id=app_id) for name, app_id in names_and_ids]} + + +def _balance_response(**overrides) -> dict: + """Build a BalanceResponse payload.""" + base = { + "org_id": "org_abc", + "credit_balance_cents": 1500, + "pending_debit_balance_cents": 0, + "will_invoice": False, + } + base.update(overrides) + return base + + +# --------------------------------------------------------------------------- +# list_apps +# --------------------------------------------------------------------------- + + +class TestListApps: + def test_parses_apps_list(self): + with _patch_urlopen(_apps(("parallel-cli Users", "app_1"))): + apps = list_apps("at_123") + assert len(apps) == 1 + assert apps[0].app_name == "parallel-cli Users" + assert apps[0].app_id == "app_1" + + def test_empty_apps_list(self): + with _patch_urlopen({"apps": []}): + assert list_apps("at_123") == [] + + def test_missing_apps_field_returns_empty(self): + # GetAppsForOrgResponseModel.apps is Optional; an omitted key is legal. + with _patch_urlopen({}): + assert list_apps("at_123") == [] + + def test_sends_bearer_auth(self): + captured: dict = {} + with _patch_urlopen({"apps": []}, capture=captured): + list_apps("at_xyz") + + assert captured["method"] == "GET" + assert "/service/v1/apps" in captured["url"] + assert any(v == "Bearer at_xyz" for v in captured["headers"].values()) + + def test_respects_service_api_url_env(self, monkeypatch): + monkeypatch.setenv("PARALLEL_SERVICE_API_URL", "http://localhost:8090") + captured: dict = {} + with _patch_urlopen({"apps": []}, capture=captured): + list_apps("at_xyz") + assert captured["url"].startswith("http://localhost:8090/") + + def test_raises_on_malformed_apps_shape(self): + # apps must be a list; a string is invalid. 
+ with _patch_urlopen({"apps": "nope"}): + with pytest.raises(ServiceApiError, match="Unexpected /service/v1/apps response"): + list_apps("at_xyz") + + def test_raises_on_http_error(self): + with _patch_urlopen(_http_error(401, {"error": "unauthorized"})): + with pytest.raises(ServiceApiError, match="failed: 401"): + list_apps("at_xyz") + + +# --------------------------------------------------------------------------- +# create_api_key +# --------------------------------------------------------------------------- + + +class TestCreateApiKey: + def test_returns_typed_api_key_model(self): + with _patch_urlopen(_api_key_response()): + result = create_api_key("at_xyz", "app_1", "parallel-cli-2026-04-21-1432") + assert result.raw_api_key == "sk_minted" + assert result.api_key_name == "parallel-cli-2026-04-21-1432" + assert result.display_value == "sk_***1234" + + def test_request_body_has_only_api_key_name(self): + captured: dict = {} + with _patch_urlopen(_api_key_response(), capture=captured): + create_api_key("at_xyz", "app_42", "parallel-cli-2026-04-21-1432") + + assert captured["method"] == "POST" + assert captured["url"].endswith("/service/v1/apps/app_42/keys") + assert json.loads(captured["body"]) == {"api_key_name": "parallel-cli-2026-04-21-1432"} + + def test_raises_on_malformed_response(self): + # Missing required fields (e.g. api_key_id) — pydantic validation fails. + with _patch_urlopen({"display_value": "sk_***"}): + with pytest.raises(ServiceApiError, match="Unexpected create_api_key response"): + create_api_key("at_xyz", "app_1", "name_1") + + +# --------------------------------------------------------------------------- +# provision_cli_api_key +# --------------------------------------------------------------------------- + + +class TestProvisionCliApiKey: + def test_happy_path(self): + apps_payload = _apps(("Some Other App", "app_other"), (service.PARALLEL_CLI_APP_NAME, "app_cli")) + captured_paths: list[str] = [] + + def dispatch(req): + captured_paths.append(req.full_url) + if req.get_method() == "GET": + return apps_payload + return _api_key_response(raw_api_key="sk_provisioned") + + with _patch_urlopen([dispatch, dispatch]): + key, name = provision_cli_api_key("at_xyz") + + assert key == "sk_provisioned" + assert name.startswith("parallel-cli-") + # The created key must target the CLI app, not the other one. 
+ assert any("/apps/app_cli/keys" in p for p in captured_paths) + + def test_raises_when_app_not_found(self): + with _patch_urlopen(_apps(("Some Other App", "app_other"))): + with pytest.raises(ServiceApiError, match="No app named"): + provision_cli_api_key("at_xyz") + + def test_raises_when_raw_api_key_missing(self): + apps_payload = _apps((service.PARALLEL_CLI_APP_NAME, "app_cli")) + with _patch_urlopen([apps_payload, _api_key_response(raw_api_key=None)]): + with pytest.raises(ServiceApiError, match="no raw_api_key"): + provision_cli_api_key("at_xyz") + + def test_client_id_is_used_in_created_key_name(self): + apps_payload = _apps((service.PARALLEL_CLI_APP_NAME, "app_cli")) + sent_body: dict = {} + + def dispatch(req): + if req.get_method() == "GET": + return apps_payload + sent_body.update(json.loads(req.data.decode())) + return _api_key_response(raw_api_key="sk_ok") + + with _patch_urlopen([dispatch, dispatch]): + _, name = provision_cli_api_key("at_xyz", client_id="cid_abc") + + assert name.startswith("cid_abc-") + assert sent_body["api_key_name"] == name + + +# --------------------------------------------------------------------------- +# _build_key_name +# --------------------------------------------------------------------------- + + +class TestBuildKeyName: + def test_falls_back_to_parallel_cli_prefix_without_client_id(self): + import re + + name = _build_key_name() + # parallel-cli-YYYY-MM-DD-HHMM (HHMM is 4 digits, no colon) + assert re.match(r"^parallel-cli-\d{4}-\d{2}-\d{2}-\d{4}$", name), name + + def test_uses_client_id_as_prefix_when_provided(self): + import re + + name = _build_key_name(client_id="cid_abc123") + # Same date suffix, but the client_id now carries the entropy. + assert re.match(r"^cid_abc123-\d{4}-\d{2}-\d{2}-\d{4}$", name), name + + +# --------------------------------------------------------------------------- +# get_balance +# --------------------------------------------------------------------------- + + +class TestGetBalance: + def test_parses_balance_response(self): + payload = _balance_response( + credit_balance_cents=1234, + pending_debit_balance_cents=56, + will_invoice=False, + ) + with _patch_urlopen(payload): + resp = get_balance("at_xyz") + assert resp.org_id == "org_abc" + assert resp.credit_balance_cents == 1234 + assert resp.pending_debit_balance_cents == 56 + assert resp.will_invoice is False + + def test_defaults_optional_fields_when_omitted(self): + # pending_debit_balance_cents and will_invoice are optional. 
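The naming convention TestBuildKeyName pins down can be reconstructed roughly as follows. Treat this as a sketch (the timezone choice is an assumption); the real `_build_key_name` lives in `parallel_web_tools.core.service`:

```python
from datetime import datetime, timezone


def build_key_name(client_id: str | None = None) -> str:
    # parallel-cli-YYYY-MM-DD-HHMM, or <client_id>-YYYY-MM-DD-HHMM.
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H%M")  # UTC is an assumption
    return f"{client_id or 'parallel-cli'}-{stamp}"
```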
+ with _patch_urlopen({"org_id": "org_x", "credit_balance_cents": 0}): + resp = get_balance("at_xyz") + assert resp.pending_debit_balance_cents == 0 + assert resp.will_invoice is False + + def test_sends_bearer_auth_to_balance_endpoint(self): + captured: dict = {} + with _patch_urlopen(_balance_response(), capture=captured): + get_balance("at_xyz") + + assert captured["method"] == "GET" + assert captured["url"].endswith("/service/v1/balance") + assert any(v == "Bearer at_xyz" for v in captured["headers"].values()) + + def test_respects_service_api_url_env(self, monkeypatch): + monkeypatch.setenv("PARALLEL_SERVICE_API_URL", "http://localhost:8090") + captured: dict = {} + with _patch_urlopen(_balance_response(), capture=captured): + get_balance("at_xyz") + assert captured["url"].startswith("http://localhost:8090/") + + def test_raises_on_http_error(self): + with _patch_urlopen(_http_error(500, {"error": "internal"})): + with pytest.raises(ServiceApiError, match="failed: 500"): + get_balance("at_xyz") + + def test_raises_on_malformed_payload(self): + # Missing required field org_id. + with _patch_urlopen({"credit_balance_cents": 10}): + with pytest.raises(ServiceApiError, match="Unexpected /service/v1/balance response"): + get_balance("at_xyz") + + +# --------------------------------------------------------------------------- +# add_balance +# --------------------------------------------------------------------------- + + +class TestAddBalance: + def test_posts_expected_body_and_parses_response(self): + captured: dict = {} + with _patch_urlopen(_balance_response(credit_balance_cents=1600), capture=captured): + resp = add_balance("at_xyz", amount_cents=100, idempotency_key="key_1234") + + assert captured["method"] == "POST" + assert captured["url"].endswith("/service/v1/balance/add") + assert json.loads(captured["body"]) == {"amount_cents": 100, "idempotency_key": "key_1234"} + assert any(v == "Bearer at_xyz" for v in captured["headers"].values()) + assert resp.credit_balance_cents == 1600 + + def test_raises_on_http_error(self): + with _patch_urlopen(_http_error(402, {"error": "card_declined"})): + with pytest.raises(ServiceApiError, match="failed: 402"): + add_balance("at_xyz", amount_cents=100, idempotency_key="test-key-1") + + def test_raises_on_malformed_response(self): + with _patch_urlopen({"credit_balance_cents": 10}): + with pytest.raises(ServiceApiError, match="Unexpected /service/v1/balance/add response"): + add_balance("at_xyz", amount_cents=100, idempotency_key="test-key-2") diff --git a/tests/test_test_harness.py b/tests/test_test_harness.py new file mode 100644 index 0000000..a8541a0 --- /dev/null +++ b/tests/test_test_harness.py @@ -0,0 +1,38 @@ +"""Regression tests for the shared pytest safety harness.""" + +import asyncio +import socket +import subprocess +import urllib.request +import webbrowser + +import httpx +import pytest + + +class TestNoExternalIoHarness: + def test_blocks_low_level_socket_connections(self): + with pytest.raises(AssertionError, match="Network access is disabled"): + socket.create_connection(("example.com", 443), timeout=1) + + with pytest.raises(AssertionError, match="Network access is disabled"): + socket.getaddrinfo("example.com", 443) + + def test_blocks_high_level_http_clients(self): + with pytest.raises(AssertionError, match="Network access is disabled"): + urllib.request.urlopen("https://example.com") + + with pytest.raises(AssertionError, match="Network access is disabled"): + httpx.get("https://example.com") + + with 
diff --git a/tests/test_test_harness.py b/tests/test_test_harness.py
new file mode 100644
index 0000000..a8541a0
--- /dev/null
+++ b/tests/test_test_harness.py
@@ -0,0 +1,38 @@
+"""Regression tests for the shared pytest safety harness."""
+
+import asyncio
+import socket
+import subprocess
+import urllib.request
+import webbrowser
+
+import httpx
+import pytest
+
+
+class TestNoExternalIoHarness:
+    def test_blocks_low_level_socket_connections(self):
+        with pytest.raises(AssertionError, match="Network access is disabled"):
+            socket.create_connection(("example.com", 443), timeout=1)
+
+        with pytest.raises(AssertionError, match="Network access is disabled"):
+            socket.getaddrinfo("example.com", 443)
+
+    def test_blocks_high_level_http_clients(self):
+        with pytest.raises(AssertionError, match="Network access is disabled"):
+            urllib.request.urlopen("https://example.com")
+
+        with pytest.raises(AssertionError, match="Network access is disabled"):
+            httpx.get("https://example.com")
+
+        with pytest.raises(AssertionError, match="Network access is disabled"):
+            asyncio.run(asyncio.open_connection("example.com", 443))
+
+    def test_blocks_subprocesses(self):
+        with pytest.raises(AssertionError, match="Subprocess execution is disabled"):
+            subprocess.run(["true"], check=False)
+
+    def test_browser_launches_are_stubbed(self):
+        assert webbrowser.open("https://example.com") is True
+        assert webbrowser.open_new("https://example.com") is True
+        assert webbrowser.open_new_tab("https://example.com") is True
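These regression tests imply an autouse guard wired up in the shared conftest. A minimal sketch of such a fixture, with names and messages assumed (the project's real harness evidently also hooks asyncio, httpx, and subprocess):

```python
# conftest.py (hypothetical)
import socket
import webbrowser

import pytest


@pytest.fixture(autouse=True)
def _no_external_io(monkeypatch):
    def _deny(*args, **kwargs):
        raise AssertionError("Network access is disabled in tests")

    monkeypatch.setattr(socket, "create_connection", _deny)
    monkeypatch.setattr(socket, "getaddrinfo", _deny)
    # Stub browser launches so login flows proceed without real I/O.
    for opener in ("open", "open_new", "open_new_tab"):
        monkeypatch.setattr(webbrowser, opener, lambda *a, **k: True)
```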
"https://files.pythonhosted.org/packages/7f/0a/8d17d1a9c06f88d3d030d0b1d4373c1551146e252afe4547ed601c0e697f/black-26.3.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c54a4a82e291a1fee5137371ab488866b7c86a3305af4026bdd4dc78642e1ac", size = 1768388, upload-time = "2026-03-12T03:40:01.765Z" }, + { url = "https://files.pythonhosted.org/packages/52/79/c1ee726e221c863cde5164f925bacf183dfdf0397d4e3f94889439b947b4/black-26.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e131579c243c98f35bce64a7e08e87fb2d610544754675d4a0e73a070a5aa3a", size = 1412969, upload-time = "2026-03-12T03:40:03.252Z" }, + { url = "https://files.pythonhosted.org/packages/73/a5/15c01d613f5756f68ed8f6d4ec0a1e24b82b18889fa71affd3d1f7fad058/black-26.3.1-cp310-cp310-win_arm64.whl", hash = "sha256:5ed0ca58586c8d9a487352a96b15272b7fa55d139fc8496b519e78023a8dab0a", size = 1220345, upload-time = "2026-03-12T03:40:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/17/57/5f11c92861f9c92eb9dddf515530bc2d06db843e44bdcf1c83c1427824bc/black-26.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:28ef38aee69e4b12fda8dba75e21f9b4f979b490c8ac0baa7cb505369ac9e1ff", size = 1851987, upload-time = "2026-03-12T03:40:06.248Z" }, + { url = "https://files.pythonhosted.org/packages/54/aa/340a1463660bf6831f9e39646bf774086dbd8ca7fc3cded9d59bbdf4ad0a/black-26.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bf162ed91a26f1adba8efda0b573bc6924ec1408a52cc6f82cb73ec2b142c", size = 1689499, upload-time = "2026-03-12T03:40:07.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/01/b726c93d717d72733da031d2de10b92c9fa4c8d0c67e8a8a372076579279/black-26.3.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:474c27574d6d7037c1bc875a81d9be0a9a4f9ee95e62800dab3cfaadbf75acd5", size = 1754369, upload-time = "2026-03-12T03:40:09.279Z" }, + { url = "https://files.pythonhosted.org/packages/e3/09/61e91881ca291f150cfc9eb7ba19473c2e59df28859a11a88248b5cbbc4d/black-26.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e9d0d86df21f2e1677cc4bd090cd0e446278bcbbe49bf3659c308c3e402843e", size = 1413613, upload-time = "2026-03-12T03:40:10.943Z" }, + { url = "https://files.pythonhosted.org/packages/16/73/544f23891b22e7efe4d8f812371ab85b57f6a01b2fc45e3ba2e52ba985b8/black-26.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:9a5e9f45e5d5e1c5b5c29b3bd4265dcc90e8b92cf4534520896ed77f791f4da5", size = 1219719, upload-time = "2026-03-12T03:40:12.597Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f8/da5eae4fc75e78e6dceb60624e1b9662ab00d6b452996046dfa9b8a6025b/black-26.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e6f89631eb88a7302d416594a32faeee9fb8fb848290da9d0a5f2903519fc1", size = 1895920, upload-time = "2026-03-12T03:40:13.921Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9f/04e6f26534da2e1629b2b48255c264cabf5eedc5141d04516d9d68a24111/black-26.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41cd2012d35b47d589cb8a16faf8a32ef7a336f56356babd9fcf70939ad1897f", size = 1718499, upload-time = "2026-03-12T03:40:15.239Z" }, + { url = "https://files.pythonhosted.org/packages/04/91/a5935b2a63e31b331060c4a9fdb5a6c725840858c599032a6f3aac94055f/black-26.3.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f76ff19ec5297dd8e66eb64deda23631e642c9393ab592826fd4bdc97a4bce7", size = 1794994, upload-time = "2026-03-12T03:40:17.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/0a/86e462cdd311a3c2a8ece708d22aba17d0b2a0d5348ca34b40cdcbea512e/black-26.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ddb113db38838eb9f043623ba274cfaf7d51d5b0c22ecb30afe58b1bb8322983", size = 1420867, upload-time = "2026-03-12T03:40:18.83Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e5/22515a19cb7eaee3440325a6b0d95d2c0e88dd180cb011b12ae488e031d1/black-26.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:dfdd51fc3e64ea4f35873d1b3fb25326773d55d2329ff8449139ebaad7357efb", size = 1230124, upload-time = "2026-03-12T03:40:20.425Z" }, + { url = "https://files.pythonhosted.org/packages/f5/77/5728052a3c0450c53d9bb3945c4c46b91baa62b2cafab6801411b6271e45/black-26.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:855822d90f884905362f602880ed8b5df1b7e3ee7d0db2502d4388a954cc8c54", size = 1895034, upload-time = "2026-03-12T03:40:21.813Z" }, + { url = "https://files.pythonhosted.org/packages/52/73/7cae55fdfdfbe9d19e9a8d25d145018965fe2079fa908101c3733b0c55a0/black-26.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8a33d657f3276328ce00e4d37fe70361e1ec7614da5d7b6e78de5426cb56332f", size = 1718503, upload-time = "2026-03-12T03:40:23.666Z" }, + { url = "https://files.pythonhosted.org/packages/e1/87/af89ad449e8254fdbc74654e6467e3c9381b61472cc532ee350d28cfdafb/black-26.3.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1cd08e99d2f9317292a311dfe578fd2a24b15dbce97792f9c4d752275c1fa56", size = 1793557, upload-time = "2026-03-12T03:40:25.497Z" }, + { url = "https://files.pythonhosted.org/packages/43/10/d6c06a791d8124b843bf325ab4ac7d2f5b98731dff84d6064eafd687ded1/black-26.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:c7e72339f841b5a237ff14f7d3880ddd0fc7f98a1199e8c4327f9a4f478c1839", size = 1422766, upload-time = "2026-03-12T03:40:27.14Z" }, + { url = "https://files.pythonhosted.org/packages/59/4f/40a582c015f2d841ac24fed6390bd68f0fc896069ff3a886317959c9daf8/black-26.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:afc622538b430aa4c8c853f7f63bc582b3b8030fd8c80b70fb5fa5b834e575c2", size = 1232140, upload-time = "2026-03-12T03:40:28.882Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/e36e27c9cebc1311b7579210df6f1c86e50f2d7143ae4fcf8a5017dc8809/black-26.3.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2d6bfaf7fd0993b420bed691f20f9492d53ce9a2bcccea4b797d34e947318a78", size = 1889234, upload-time = "2026-03-12T03:40:30.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7b/9871acf393f64a5fa33668c19350ca87177b181f44bb3d0c33b2d534f22c/black-26.3.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f89f2ab047c76a9c03f78d0d66ca519e389519902fa27e7a91117ef7611c0568", size = 1720522, upload-time = "2026-03-12T03:40:32.346Z" }, + { url = "https://files.pythonhosted.org/packages/03/87/e766c7f2e90c07fb7586cc787c9ae6462b1eedab390191f2b7fc7f6170a9/black-26.3.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b07fc0dab849d24a80a29cfab8d8a19187d1c4685d8a5e6385a5ce323c1f015f", size = 1787824, upload-time = "2026-03-12T03:40:33.636Z" }, + { url = "https://files.pythonhosted.org/packages/ac/94/2424338fb2d1875e9e83eed4c8e9c67f6905ec25afd826a911aea2b02535/black-26.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:0126ae5b7c09957da2bdbd91a9ba1207453feada9e9fe51992848658c6c8e01c", size = 1445855, upload-time = "2026-03-12T03:40:35.442Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/43/0c3338bd928afb8ee7471f1a4eec3bdbe2245ccb4a646092a222e8669840/black-26.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:92c0ec1f2cc149551a2b7b47efc32c866406b6891b0ee4625e95967c8f4acfb1", size = 1258109, upload-time = "2026-03-12T03:40:36.832Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0d/52d98722666d6fc6c3dd4c76df339501d6efd40e0ff95e6186a7b7f0befd/black-26.3.1-py3-none-any.whl", hash = "sha256:2bd5aa94fc267d38bb21a70d7410a89f1a1d318841855f698746f8e7f51acd1b", size = 207542, upload-time = "2026-03-12T03:36:01.668Z" }, +] + [[package]] name = "boto3" version = "1.43.0" @@ -509,6 +562,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/9c/51f28c3550276bcf35660703ba0ab829a90b88be8cd98a71ef23c2413913/cryptography-47.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cffbba3392df0fa8629bb7f43454ee2925059ee158e23c54620b9063912b86c8", size = 3698916, upload-time = "2026-04-24T19:54:49.782Z" }, ] +[[package]] +name = "datamodel-code-generator" +version = "0.57.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "black" }, + { name = "genson" }, + { name = "inflect" }, + { name = "isort" }, + { name = "jinja2" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5d/44/87d5980f813a1e323c5d726b3ac5fec8c915ce8a77fcdceaf9c00457dbae/datamodel_code_generator-0.57.0.tar.gz", hash = "sha256:0eda778ea06eaa476e542a5f1fe1d14cc3bbf686edb33a0ad6151c7d19089906", size = 932941, upload-time = "2026-05-07T16:21:55.819Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/c1/4fb9a44bb4a305b860c5a5b1866dcccfac3b76f5f170a9e68fc7733e16d2/datamodel_code_generator-0.57.0-py3-none-any.whl", hash = "sha256:d26bf5defe5154493d0aa5a822b7725332b9e9dd2abccc2f8856052286aa83b5", size = 259343, upload-time = "2026-05-07T16:21:53.823Z" }, +] + +[package.optional-dependencies] +ruff = [ + { name = "ruff" }, +] + [[package]] name = "debugpy" version = "1.8.20" @@ -637,6 +715,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/47/dd9a212ef6e343a6857485ffe25bba537304f1913bdbed446a23f7f592e1/filelock-3.29.0-py3-none-any.whl", hash = "sha256:96f5f6344709aa1572bbf631c640e4ebeeb519e08da902c39a001882f30ac258", size = 39812, upload-time = "2026-04-19T15:39:08.752Z" }, ] +[[package]] +name = "genson" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919, upload-time = "2024-05-15T22:08:49.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470, upload-time = "2024-05-15T22:08:47.056Z" }, +] + [[package]] name = "gitdb" version = "4.0.12" @@ -970,6 +1057,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, ] +[[package]] +name = "inflect" +version = 
"7.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, + { name = "typeguard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -1070,6 +1170,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, ] +[[package]] +name = "isort" +version = "8.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/7c/ec4ab396d31b3b395e2e999c8f46dec78c5e29209fac49d1f4dace04041d/isort-8.0.1.tar.gz", hash = "sha256:171ac4ff559cdc060bcfff550bc8404a486fee0caab245679c2abe7cb253c78d", size = 769592, upload-time = "2026-02-28T10:08:20.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/95/c7c34aa53c16353c56d0b802fba48d5f5caa2cdee7958acbcb795c830416/isort-8.0.1-py3-none-any.whl", hash = "sha256:28b89bc70f751b559aeca209e6120393d43fbe2490de0559662be7a9787e3d75", size = 89733, upload-time = "2026-02-28T10:08:19.466Z" }, +] + [[package]] name = "jedi" version = "0.19.2" @@ -1082,6 +1191,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, ] +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + [[package]] name = "jmespath" version = "1.1.0" @@ -1144,6 +1265,91 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + [[package]] name = "matplotlib-inline" version = "0.2.1" @@ -1165,6 +1371,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "more-itertools" +version = "11.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/f7/139d22fef48ac78127d18e01d80cf1be40236ae489769d17f35c3d425293/more_itertools-11.0.2.tar.gz", hash = "sha256:392a9e1e362cbc106a2457d37cabf9b36e5e12efd4ebff1654630e76597df804", size = 144659, upload-time = "2026-04-09T15:01:33.297Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/98/6af411189d9413534c3eb691182bff1f5c6d44ed2f93f2edfe52a1bbceb8/more_itertools-11.0.2-py3-none-any.whl", hash = "sha256:6e35b35f818b01f691643c6c611bc0902f2e92b46c18fffa77ae1e7c46e912e4", size = 71939, upload-time = "2026-04-09T15:01:32.21Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -1483,6 +1707,7 @@ cli = [ { name = "questionary" }, ] dev = [ + { name = "datamodel-code-generator", extra = ["ruff"] }, { name = "duckdb" }, { name = "nest-asyncio" }, { name = "pandas" }, @@ -1535,6 +1760,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "click", specifier = ">=8.1.0" }, + { name = "datamodel-code-generator", extras = ["ruff"], marker = "extra == 'dev'", specifier = ">=0.26.0" }, { name = "duckdb", marker = "extra == 'duckdb'", specifier = ">=1.0.0" }, { name = "httpx", specifier = ">=0.25.0" }, { name = "nest-asyncio", marker = "extra == 'duckdb'", specifier = ">=1.6.0" }, @@ -1581,6 +1807,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" }, ] +[[package]] +name = "pathspec" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/82/42f767fc1c1143d6fd36efb827202a2d997a375e160a71eb2888a925aac1/pathspec-1.1.1.tar.gz", hash = "sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a", size = 135180, upload-time = "2026-04-27T01:46:08.907Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f1/d9/7fb5aa316bc299258e68c73ba3bddbc499654a07f151cba08f6153988714/pathspec-1.1.1-py3-none-any.whl", hash = "sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189", size = 57328, upload-time = "2026-04-27T01:46:07.06Z" }, +] + [[package]] name = "pefile" version = "2024.8.26" @@ -2147,6 +2382,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, ] +[[package]] +name = "pytokens" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/24/f206113e05cb8ef51b3850e7ef88f20da6f4bf932190ceb48bd3da103e10/pytokens-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a44ed93ea23415c54f3face3b65ef2b844d96aeb3455b8a69b3df6beab6acc5", size = 161522, upload-time = "2026-01-30T01:02:50.393Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e9/06a6bf1b90c2ed81a9c7d2544232fe5d2891d1cd480e8a1809ca354a8eb2/pytokens-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:add8bf86b71a5d9fb5b89f023a80b791e04fba57960aa790cc6125f7f1d39dfe", size = 246945, upload-time = "2026-01-30T01:02:52.399Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/f6fb1007a4c3d8b682d5d65b7c1fb33257587a5f782647091e3408abe0b8/pytokens-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:670d286910b531c7b7e3c0b453fd8156f250adb140146d234a82219459b9640c", size = 259525, upload-time = "2026-01-30T01:02:53.737Z" }, + { url = "https://files.pythonhosted.org/packages/04/92/086f89b4d622a18418bac74ab5db7f68cf0c21cf7cc92de6c7b919d76c88/pytokens-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e691d7f5186bd2842c14813f79f8884bb03f5995f0575272009982c5ac6c0f7", size = 262693, upload-time = "2026-01-30T01:02:54.871Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7b/8b31c347cf94a3f900bdde750b2e9131575a61fdb620d3d3c75832262137/pytokens-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:27b83ad28825978742beef057bfe406ad6ed524b2d28c252c5de7b4a6dd48fa2", size = 103567, upload-time = "2026-01-30T01:02:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, + { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, + { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, + { url = "https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, + { url = "https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, + { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = "2026-01-30T01:03:17.458Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, + { url = "https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, + { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, + { url = "https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, + { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, +] + [[package]] name = "pytz" version = "2026.1.post1" @@ -2719,6 +2993,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/62/7fb948aace38d2f6329261bb33c035a8484549c74f1db28649c7a4c6fed9/ty-0.0.33-py3-none-win_arm64.whl", hash = "sha256:0d44f99ba1b441e55e2aa301b2ac0a21112784931b46a5f66f4ea9efe5620d97", size = 10742673, upload-time = "2026-04-28T10:45:35.555Z" }, ] +[[package]] +name = "typeguard" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2b/e8/66e25efcc18542d58706ce4e50415710593721aae26e794ab1dec34fb66f/typeguard-4.5.1.tar.gz", hash = "sha256:f6f8ecbbc819c9bc749983cc67c02391e16a9b43b8b27f15dc70ed7c4a007274", size = 80121, upload-time = "2026-02-19T16:09:03.392Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl", hash = "sha256:44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40", size = 36745, upload-time = "2026-02-19T16:09:01.6Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0"