From a929d825d73a3e5ea74eff35721e8d19d4c09e67 Mon Sep 17 00:00:00 2001
From: JSONbored <49853598+JSONbored@users.noreply.github.com>
Date: Sat, 9 May 2026 17:58:22 -0700
Subject: [PATCH] fix(security): harden central fleet checks

---
 .github/workflows/control-plane.yml  |   2 -
 src/aio_fleet/cli.py                 |  27 ++++--
 src/aio_fleet/control_plane.py       |  37 ++++++--
 src/aio_fleet/github_policy.py       |  14 ++-
 src/aio_fleet/safety.py              |  13 +++
 src/aio_fleet/upstream.py            | 136 +++++++++++++++++++++++++--
 src/aio_fleet/validators.py          | 107 ++++++++++++++++++---
 tests/test_cli.py                    |  22 +++--
 tests/test_control_plane.py          |  17 +++-
 tests/test_control_plane_workflow.py |   3 +-
 tests/test_upstream.py               |  70 ++++++++++++++
 tests/test_validators.py             | 132 ++++++++++++++++++++++++++
 12 files changed, 529 insertions(+), 51 deletions(-)

diff --git a/.github/workflows/control-plane.yml b/.github/workflows/control-plane.yml
index d339c80..ffba497 100644
--- a/.github/workflows/control-plane.yml
+++ b/.github/workflows/control-plane.yml
@@ -537,8 +537,6 @@ jobs:
           )
           if [[ "${TARGET_PUBLISH}" == "true" ]]; then
             args+=(--publish)
-          else
-            args+=(--no-integration)
           fi
           python -m aio_fleet "${args[@]}" 2>&1 | tee central-control-check.log
diff --git a/src/aio_fleet/cli.py b/src/aio_fleet/cli.py
index f7df17d..9f314f8 100644
--- a/src/aio_fleet/cli.py
+++ b/src/aio_fleet/cli.py
@@ -72,6 +72,7 @@
     catalog_repo_failures,
     derived_repo_failures,
     pinned_action_failures,
+    repo_local_workflow_failures,
     repo_policy_failures,
     template_metadata_failures,
     tracked_artifact_failures,
@@ -536,6 +537,18 @@ def cmd_validate_actions(args: argparse.Namespace) -> int:
     return 0
 
 
+def cmd_verify_caller(args: argparse.Namespace) -> int:
+    manifest = load_manifest(Path(args.manifest))
+    repo = _repo_for_identifier(manifest, args.repo)
+    repo_at_path = _repo_with_path(repo, Path(args.repo_path).resolve())
+    failures = repo_local_workflow_failures(repo_at_path)
+    if failures:
+        print("\n".join(failures), file=sys.stderr)
+        return 1
+    print(f"{repo.name} caller policy checks passed")
+    return 0
+
+
 def cmd_validate_derived(args: argparse.Namespace) -> int:
     failures = derived_repo_failures(
         Path(args.repo_path).resolve(),
@@ -1090,14 +1103,7 @@ def cmd_registry_publish(args: argparse.Namespace) -> int:
     if args.dry_run:
         print(" ".join(shlex.quote(part) for part in command))
         return 0
-    tags = compute_registry_tags(repo, sha=sha, component=args.component)
-    preflight_failures = verify_registry_tags(tags.all_tags)
-    if not preflight_failures:
-        print(f"{repo.name}:{args.component}: registry=already-current", flush=True)
-        return 0
-    print(f"{repo.name}:{args.component}: registry=preflight-missing", flush=True)
-    for failure in preflight_failures:
-        print(f"{repo.name}:{args.component}: preflight: {failure}", file=sys.stderr)
+    print(f"{repo.name}:{args.component}: registry=publishing", flush=True)
     try:
         with _registry_publish_environment(repo) as publish_env:
             result = _run(command, cwd=repo.path, env=publish_env)
@@ -2660,6 +2666,11 @@ def build_parser() -> argparse.ArgumentParser:
     actions.add_argument("--repo-path", default=".")
     actions.set_defaults(func=cmd_validate_actions)
 
+    caller = sub.add_parser("verify-caller")
+    caller.add_argument("--repo", required=True)
+    caller.add_argument("--repo-path", required=True)
+    caller.set_defaults(func=cmd_verify_caller)
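+    # Illustrative invocation (flag values are examples, not part of the change):
+    #   python -m aio_fleet.cli --manifest fleet.yml verify-caller \
+    #     --repo example-aio --repo-path /path/to/app/checkout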
+
     derived = sub.add_parser("validate-derived")
     derived.add_argument("--repo-path", default=".")
     derived.add_argument("--strict-placeholders", action="store_true")
diff --git a/src/aio_fleet/control_plane.py b/src/aio_fleet/control_plane.py
index b306d0f..1761cbf 100644
--- a/src/aio_fleet/control_plane.py
+++ b/src/aio_fleet/control_plane.py
@@ -68,23 +68,40 @@ def central_check_steps(
     include_integration: bool = True,
 ) -> list[Step]:
     manifest_args = ["--manifest", str(manifest_path)] if manifest_path else []
+    trusted_cwd = _trusted_aio_root()
     steps = [
         Step(
-            "validate-template-common",
+            "validate-repo",
             [
                 sys.executable,
                 "-m",
                 "aio_fleet.cli",
                 *manifest_args,
-                "validate-template-common",
+                "validate-repo",
                 "--repo",
                 repo.name,
                 "--repo-path",
                 str(repo.path),
             ],
-            repo.path,
+            trusted_cwd,
             inherit_secrets=False,
-        )
+        ),
+        Step(
+            "verify-caller",
+            [
+                sys.executable,
+                "-m",
+                "aio_fleet.cli",
+                *manifest_args,
+                "verify-caller",
+                "--repo",
+                repo.name,
+                "--repo-path",
+                str(repo.path),
+            ],
+            trusted_cwd,
+            inherit_secrets=False,
+        ),
     ]
     install = _install_test_dependencies_step(repo.path)
     if install is not None:
@@ -113,7 +130,7 @@ def central_check_steps(
     prebuilt_integration_image = False
     if (
         include_integration
-        and event in {"push", "release", "workflow_dispatch"}
+        and event in {"pull_request", "push", "release", "workflow_dispatch"}
         and integration_args
     ):
         if publish:
@@ -157,7 +174,7 @@ def central_check_steps(
                 str(repo.path),
                 "--no-fix",
             ],
-            repo.path,
+            trusted_cwd,
             inherit_secrets=False,
         )
     )
@@ -186,7 +203,7 @@ def central_check_steps(
                 "--component",
                 component,
             ],
-            repo.path,
+            trusted_cwd,
             stream_output=True,
             timeout_seconds=_repo_timeout_seconds(
                 repo, "registry_publish_timeout_seconds", default=3600
@@ -196,6 +213,10 @@
     return steps
 
 
+def _trusted_aio_root() -> Path:
+    return Path(__file__).resolve().parents[2]
+
+
 def run_steps(steps: list[Step], *, dry_run: bool = False) -> list[str]:
     failures: list[str] = []
     for step in steps:
@@ -443,7 +464,7 @@ def _repo_python(repo_path: Path) -> str:
 
 
 def _install_test_dependencies_step(repo_path: Path) -> Step | None:
     if (repo_path / "tests").exists():
-        aio_root = Path(__file__).resolve().parents[2]
+        aio_root = _trusted_aio_root()
         return Step(
             "install-test-deps",
             [
diff --git a/src/aio_fleet/github_policy.py b/src/aio_fleet/github_policy.py
index 3badf57..888fea7 100644
--- a/src/aio_fleet/github_policy.py
+++ b/src/aio_fleet/github_policy.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
+import os
 import shutil
 import subprocess  # nosec B404
 from pathlib import Path
@@ -203,8 +204,9 @@ def _gh_json(args: list[str]) -> Any:
     gh = shutil.which("gh")
     if gh is None:
         raise RuntimeError("gh CLI is required for GitHub policy validation")
+    env = _gh_env()
     result = subprocess.run(  # nosec B603
-        [gh, *args], check=False, text=True, capture_output=True
+        [gh, *args], check=False, text=True, capture_output=True, env=env
     )
     if result.returncode != 0:
         raise RuntimeError(result.stderr.strip() or f"gh {' '.join(args)} failed")
     text = result.stdout.strip()
@@ -212,6 +214,16 @@
     return json.loads(text) if text else None
 
 
+def _gh_env() -> dict[str, str]:
+    env = dict(os.environ)
+    if not env.get("GH_TOKEN"):
+        for key in ("AIO_FLEET_WORKFLOW_TOKEN", "AIO_FLEET_CHECK_TOKEN", "APP_TOKEN"):
+            if env.get(key):
+                env["GH_TOKEN"] = env[key]
+                break
+    return env
+
+
 def _deep_merge(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
     merged = dict(base)
     for key, value in override.items():
diff --git a/src/aio_fleet/safety.py b/src/aio_fleet/safety.py
index 3a2f707..13a8fe9 100644
--- a/src/aio_fleet/safety.py
+++ b/src/aio_fleet/safety.py
@@ -2,6 +2,7 @@
 
 import base64
 import json
+import os
 import re
 import subprocess  # nosec B404
 import urllib.error
@@ -627,11 +628,13 @@ def _confidence(level: str, signals: list[str], warnings: list[str]) -> float:
 
 
 def _gh_json(args: list[str], *, check: bool = True) -> Any:
+    env = _gh_env()
     result = subprocess.run(  # nosec
         ["gh", *args],
         text=True,
         capture_output=True,
         check=False,
+        env=env,
     )
     if result.returncode != 0:
         if check:
@@ -639,3 +642,13 @@ def _gh_json(args: list[str], *, check: bool = True) -> Any:
         return None
     text = result.stdout.strip()
     return json.loads(text) if text else None
+
+
+def _gh_env() -> dict[str, str]:
+    env = dict(os.environ)
+    if not env.get("GH_TOKEN"):
+        for key in ("AIO_FLEET_WORKFLOW_TOKEN", "AIO_FLEET_CHECK_TOKEN", "APP_TOKEN"):
+            if env.get(key):
+                env["GH_TOKEN"] = env[key]
+                break
+    return env
diff --git a/src/aio_fleet/upstream.py b/src/aio_fleet/upstream.py
index 8bf7b98..53346cd 100644
--- a/src/aio_fleet/upstream.py
+++ b/src/aio_fleet/upstream.py
@@ -9,7 +9,7 @@
 import urllib.error
 import urllib.parse
 import urllib.request
-from dataclasses import dataclass
+from dataclasses import dataclass, replace
 from functools import lru_cache
 from pathlib import Path
 from typing import Any
@@ -78,17 +78,139 @@ def monitor_repo(
     *,
     write: bool = False,
 ) -> list[UpstreamMonitorResult]:
-    results: list[UpstreamMonitorResult] = []
-    for config in monitor_configs(repo):
-        result = evaluate_monitor(repo, config)
-        results.append(result)
-        if write and result.updates_available and result.strategy == "pr":
+    configs = monitor_configs(repo)
+    results = [evaluate_monitor(repo, config) for config in configs]
+    results = _align_shared_release_digest_groups(configs, results)
+    if write:
+        _write_monitor_results(repo, configs, results)
+    return results
+
+
+def _write_monitor_results(
+    repo: RepoConfig,
+    configs: list[dict[str, Any]],
+    results: list[UpstreamMonitorResult],
+) -> None:
+    for config, result in zip(configs, results, strict=True):
+        if result.updates_available and result.strategy == "pr":
             write_arg(result.dockerfile, result.version_key, result.latest_version)
             if result.digest_key and result.latest_digest:
                 write_arg(result.dockerfile, result.digest_key, result.latest_digest)
             if result.version_update:
                 update_submodule(repo, config, result)
-    return results
+
+
+def _align_shared_release_digest_groups(
+    configs: list[dict[str, Any]],
+    results: list[UpstreamMonitorResult],
+) -> list[UpstreamMonitorResult]:
+    grouped: dict[tuple[Path, str, str, bool, str], list[int]] = {}
+    for index, (config, result) in enumerate(zip(configs, results, strict=True)):
+        if (
+            config.get("source") != "github-releases"
+            or not result.digest_key
+            or not str(config.get("image", "")).strip()
+            or not str(config.get("digest_source", "")).strip()
+            or not result.version_key
+        ):
+            continue
+        key = (
+            result.dockerfile.resolve(),
+            result.version_key,
+            str(config.get("repo", "")),
+            bool(config.get("stable_only", True)),
+            str(config.get("version_strip_prefix", "")),
+        )
+        grouped.setdefault(key, []).append(index)
+
+    aligned = list(results)
+    for (
+        _dockerfile,
+        _version_key,
+        upstream_repo,
+        stable_only,
+        strip_prefix,
+    ), indexes in grouped.items():
+        if len(indexes) < 2:
+            continue
+        candidates, skipped = github_release_candidates_result(
+            upstream_repo,
+            stable_only=stable_only,
+            strip_prefix=strip_prefix,
+        )
+        if not candidates:
+            continue  # no published releases; keep current pins for this group
+        current_version = aligned[indexes[0]].current_version
+        current_key = (
+            version_sort_key(current_version)
+            if SEMVER_RE.match(current_version)
+            else None
+        )
+        missing: list[dict[str, str]] = []
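+        # Candidates arrive newest-first: take the first release that is newer
+        # than the current pin and has a resolvable registry digest for every
+        # component in the group, so the group moves to one shared version.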
+        selected_version = current_version
+        selected_digests = {index: aligned[index].current_digest for index in indexes}
+        selected_tag = candidates[0].tag
+
+        for candidate in candidates:
+            selected_tag = candidate.tag
+            if (
+                current_key is not None
+                and version_sort_key(candidate.version) <= current_key
+            ):
+                if candidate.version != current_version:
+                    missing.append(
+                        {
+                            "version": candidate.version,
+                            "reason": "not-newer-than-current",
+                        }
+                    )
+                break
+            candidate_digests: dict[int, str] = {}
+            candidate_missing = False
+            for index in indexes:
+                config = configs[index]
+                image = str(config.get("image", "")).strip()
+                registry = str(config.get("digest_source", "")).strip()
+                try:
+                    candidate_digests[index] = registry_digest_for_version(
+                        image,
+                        candidate.version,
+                        registry=registry,
+                        prefix=str(config.get("digest_tag_prefix", "")),
+                    )
+                except RegistryDigestNotFoundError:
+                    missing.append(
+                        {
+                            "version": candidate.version,
+                            "reason": f"missing-{registry}-digest",
+                        }
+                    )
+                    candidate_missing = True
+            if candidate_missing:
+                continue
+            selected_version = candidate.version
+            selected_digests = candidate_digests
+            break
+
+        skipped_versions = tuple(missing) + skipped_github_release_report(
+            skipped, latest_tag=selected_tag
+        )
+        for index in indexes:
+            result = aligned[index]
+            latest_digest = selected_digests[index]
+            aligned[index] = replace(
+                result,
+                latest_version=selected_version,
+                latest_digest=latest_digest,
+                version_update=selected_version != result.current_version,
+                digest_update=bool(result.digest_key)
+                and latest_digest != result.current_digest,
+                submodule_ref=submodule_ref_for_version(
+                    configs[index],
+                    latest_version=selected_version,
+                    current_version=result.current_version,
+                ),
+                skipped_versions=skipped_versions,
+            )
+    return aligned
 
 
 def monitor_configs(repo: RepoConfig) -> list[dict[str, Any]]:
diff --git a/src/aio_fleet/validators.py b/src/aio_fleet/validators.py
index ac00997..d3e65d3 100644
--- a/src/aio_fleet/validators.py
+++ b/src/aio_fleet/validators.py
@@ -10,6 +10,7 @@
 from xml.etree.ElementTree import Element, ParseError, tostring  # nosec B405
 
 import defusedxml.ElementTree as DefusedET
+from defusedxml.common import DefusedXmlException
 
 from aio_fleet.catalog import validate_catalog_asset_path
 from aio_fleet.manifest import FleetManifest, RepoConfig
@@ -276,7 +277,9 @@ def publish_platform_failures(repo: RepoConfig) -> list[str]:
 
 
 def pinned_action_failures(repo_path: Path) -> list[str]:
     workflow_paths = [
         *repo_path.joinpath(".github", "workflows").glob("*.yml"),
+        *repo_path.joinpath(".github", "workflows").glob("*.yaml"),
         *repo_path.joinpath(".github", "actions").glob("*/action.yml"),
+        *repo_path.joinpath(".github", "actions").glob("*/action.yaml"),
     ]
 
     failures: list[str] = []
@@ -293,12 +296,24 @@ def pinned_action_failures(repo_path: Path) -> list[str]:
     return failures
 
 
+def repo_local_workflow_failures(repo: RepoConfig) -> list[str]:
+    if repo.publish_profile == "template":
+        return []
+    workflow_dir = repo.path / ".github" / "workflows"
+    if workflow_dir.exists():
+        return [
+            f"{repo.name}: .github/workflows is disabled; app repos are checked by aio-fleet / required"
+        ]
+    return []
+
+
 def repo_policy_failures(repo: RepoConfig, manifest: FleetManifest) -> list[str]:
     return [
         *catalog_asset_failures(repo),
         *template_metadata_failures(repo, manifest),
         *runtime_contract_failures(repo),
         *publish_platform_failures(repo),
+        *repo_local_workflow_failures(repo),
         *pinned_action_failures(repo.path),
         *tracked_artifact_failures(repo.path),
     ]
@@ -723,19 +738,39 @@ def _icon_quality_findings(
 
 
 def _require_file(repo_path: Path, relative_path: str, failures: list[str]) -> bool:
-    if not (repo_path / relative_path).is_file():
+    path = _repo_relative_path(repo_path, relative_path, failures)
+    if path is None:
+        return False
+    if not path.is_file():
         failures.append(f"missing required file: {relative_path}")
         return False
     return True
 
 
 def _require_absent(repo_path: Path, relative_path: str, failures: list[str]) -> None:
-    if (repo_path / relative_path).exists():
+    path = _repo_relative_path(repo_path, relative_path, failures)
+    if path is not None and path.exists():
         failures.append(
             f"remove template placeholder path in derived repo: {relative_path}"
         )
 
 
+def _repo_relative_path(
+    repo_path: Path, relative_path: str, failures: list[str]
+) -> Path | None:
+    requested = Path(relative_path)
+    if requested.is_absolute():
+        failures.append(f"repo path must be relative: {relative_path}")
+        return None
+    resolved = (repo_path / requested).resolve()
+    try:
+        resolved.relative_to(repo_path)
+    except ValueError:
+        failures.append(f"repo path escapes checkout: {relative_path}")
+        return None
+    return resolved
+
+
 def _effective_template_xml(repo_path: Path) -> str:
     root_xml_files = sorted(
         path.name for path in repo_path.glob("*.xml") if path.is_file()
@@ -851,7 +886,11 @@ def _xml_runtime_contract_failures(
         target = (config.attrib.get("Target") or "").strip()
         if not target:
             continue
-        if config.attrib.get("Type") == "Port" and target not in exposed_ports:
+        if (
+            config.attrib.get("Type") == "Port"
+            and target not in exposed_ports
+            and _port_config_requires_exposed_container_port(config)
+        ):
             failures.append(
                 f"{repo.name}: {source} port target {target} is not exposed by {relative_dockerfile}"
             )
@@ -895,6 +934,14 @@ def _xml_runtime_contract_failures(
     return failures
 
 
+def _port_config_requires_exposed_container_port(config: Element) -> bool:
+    if config.attrib.get("Required") == "true":
+        return True
+    default = (config.attrib.get("Default") or "").strip()
+    selected = (config.text or "").strip()
+    return bool(default or selected)
+
+
 def _dockerfile_arg_defaults(text: str) -> dict[str, str]:
     defaults: dict[str, str] = {}
     for line in text.splitlines():
@@ -939,7 +986,10 @@ def _check_no_placeholder(
     failures: list[str],
 ) -> None:
     existing_paths = [
-        repo_path / path for path in relative_paths if (repo_path / path).exists()
+        resolved
+        for path in relative_paths
+        if (resolved := _repo_relative_path(repo_path, path, failures)) is not None
+        and resolved.exists()
     ]
     for path in existing_paths:
         if placeholder in path.read_text(errors="ignore"):
@@ -964,9 +1014,12 @@ def _parse_xml(repo: RepoConfig, source: str, failures: list[str]) -> Element |
     xml_path = repo.path / source
     if not xml_path.exists():
         return None
+    if not xml_path.is_file():
+        failures.append(f"{repo.name}: catalog XML {source} must be a file")
+        return None
     try:
         return DefusedET.parse(xml_path).getroot()
-    except ParseError as exc:
+    except (DefusedXmlException, OSError, ParseError) as exc:
         failures.append(f"{repo.name}: unable to parse catalog XML {source}: {exc}")
         return None
 
 
@@ -977,9 +1030,12 @@ def _parse_catalog_xml(
     xml_path: Path,
     failures: list[str],
 ) -> Element | None:
+    if not xml_path.is_file():
+        failures.append(f"{repo_name}: catalog XML {target} must be a file")
+        return None
     try:
         return DefusedET.parse(xml_path).getroot()
-    except ParseError as exc:
+    except (DefusedXmlException, OSError, ParseError) as exc:
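+        # OSError (unreadable path) and DefusedXmlException (DTD/entity abuse)
+        # are reported exactly like malformed XML, so validation fails closed.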
XML {target}: {exc}") return None @@ -1218,16 +1274,45 @@ def _repository_registry_failures( ) image_name = _repository_image_name(repository) - if image_name.startswith("jsonbored/"): - expected_registry = f"https://hub.docker.com/r/{image_name}" - if registry and registry != expected_registry: + if repo.publish_profile != "template": + expected_images = _expected_image_names(repo, source.removeprefix("catalog ")) + if image_name not in expected_images: failures.append( - f"{repo.name}: {source} must be {expected_registry}, got {registry}" + f"{repo.name}: {source} must use one of {sorted(expected_images)}, got {repository}" ) + return failures + + expected_registry = f"https://hub.docker.com/r/{image_name}" + if registry and registry != expected_registry: + failures.append( + f"{repo.name}: {source} must be {expected_registry}, got {registry}" + ) return failures +def _expected_image_names(repo: RepoConfig, source: str) -> set[str]: + expected = {repo.image_name.lower()} + components = repo.raw.get("components", {}) + if isinstance(components, dict): + for component in components.values(): + if not isinstance(component, dict): + continue + image_name = str(component.get("image_name", "")).strip().lower() + if not image_name: + continue + xml_paths = component.get("xml_paths", []) + if isinstance(xml_paths, str): + component_xml_paths = {xml_paths} + elif isinstance(xml_paths, list): + component_xml_paths = {str(item) for item in xml_paths} + else: + component_xml_paths = set() + if source in component_xml_paths: + expected.add(image_name) + return expected + + def _repository_registry_host(repository: str) -> str: first_segment = repository.split("/", 1)[0].lower() if "." in first_segment or ":" in first_segment: @@ -1240,7 +1325,7 @@ def _repository_image_name(repository: str) -> str: tail = image.rsplit("/", 1)[-1] if ":" in tail: image = image.rsplit(":", 1)[0] - return image + return image.lower() def _common_template_quality_failures( diff --git a/tests/test_cli.py b/tests/test_cli.py index 99899d4..4c1b16f 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -732,11 +732,11 @@ def fake_registry_verify(args: Namespace) -> int: assert verify_calls[0].repo_path == str(repo_path) # nosec B101 assert verify_calls[0].sha == "a" * 40 # nosec B101 captured = capsys.readouterr() - assert "example-aio:aio: registry=preflight-missing" in captured.out # nosec B101 - assert "example-aio:aio: preflight: tag missing" in captured.err # nosec B101 + assert "example-aio:aio: registry=publishing" in captured.out # nosec B101 + assert "preflight" not in captured.err # nosec B101 -def test_registry_publish_skips_build_when_tags_are_current( +def test_registry_publish_rebuilds_when_tags_are_current( tmp_path: Path, monkeypatch, capsys ) -> None: manifest, repo_path = _write_minimal_manifest(tmp_path) @@ -744,14 +744,19 @@ def test_registry_publish_skips_build_when_tags_are_current( monkeypatch.delenv("DOCKERHUB_TOKEN", raising=False) monkeypatch.delenv("AIO_FLEET_GHCR_TOKEN", raising=False) - def fail_run( + seen: dict[str, object] = {} + + def fake_run( command: list[str], cwd: Path | None = None, env=None ) -> SimpleNamespace: - del command, cwd, env - raise AssertionError("current registry tags should skip docker build") + seen["publish_command"] = command + seen["publish_cwd"] = cwd + seen["publish_env"] = env + return SimpleNamespace(returncode=0, stdout="", stderr="") - monkeypatch.setattr(cli, "_run", fail_run) + monkeypatch.setattr(cli, "_run", fake_run) monkeypatch.setattr(cli, 
"verify_registry_tags", lambda _tags: []) + monkeypatch.setattr(cli, "cmd_registry_verify", lambda _args: 0) result = cmd_registry_publish( Namespace( @@ -765,8 +770,9 @@ def fail_run( ) assert result == 0 # nosec B101 + assert seen["publish_cwd"] == repo_path.resolve() # nosec B101 assert ( - "example-aio:aio: registry=already-current" in capsys.readouterr().out + "example-aio:aio: registry=publishing" in capsys.readouterr().out ) # nosec B101 diff --git a/tests/test_control_plane.py b/tests/test_control_plane.py index ce22d85..3a3fb45 100644 --- a/tests/test_control_plane.py +++ b/tests/test_control_plane.py @@ -14,21 +14,29 @@ ROOT = Path(__file__).resolve().parents[1] -def test_central_check_steps_for_pr_skip_publish_and_integration( +def test_central_check_steps_for_pr_include_policy_and_integration( tmp_path: Path, ) -> None: repo = _repo_with_path(load_manifest(ROOT / "fleet.yml").repo("sure-aio"), tmp_path) (tmp_path / "tests").mkdir() + (tmp_path / "aio_fleet").mkdir() + (tmp_path / "aio_fleet" / "__init__.py").write_text("") + (tmp_path / "aio_fleet" / "cli.py").write_text("raise SystemExit(0)\n") steps = central_check_steps(repo, event="pull_request", include_trunk=False) names = [step.name for step in steps] assert names == [ - "validate-template-common", + "validate-repo", + "verify-caller", "install-test-deps", + "integration-tests", ] # nosec B101 - assert str(ROOT) in steps[1].command[-1] # nosec B101 - assert steps[1].command[-1].endswith("[app-tests]") # nosec B101 + assert steps[0].cwd == ROOT # nosec B101 + assert steps[1].cwd == ROOT # nosec B101 + assert steps[2].cwd == tmp_path # nosec B101 + assert str(ROOT) in steps[2].command[-1] # nosec B101 + assert steps[2].command[-1].endswith("[app-tests]") # nosec B101 def test_central_check_steps_for_push_include_integration_trunk_and_publish() -> None: @@ -96,6 +104,7 @@ def test_central_check_steps_can_skip_integration_for_poll_runs() -> None: assert "integration-tests" not in names # nosec B101 assert "unit-tests" in names # nosec B101 assert "trunk" in names # nosec B101 + assert names[:2] == ["validate-repo", "verify-caller"] # nosec B101 def test_run_steps_reports_timeout(monkeypatch, tmp_path: Path) -> None: diff --git a/tests/test_control_plane_workflow.py b/tests/test_control_plane_workflow.py index d72bcca..178f78b 100644 --- a/tests/test_control_plane_workflow.py +++ b/tests/test_control_plane_workflow.py @@ -113,8 +113,7 @@ def test_control_check_steps_gate_publish_explicitly() -> None: assert "args+=(--publish)" in manual["run"] # nosec B101 assert 'if [[ "${TARGET_PUBLISH}" == "true" ]]' in poll["run"] # nosec B101 assert "args+=(--publish)" in poll["run"] # nosec B101 - assert "else" in poll["run"] # nosec B101 - assert "args+=(--no-integration)" in poll["run"] # nosec B101 + assert "args+=(--no-integration)" not in poll["run"] # nosec B101 def test_registry_credentials_are_not_logged_in_before_app_checks() -> None: diff --git a/tests/test_upstream.py b/tests/test_upstream.py index 4d07b07..95fabe8 100644 --- a/tests/test_upstream.py +++ b/tests/test_upstream.py @@ -114,6 +114,76 @@ def fake_digest(_image: str, version: str, **_kwargs) -> str: } +def test_shared_version_digest_group_uses_one_resolvable_release( + tmp_path: Path, monkeypatch +) -> None: + repo_path = tmp_path / "repo" + repo_path.mkdir() + dockerfile = repo_path / "Dockerfile" + dockerfile.write_text( + "ARG UPSTREAM_DIFY_VERSION=1.9.0\n" + "ARG UPSTREAM_DIFY_API_DIGEST=sha256:api-old\n" + "ARG UPSTREAM_DIFY_WEB_DIGEST=sha256:web-old\n" + ) + 
+    manifest = tmp_path / "fleet.yml"
+    manifest.write_text(f"""
+owner: JSONbored
+repos:
+  dify-aio:
+    path: {repo_path}
+    app_slug: dify-aio
+    image_name: jsonbored/dify-aio
+    docker_cache_scope: dify-aio-image
+    pytest_image_tag: dify-aio:pytest
+    upstream_monitor:
+      - component: dify-api
+        source: github-releases
+        repo: langgenius/dify
+        image: langgenius/dify-api
+        digest_source: dockerhub
+        dockerfile: Dockerfile
+        version_key: UPSTREAM_DIFY_VERSION
+        digest_key: UPSTREAM_DIFY_API_DIGEST
+        strategy: pr
+      - component: dify-web
+        source: github-releases
+        repo: langgenius/dify
+        image: langgenius/dify-web
+        digest_source: dockerhub
+        dockerfile: Dockerfile
+        version_key: UPSTREAM_DIFY_VERSION
+        digest_key: UPSTREAM_DIFY_WEB_DIGEST
+        strategy: pr
+""")
+    candidates = (
+        upstream.GitHubReleaseCandidate(tag="2.0.0", version="2.0.0"),
+        upstream.GitHubReleaseCandidate(tag="1.9.1", version="1.9.1"),
+    )
+
+    monkeypatch.setattr(
+        upstream,
+        "github_release_candidates_result",
+        lambda *_args, **_kwargs: (candidates, ()),
+    )
+
+    def fake_digest(image: str, version: str, **_kwargs) -> str:
+        if image == "langgenius/dify-web" and version == "2.0.0":
+            raise upstream.RegistryDigestNotFoundError("missing")
+        return f"sha256:{image.rsplit('-', 1)[-1]}-{version}"
+
+    monkeypatch.setattr(upstream, "registry_digest_for_version", fake_digest)
+
+    results = upstream.monitor_repo(
+        load_manifest(manifest).repo("dify-aio"), write=True
+    )
+
+    assert {result.latest_version for result in results} == {"1.9.1"}  # nosec B101
+    text = dockerfile.read_text()
+    assert "ARG UPSTREAM_DIFY_VERSION=1.9.1" in text  # nosec B101
+    assert "ARG UPSTREAM_DIFY_API_DIGEST=sha256:api-1.9.1" in text  # nosec B101
+    assert "ARG UPSTREAM_DIFY_WEB_DIGEST=sha256:web-1.9.1" in text  # nosec B101
+
+
 def test_upstream_monitor_write_updates_dockerfile(tmp_path: Path, monkeypatch) -> None:
     repo_path = tmp_path / "repo"
     repo_path.mkdir()
diff --git a/tests/test_validators.py b/tests/test_validators.py
index b819cc2..1c402ed 100644
--- a/tests/test_validators.py
+++ b/tests/test_validators.py
@@ -9,6 +9,8 @@
     derived_repo_failures,
     pinned_action_failures,
     publish_platform_failures,
+    repo_local_workflow_failures,
+    runtime_contract_failures,
     template_metadata_failures,
     tracked_artifact_failures,
 )
@@ -116,6 +118,40 @@ def test_pinned_action_validation_rejects_tagged_actions(tmp_path: Path) -> None
     ]
 
 
+def test_pinned_action_validation_covers_yaml_and_action_yaml(tmp_path: Path) -> None:
+    workflow_dir = tmp_path / ".github" / "workflows"
+    action_dir = tmp_path / ".github" / "actions" / "local"
+    workflow_dir.mkdir(parents=True)
+    action_dir.mkdir(parents=True)
+    (workflow_dir / "build.yaml").write_text("""
+jobs:
+  test:
+    steps:
+      - uses: actions/checkout@v6
+""")
+    (action_dir / "action.yaml").write_text("""
+runs:
+  using: composite
+  steps:
+    - uses: actions/setup-python@v6
+""")
+
+    assert pinned_action_failures(tmp_path) == [
+        ".github/actions/local/action.yaml: action is not pinned to a full SHA -> actions/setup-python@v6",
+        ".github/workflows/build.yaml: action is not pinned to a full SHA -> actions/checkout@v6",
+    ]
+
+
+def test_app_repo_local_workflows_are_rejected(tmp_path: Path) -> None:
+    workflow_dir = tmp_path / ".github" / "workflows"
+    workflow_dir.mkdir(parents=True)
+    (workflow_dir / "pwn.yml").write_text("name: pwn\n")
+
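+    # Any workflow file in an app repo is rejected outright: checks must run
+    # from the trusted aio-fleet checkout, never from repo-local workflows.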
+    assert repo_local_workflow_failures(_repo(tmp_path)) == [  # nosec B101
+        "example-aio: .github/workflows is disabled; app repos are checked by aio-fleet / required"
+    ]
+
+
 def test_derived_repo_validation_accepts_minimal_repo(tmp_path: Path) -> None:
     _write_minimal_derived_repo(tmp_path)
 
@@ -145,6 +181,20 @@ def test_derived_repo_validation_loads_component_templates(tmp_path: Path) -> No
     ]  # nosec B101
 
 
+def test_derived_repo_validation_rejects_escaped_component_template(
+    tmp_path: Path,
+) -> None:
+    _write_minimal_derived_repo(tmp_path)
+    (tmp_path / "components.toml").write_text("""
+[components.agent]
+template = "../agent.xml"
+""")
+
+    assert derived_repo_failures(tmp_path) == [
+        "repo path escapes checkout: ../agent.xml"
+    ]  # nosec B101
+
+
 def test_publish_platform_validation_rejects_unhandled_arm64(tmp_path: Path) -> None:
     (tmp_path / "Dockerfile").write_text("""
 ARG TARGETARCH
@@ -193,6 +243,55 @@ def test_template_metadata_validation_checks_catalog_urls(tmp_path: Path) -> Non
     )
 
 
+def test_template_metadata_validation_rejects_untrusted_repository_namespace(
+    tmp_path: Path,
+) -> None:
+    (tmp_path / "example-aio.xml").write_text("""
+<Container>
+  <Name>example-aio</Name>
+  <Repository>attacker/example-aio:latest</Repository>
+  <Registry>https://hub.docker.com/r/attacker/example-aio</Registry>
+  <Project>https://github.com/JSONbored/example-aio</Project>
+  <Support>https://github.com/JSONbored/example-aio/issues</Support>
+  <Overview>Example.</Overview>
+  <Category>Tools:</Category>
+  <TemplateURL>https://raw.githubusercontent.com/JSONbored/awesome-unraid/main/example-aio.xml</TemplateURL>
+  <Icon>https://raw.githubusercontent.com/JSONbored/awesome-unraid/main/icons/example.png</Icon>
+</Container>
+""")
+
+    failures = template_metadata_failures(_repo(tmp_path), _Manifest())  # type: ignore[arg-type]
+
+    assert (  # nosec B101
+        "example-aio: example-aio.xml must use one of ['jsonbored/example-aio'], got attacker/example-aio:latest"
+        in failures
+    )
+
+
+def test_template_metadata_validation_handles_non_file_xml(tmp_path: Path) -> None:
+    (tmp_path / "example-aio.xml").mkdir()
+
+    failures = template_metadata_failures(_repo(tmp_path), _Manifest())  # type: ignore[arg-type]
+
+    assert failures == [  # nosec B101
+        "example-aio: catalog XML example-aio.xml must be a file"
+    ]
+
+
+def test_template_metadata_validation_rejects_defused_xml(tmp_path: Path) -> None:
+    (tmp_path / "example-aio.xml").write_text("""
+<?xml version="1.0"?>
+<!DOCTYPE Container [<!ENTITY xxe SYSTEM "file:///etc/passwd">]>
+<Container>&xxe;</Container>
+""")
+
+    failures = template_metadata_failures(_repo(tmp_path), _Manifest())  # type: ignore[arg-type]
+
+    assert len(failures) == 1  # nosec B101
+    assert failures[0].startswith(  # nosec B101
+        "example-aio: unable to parse catalog XML example-aio.xml:"
+    )
+
+
 def test_template_metadata_validation_rejects_nested_options_and_bad_changes(
     tmp_path: Path,
 ) -> None:
@@ -258,6 +357,39 @@ def test_template_metadata_validation_applies_manifest_declared_targets(
     assert any("manifest-forbidden" in failure for failure in failures)  # nosec B101
 
 
+def test_runtime_contract_allows_unpublished_optional_port(tmp_path: Path) -> None:
+    (tmp_path / "Dockerfile").write_text(
+        "FROM ubuntu@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
+        'ENTRYPOINT ["/init"]\n'
+        "S6_CMD_WAIT_FOR_SERVICES_MAXTIME=300000\n"
+        "S6_BEHAVIOUR_IF_STAGE2_FAILS=2\n"
+        'VOLUME ["/config"]\n'
+        "HEALTHCHECK CMD curl -fsS http://localhost:3000/ || exit 1\n"
+        "EXPOSE 3000\n"
+    )
+    (tmp_path / "example-aio.xml").write_text("""
+<Container>
+  <Name>example-aio</Name>
+  <Repository>jsonbored/example-aio:latest</Repository>
+  <Registry>https://hub.docker.com/r/jsonbored/example-aio</Registry>
+  <Project>https://github.com/JSONbored/example-aio</Project>
+  <Support>https://github.com/JSONbored/example-aio/issues</Support>
+  <Overview>Example.</Overview>
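+  <!-- The optional port below is deliberately unset: the contract check must
+       not demand a matching EXPOSE line for it. -->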
+  <Category>Tools:</Category>
+  <TemplateURL>https://raw.githubusercontent.com/JSONbored/awesome-unraid/main/example-aio.xml</TemplateURL>
+  <Icon>https://raw.githubusercontent.com/JSONbored/awesome-unraid/main/icons/example.png</Icon>
+  <Config Name="WebUI" Type="Port" Target="3000" Default="3000" Mode="tcp" Required="true">3000</Config>
+  <Config Name="Metrics" Type="Port" Target="8765" Default="" Mode="tcp" Required="false"></Config>
+</Container>
+""")
+
+    failures = runtime_contract_failures(_repo(tmp_path))
+
+    assert not any(
+        "8765 is not exposed" in failure for failure in failures
+    )  # nosec B101
+
+
 def test_template_metadata_validation_rejects_common_quality_drift(
     tmp_path: Path,
 ) -> None: