From 12683ac2e0d7278c7eea70cb7c5429e631d34561 Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Fri, 6 Feb 2026 22:38:25 +0000 Subject: [PATCH 1/6] fix: restrict host gateway iptables bypass to allowed ports only The --enable-host-access flag added an iptables ACCEPT rule for host.docker.internal with no port restriction, allowing agent code to reach ANY service on the host (databases, admin panels, etc.) and bypassing the dangerous-ports blocklist entirely. Changes: - Restrict host gateway FILTER ACCEPT to ports 80, 443, and any ports from --allow-host-ports (was: all ports) - Apply same port restriction to network gateway bypass - Add IPv4 format validation for dynamically resolved IPs before using them in iptables rules - Mount chroot-hosts as read-only (:ro) since host.docker.internal is pre-injected by docker-manager.ts before mounting The NAT RETURN rule (which prevents DNAT to Squid) is unchanged, so MCP traffic still bypasses Squid correctly. Non-allowed port traffic hits the final DROP rule in the FILTER chain. Co-Authored-By: Claude Opus 4.6 (1M context) --- containers/agent/setup-iptables.sh | 38 +++++++++++++++++++++++++++--- src/docker-manager.test.ts | 7 +++--- src/docker-manager.ts | 2 +- 3 files changed, 39 insertions(+), 8 deletions(-) diff --git a/containers/agent/setup-iptables.sh b/containers/agent/setup-iptables.sh index 47e946b4..27671e00 100644 --- a/containers/agent/setup-iptables.sh +++ b/containers/agent/setup-iptables.sh @@ -11,6 +11,12 @@ is_ipv6() { [[ "$ip" == *:* ]] } +# Function to validate an IPv4 address format (e.g., 172.17.0.1) +is_valid_ipv4() { + local ip="$1" + echo "$ip" | grep -qE '^([0-9]{1,3}\.){3}[0-9]{1,3}$' +} + # Function to check if ip6tables is available and functional has_ip6tables() { if command -v ip6tables &>/dev/null && ip6tables -L -n &>/dev/null; then @@ -124,12 +130,28 @@ iptables -t nat -A OUTPUT -d "$SQUID_IP" -j RETURN # Bypass Squid for host.docker.internal when host access is enabled. # MCP gateway traffic to host.docker.internal gets DNAT'd to Squid, # where Squid fails with "Invalid URL" because rmcp sends relative URLs. +# The NAT RETURN prevents DNAT to Squid (which would crash on MCP traffic). +# The FILTER ACCEPT is restricted to allowed ports only (80, 443, and --allow-host-ports). if [ -n "$AWF_ENABLE_HOST_ACCESS" ]; then HOST_GATEWAY_IP=$(getent hosts host.docker.internal | awk 'NR==1 { print $1 }') - if [ -n "$HOST_GATEWAY_IP" ]; then + if [ -n "$HOST_GATEWAY_IP" ] && is_valid_ipv4 "$HOST_GATEWAY_IP"; then echo "[iptables] Allow direct traffic to host gateway (${HOST_GATEWAY_IP}) - bypassing Squid..." + # NAT: skip DNAT to Squid for all traffic to host gateway (prevents Squid crash) iptables -t nat -A OUTPUT -d "$HOST_GATEWAY_IP" -j RETURN - iptables -A OUTPUT -d "$HOST_GATEWAY_IP" -j ACCEPT + # FILTER: only allow standard ports (80, 443) to host gateway + iptables -A OUTPUT -p tcp -d "$HOST_GATEWAY_IP" --dport 80 -j ACCEPT + iptables -A OUTPUT -p tcp -d "$HOST_GATEWAY_IP" --dport 443 -j ACCEPT + # FILTER: also allow user-specified ports from --allow-host-ports + if [ -n "$AWF_ALLOW_HOST_PORTS" ]; then + IFS=',' read -ra HOST_PORTS <<< "$AWF_ALLOW_HOST_PORTS" + for port_spec in "${HOST_PORTS[@]}"; do + port_spec=$(echo "$port_spec" | xargs) + echo "[iptables] Allow host gateway port $port_spec" + iptables -A OUTPUT -p tcp -d "$HOST_GATEWAY_IP" --dport "$port_spec" -j ACCEPT + done + fi + elif [ -n "$HOST_GATEWAY_IP" ]; then + echo "[iptables] WARNING: host.docker.internal resolved to invalid IP '${HOST_GATEWAY_IP}', skipping host gateway bypass" else echo "[iptables] WARNING: host.docker.internal could not be resolved, skipping host gateway bypass" fi @@ -139,10 +161,20 @@ if [ -n "$AWF_ENABLE_HOST_ACCESS" ]; then # instead of the Docker bridge gateway (172.17.0.1). Without this bypass, # MCP Streamable HTTP traffic goes through Squid, which crashes on SSE connections. NETWORK_GATEWAY_IP=$(route -n | awk '/^0\.0\.0\.0/ { print $2; exit }') - if [ -n "$NETWORK_GATEWAY_IP" ] && [ "$NETWORK_GATEWAY_IP" != "$HOST_GATEWAY_IP" ]; then + if [ -n "$NETWORK_GATEWAY_IP" ] && is_valid_ipv4 "$NETWORK_GATEWAY_IP" && [ "$NETWORK_GATEWAY_IP" != "$HOST_GATEWAY_IP" ]; then echo "[iptables] Allow direct traffic to network gateway (${NETWORK_GATEWAY_IP}) - bypassing Squid..." iptables -t nat -A OUTPUT -d "$NETWORK_GATEWAY_IP" -j RETURN iptables -A OUTPUT -p tcp -d "$NETWORK_GATEWAY_IP" --dport 80 -j ACCEPT + iptables -A OUTPUT -p tcp -d "$NETWORK_GATEWAY_IP" --dport 443 -j ACCEPT + if [ -n "$AWF_ALLOW_HOST_PORTS" ]; then + IFS=',' read -ra NET_GW_PORTS <<< "$AWF_ALLOW_HOST_PORTS" + for port_spec in "${NET_GW_PORTS[@]}"; do + port_spec=$(echo "$port_spec" | xargs) + iptables -A OUTPUT -p tcp -d "$NETWORK_GATEWAY_IP" --dport "$port_spec" -j ACCEPT + done + fi + elif [ -n "$NETWORK_GATEWAY_IP" ] && ! is_valid_ipv4 "$NETWORK_GATEWAY_IP"; then + echo "[iptables] WARNING: network gateway resolved to invalid IP '${NETWORK_GATEWAY_IP}', skipping" fi fi diff --git a/src/docker-manager.test.ts b/src/docker-manager.test.ts index bffec27f..2e21fd74 100644 --- a/src/docker-manager.test.ts +++ b/src/docker-manager.test.ts @@ -774,7 +774,7 @@ describe('docker-manager', () => { expect(volumes).toContain('/etc/nsswitch.conf:/host/etc/nsswitch.conf:ro'); }); - it('should mount writable chroot-hosts when enableChroot and enableHostAccess are true', () => { + it('should mount read-only chroot-hosts when enableChroot and enableHostAccess are true', () => { // Ensure workDir exists for chroot-hosts file creation fs.mkdirSync(mockConfig.workDir, { recursive: true }); try { @@ -787,11 +787,10 @@ describe('docker-manager', () => { const agent = result.services.agent; const volumes = agent.volumes as string[]; - // Should mount a writable copy of /etc/hosts (not the read-only original) + // Should mount a read-only copy of /etc/hosts with host.docker.internal pre-injected const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); expect(hostsVolume).toBeDefined(); - expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts'); - expect(hostsVolume).not.toContain(':ro'); + expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); } finally { fs.rmSync(mockConfig.workDir, { recursive: true, force: true }); } diff --git a/src/docker-manager.ts b/src/docker-manager.ts index e30923c5..2742b071 100644 --- a/src/docker-manager.ts +++ b/src/docker-manager.ts @@ -521,7 +521,7 @@ export function generateDockerCompose( logger.debug(`Could not resolve Docker bridge gateway: ${err}`); } fs.chmodSync(chrootHostsPath, 0o644); - agentVolumes.push(`${chrootHostsPath}:/host/etc/hosts`); + agentVolumes.push(`${chrootHostsPath}:/host/etc/hosts:ro`); } else { agentVolumes.push('/etc/hosts:/host/etc/hosts:ro'); } From b2bfc1c7562de82a500d9fe65056a8f8959fc914 Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Tue, 10 Feb 2026 07:43:21 +0000 Subject: [PATCH 2/6] feat: add Maven proxy setup to Java workflow and Java/Maven docs Move Maven proxy configuration to the workflow markdown (settings.xml created at runtime using SQUID_PROXY_HOST/SQUID_PROXY_PORT env vars) rather than generating it in docker-manager.ts. Add Java/Maven/Gradle troubleshooting section to docs and JAVA_TOOL_OPTIONS documentation to CLAUDE.md. Recompile build-test-java workflow. Co-Authored-By: Claude Opus 4.6 (1M context) --- .github/aw/actions-lock.json | 5 + .github/workflows/build-test-java.lock.yml | 36 +++-- .github/workflows/build-test-java.md | 23 ++- CLAUDE.md | 8 + docs/troubleshooting.md | 48 ++++++ src/docker-manager.test.ts | 180 ++++++++++++++++----- src/docker-manager.ts | 54 +++++-- 7 files changed, 289 insertions(+), 65 deletions(-) diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json index b3a2c581..036f5ea4 100644 --- a/.github/aw/actions-lock.json +++ b/.github/aw/actions-lock.json @@ -40,6 +40,11 @@ "version": "v0.42.0", "sha": "a7134347103ecf66b4bd422c3e9ce6466d400c02" }, + "github/gh-aw/actions/setup@v0.42.17": { + "repo": "github/gh-aw/actions/setup", + "version": "v0.42.17", + "sha": "7a970851c1090295e55a16e549c61ba1ce227f16" + }, "github/gh-aw/actions/setup@v0.42.7": { "repo": "github/gh-aw/actions/setup", "version": "v0.42.7", diff --git a/.github/workflows/build-test-java.lock.yml b/.github/workflows/build-test-java.lock.yml index 82a0ef1d..20e2ce0f 100644 --- a/.github/workflows/build-test-java.lock.yml +++ b/.github/workflows/build-test-java.lock.yml @@ -13,7 +13,7 @@ # \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ # \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ # -# This file was automatically generated by gh-aw. DO NOT EDIT. +# This file was automatically generated by gh-aw (v0.42.17). DO NOT EDIT. # # To update this file, edit the corresponding .md file and run: # gh aw compile @@ -54,7 +54,7 @@ jobs: comment_repo: "" steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@e820ba3d2aacc3903329db6b310aeaa86202844e # v0.42.11 + uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17 with: destination: /opt/gh-aw/actions - name: Check workflow file timestamps @@ -93,7 +93,7 @@ jobs: secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@e820ba3d2aacc3903329db6b310aeaa86202844e # v0.42.11 + uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17 with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders @@ -175,7 +175,7 @@ jobs: const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Download container images - run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.13.12 ghcr.io/github/gh-aw-firewall/squid:0.13.12 ghcr.io/github/gh-aw-mcpg:v0.0.103 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.13.12 ghcr.io/github/gh-aw-firewall/squid:0.13.12 ghcr.io/github/gh-aw-mcpg:v0.0.113 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine - name: Write Safe Outputs Config run: | mkdir -p /opt/gh-aw/safeoutputs @@ -428,7 +428,7 @@ jobs: # Register API key as secret to mask it from logs echo "::add-mask::${MCP_GATEWAY_API_KEY}" export GH_AW_ENGINE="copilot" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.0.103' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.0.113' mkdir -p /home/runner/.copilot cat << MCPCONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh @@ -473,6 +473,7 @@ jobs: model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", version: "", agent_version: "0.0.405", + cli_version: "v0.42.17", workflow_name: "Build Test Java", experimental: false, supports_tools_allowlist: true, @@ -489,7 +490,7 @@ jobs: allowed_domains: ["defaults","github","java"], firewall_enabled: true, awf_version: "v0.13.12", - awmg_version: "v0.0.103", + awmg_version: "v0.0.113", steps: { firewall: "squid" }, @@ -790,7 +791,7 @@ jobs: total_count: ${{ steps.missing_tool.outputs.total_count }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@e820ba3d2aacc3903329db6b310aeaa86202844e # v0.42.11 + uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17 with: destination: /opt/gh-aw/actions - name: Debug job inputs @@ -860,6 +861,23 @@ jobs: setupGlobals(core, github, context, exec, io); const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Build Test Java" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); - name: Update reaction comment with completion status id: conclusion uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 @@ -890,7 +908,7 @@ jobs: success: ${{ steps.parse_results.outputs.success }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@e820ba3d2aacc3903329db6b310aeaa86202844e # v0.42.11 + uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17 with: destination: /opt/gh-aw/actions - name: Download agent artifacts @@ -1003,7 +1021,7 @@ jobs: process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} steps: - name: Setup Scripts - uses: github/gh-aw/actions/setup@e820ba3d2aacc3903329db6b310aeaa86202844e # v0.42.11 + uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17 with: destination: /opt/gh-aw/actions - name: Download agent output artifact diff --git a/.github/workflows/build-test-java.md b/.github/workflows/build-test-java.md index 82256fa4..cffa0f5a 100644 --- a/.github/workflows/build-test-java.md +++ b/.github/workflows/build-test-java.md @@ -50,11 +50,30 @@ Clone and test the following projects from the test repository: 1. **Clone Repository**: `gh repo clone Mossaka/gh-aw-firewall-test-java /tmp/test-java` - **CRITICAL**: If clone fails, immediately call `safeoutputs-missing_tool` with message "CLONE_FAILED: Unable to clone test repository" and stop execution -2. **Test Projects**: +2. **Configure Maven Proxy**: Maven ignores Java system properties for proxy configuration, so you must create `~/.m2/settings.xml` before running any Maven commands: + ```bash + mkdir -p ~/.m2 + cat > ~/.m2/settings.xml << EOF + + + + awf-httptruehttp + ${SQUID_PROXY_HOST}${SQUID_PROXY_PORT} + + + awf-httpstruehttps + ${SQUID_PROXY_HOST}${SQUID_PROXY_PORT} + + + + EOF + ``` + +3. **Test Projects**: - `gson`: `cd /tmp/test-java/gson && mvn compile && mvn test` - `caffeine`: `cd /tmp/test-java/caffeine && mvn compile && mvn test` -3. **For each project**, capture: +4. **For each project**, capture: - Compile success/failure - Test pass/fail count - Any error messages diff --git a/CLAUDE.md b/CLAUDE.md index 38504099..4a053d88 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -232,6 +232,14 @@ DNS traffic is restricted to trusted DNS servers only to prevent DNS-based data - Container DNS config (`containers/agent/entrypoint.sh`): Configures `/etc/resolv.conf` - Docker Compose (`src/docker-manager.ts`): Sets container `dns:` config and `AWF_DNS_SERVERS` env var +## Proxy Environment Variables + +AWF sets the following proxy-related environment variables in the agent container: + +- `HTTP_PROXY` / `HTTPS_PROXY`: Standard proxy variables (used by curl, wget, pip, npm, etc.) +- `SQUID_PROXY_HOST` / `SQUID_PROXY_PORT`: Raw proxy host and port for tools that need them separately +- `JAVA_TOOL_OPTIONS`: JVM system properties (`-Dhttp.proxyHost`, `-Dhttps.proxyHost`, etc.) for Java tools. Works for Gradle, SBT, and most JVM tools. **Maven requires separate `~/.m2/settings.xml` configuration** — see `docs/troubleshooting.md`. + **Example**: ```bash # Use Cloudflare DNS instead of Google DNS diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 375e7a27..fc12fb8c 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -129,6 +129,54 @@ cat /tmp/awf-agent-logs-/*.log ``` +## Java / Maven / Gradle Issues + +### How AWF Handles Java Proxy + +AWF automatically sets `JAVA_TOOL_OPTIONS` with `-Dhttp.proxyHost`, `-Dhttp.proxyPort`, `-Dhttps.proxyHost`, `-Dhttps.proxyPort`, and `-Dhttp.nonProxyHosts` inside the agent container. This works for most Java tools that read standard JVM system properties, including Gradle and SBT. + +### Maven Requires Extra Configuration + +**Problem:** Maven builds fail with network errors even though the domain is in `--allow-domains` + +**Cause:** Maven's HTTP transport (Apache HttpClient / Maven Resolver) ignores Java system properties for proxy configuration. Unlike Gradle and most other Java tools, Maven does **not** read `-DproxyHost`/`-DproxyPort` from `JAVA_TOOL_OPTIONS`. + +**Solution:** Create `~/.m2/settings.xml` with proxy configuration before running Maven: + +```bash +mkdir -p ~/.m2 +cat > ~/.m2/settings.xml << EOF + + + + awf-httptruehttp + ${SQUID_PROXY_HOST}${SQUID_PROXY_PORT} + + + awf-httpstruehttps + ${SQUID_PROXY_HOST}${SQUID_PROXY_PORT} + + + +EOF +``` + +The `SQUID_PROXY_HOST` and `SQUID_PROXY_PORT` environment variables are automatically set by AWF in the agent container. + +For agentic workflows, add this as a setup step in the workflow `.md` file so the agent creates the file before running Maven commands. + +### Gradle Works Automatically + +Gradle reads JVM system properties via `ProxySelector.getDefault()`, so the `JAVA_TOOL_OPTIONS` environment variable set by AWF is sufficient. No extra configuration is needed for Gradle builds. + +### Why This Is Needed + +AWF uses a forward proxy (Squid) for HTTPS egress control rather than transparent interception. This means tools must be proxy-aware: + +- **Most tools**: Use `HTTP_PROXY`/`HTTPS_PROXY` environment variables (set automatically by AWF) +- **Java tools**: Use `JAVA_TOOL_OPTIONS` with JVM system properties (set automatically by AWF) +- **Maven**: Requires `~/.m2/settings.xml` (must be configured manually — see above) + ## Log Analysis ### Finding Blocked Domains diff --git a/src/docker-manager.test.ts b/src/docker-manager.test.ts index 2e21fd74..a32ff1f6 100644 --- a/src/docker-manager.test.ts +++ b/src/docker-manager.test.ts @@ -259,6 +259,15 @@ describe('docker-manager', () => { agentIp: '172.30.0.20', }; + // Ensure workDir exists for chroot tests that create chroot-hosts file + beforeEach(() => { + fs.mkdirSync(mockConfig.workDir, { recursive: true }); + }); + + afterEach(() => { + fs.rmSync(mockConfig.workDir, { recursive: true, force: true }); + }); + it('should generate docker-compose config with GHCR images by default', () => { const result = generateDockerCompose(mockConfig, mockNetworkConfig); @@ -559,7 +568,10 @@ describe('docker-manager', () => { expect(volumes).toContain('/etc/ca-certificates:/host/etc/ca-certificates:ro'); expect(volumes).toContain('/etc/alternatives:/host/etc/alternatives:ro'); expect(volumes).toContain('/etc/ld.so.cache:/host/etc/ld.so.cache:ro'); - expect(volumes).toContain('/etc/hosts:/host/etc/hosts:ro'); + // /etc/hosts is now always a custom chroot-hosts file in chroot mode (for pre-resolved domains) + const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); + expect(hostsVolume).toBeDefined(); + expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); // Should still include essential mounts expect(volumes).toContain('/tmp:/tmp:rw'); @@ -775,51 +787,39 @@ describe('docker-manager', () => { }); it('should mount read-only chroot-hosts when enableChroot and enableHostAccess are true', () => { - // Ensure workDir exists for chroot-hosts file creation - fs.mkdirSync(mockConfig.workDir, { recursive: true }); - try { - const config = { - ...mockConfig, - enableChroot: true, - enableHostAccess: true - }; - const result = generateDockerCompose(config, mockNetworkConfig); - const agent = result.services.agent; - const volumes = agent.volumes as string[]; + const config = { + ...mockConfig, + enableChroot: true, + enableHostAccess: true + }; + const result = generateDockerCompose(config, mockNetworkConfig); + const agent = result.services.agent; + const volumes = agent.volumes as string[]; - // Should mount a read-only copy of /etc/hosts with host.docker.internal pre-injected - const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); - expect(hostsVolume).toBeDefined(); - expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); - } finally { - fs.rmSync(mockConfig.workDir, { recursive: true, force: true }); - } + // Should mount a read-only copy of /etc/hosts with host.docker.internal pre-injected + const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); + expect(hostsVolume).toBeDefined(); + expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); }); it('should inject host.docker.internal into chroot-hosts file', () => { - // Ensure workDir exists for chroot-hosts file creation - fs.mkdirSync(mockConfig.workDir, { recursive: true }); - try { - const config = { - ...mockConfig, - enableChroot: true, - enableHostAccess: true - }; - generateDockerCompose(config, mockNetworkConfig); - - // The chroot-hosts file should exist and contain host.docker.internal - const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; - expect(fs.existsSync(chrootHostsPath)).toBe(true); - const content = fs.readFileSync(chrootHostsPath, 'utf8'); - // Docker bridge gateway resolution may succeed or fail in test env, - // but the file should exist with at least localhost - expect(content).toContain('localhost'); - } finally { - fs.rmSync(mockConfig.workDir, { recursive: true, force: true }); - } + const config = { + ...mockConfig, + enableChroot: true, + enableHostAccess: true + }; + generateDockerCompose(config, mockNetworkConfig); + + // The chroot-hosts file should exist and contain host.docker.internal + const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + expect(fs.existsSync(chrootHostsPath)).toBe(true); + const content = fs.readFileSync(chrootHostsPath, 'utf8'); + // Docker bridge gateway resolution may succeed or fail in test env, + // but the file should exist with at least localhost + expect(content).toContain('localhost'); }); - it('should mount read-only /etc/hosts when enableChroot is true but enableHostAccess is false', () => { + it('should mount custom chroot-hosts when enableChroot is true even without enableHostAccess', () => { const config = { ...mockConfig, enableChroot: true, @@ -829,7 +829,105 @@ describe('docker-manager', () => { const agent = result.services.agent; const volumes = agent.volumes as string[]; - expect(volumes).toContain('/etc/hosts:/host/etc/hosts:ro'); + // Should mount a custom chroot-hosts file (for pre-resolved domains) + const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); + expect(hostsVolume).toBeDefined(); + expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); + }); + + it('should pre-resolve allowed domains into chroot-hosts file', () => { + // Mock getent to return a resolved IP for a test domain + mockExecaSync.mockImplementation((...args: any[]) => { + if (args[0] === 'getent' && args[1]?.[0] === 'hosts') { + const domain = args[1][1]; + if (domain === 'github.com') { + return { stdout: '140.82.121.4 github.com', stderr: '', exitCode: 0 }; + } + if (domain === 'npmjs.org') { + return { stdout: '104.16.22.35 npmjs.org', stderr: '', exitCode: 0 }; + } + throw new Error('Resolution failed'); + } + // For docker network inspect (host.docker.internal) + throw new Error('Not found'); + }); + + const config = { + ...mockConfig, + allowedDomains: ['github.com', 'npmjs.org', '*.wildcard.com'], + enableChroot: true + }; + generateDockerCompose(config, mockNetworkConfig); + + const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + expect(fs.existsSync(chrootHostsPath)).toBe(true); + const content = fs.readFileSync(chrootHostsPath, 'utf8'); + + // Should contain pre-resolved domains + expect(content).toContain('140.82.121.4\tgithub.com'); + expect(content).toContain('104.16.22.35\tnpmjs.org'); + // Should NOT contain wildcard domains (can't be resolved) + expect(content).not.toContain('wildcard.com'); + + // Reset mock + mockExecaSync.mockReset(); + }); + + it('should skip domains that fail to resolve during pre-resolution', () => { + // Mock getent to fail for all domains + mockExecaSync.mockImplementation(() => { + throw new Error('Resolution failed'); + }); + + const config = { + ...mockConfig, + allowedDomains: ['unreachable.tailnet.example'], + enableChroot: true + }; + // Should not throw even if resolution fails + generateDockerCompose(config, mockNetworkConfig); + + const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + expect(fs.existsSync(chrootHostsPath)).toBe(true); + const content = fs.readFileSync(chrootHostsPath, 'utf8'); + + // Should still have the base hosts content (localhost) + expect(content).toContain('localhost'); + // Should NOT contain the unresolvable domain + expect(content).not.toContain('unreachable.tailnet.example'); + + // Reset mock + mockExecaSync.mockReset(); + }); + + it('should not add duplicate entries for domains already in /etc/hosts', () => { + // Mock getent to return a resolved IP + mockExecaSync.mockImplementation((...args: any[]) => { + if (args[0] === 'getent' && args[1]?.[0] === 'hosts') { + return { stdout: '127.0.0.1 localhost', stderr: '', exitCode: 0 }; + } + throw new Error('Not found'); + }); + + const config = { + ...mockConfig, + allowedDomains: ['localhost'], // localhost is already in /etc/hosts + enableChroot: true + }; + generateDockerCompose(config, mockNetworkConfig); + + const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + const content = fs.readFileSync(chrootHostsPath, 'utf8'); + + // Count occurrences of 'localhost' - should only be the original entries, not duplicated + const localhostMatches = content.match(/localhost/g); + // /etc/hosts typically has multiple localhost entries (127.0.0.1 and ::1) + // The key assertion is that getent should NOT have been called for localhost + // since it's already in the hosts file + expect(localhostMatches).toBeDefined(); + + // Reset mock + mockExecaSync.mockReset(); }); it('should use GHCR image when enableChroot is true with default preset (GHCR)', () => { diff --git a/src/docker-manager.ts b/src/docker-manager.ts index 2742b071..03469f16 100644 --- a/src/docker-manager.ts +++ b/src/docker-manager.ts @@ -495,18 +495,47 @@ export function generateDockerCompose( ); // Mount /etc/hosts for host name resolution inside chroot - // When BOTH chroot and host access are enabled, we create a copy with host.docker.internal - // injected. Docker only adds this to the container's /etc/hosts via extra_hosts, but the - // chroot uses the host's /etc/hosts which lacks this entry. MCP servers need it to connect - // to the MCP gateway running on the host. - if (config.enableHostAccess) { - const chrootHostsPath = path.join(config.workDir, 'chroot-hosts'); + // Always create a custom hosts file in chroot mode to: + // 1. Pre-resolve allowed domains using the host's DNS stack (supports Tailscale MagicDNS, + // split DNS, and other custom resolvers not available inside the container) + // 2. Inject host.docker.internal when --enable-host-access is set + const chrootHostsPath = path.join(config.workDir, 'chroot-hosts'); + try { + fs.copyFileSync('/etc/hosts', chrootHostsPath); + } catch { + fs.writeFileSync(chrootHostsPath, '127.0.0.1 localhost\n'); + } + + // Pre-resolve allowed domains on the host and inject into /etc/hosts + // This is critical for domains that rely on custom DNS (e.g., Tailscale MagicDNS + // at 100.100.100.100) which is unreachable from inside the Docker container's + // network namespace. Resolution runs on the host where all DNS resolvers are available. + const hostsContent = fs.readFileSync(chrootHostsPath, 'utf-8'); + for (const domain of config.allowedDomains) { + // Skip patterns that aren't resolvable hostnames + if (domain.startsWith('*.') || domain.startsWith('.') || domain.includes('*')) continue; + // Skip if already in hosts file + if (hostsContent.includes(domain)) continue; + try { - fs.copyFileSync('/etc/hosts', chrootHostsPath); + const { stdout } = execa.sync('getent', ['hosts', domain], { timeout: 5000 }); + const parts = stdout.trim().split(/\s+/); + const ip = parts[0]; + if (ip) { + fs.appendFileSync(chrootHostsPath, `${ip}\t${domain}\n`); + logger.debug(`Pre-resolved ${domain} -> ${ip} for chroot /etc/hosts`); + } } catch { - fs.writeFileSync(chrootHostsPath, '127.0.0.1 localhost\n'); + // Domain couldn't be resolved on the host - it will use DNS at runtime + logger.debug(`Could not pre-resolve ${domain} for chroot /etc/hosts (will use DNS at runtime)`); } - // Resolve host-gateway IP from Docker bridge and append host.docker.internal + } + + // Add host.docker.internal when host access is enabled + // Docker only adds this to the container's /etc/hosts via extra_hosts, but the + // chroot uses the host's /etc/hosts which lacks this entry. MCP servers need it + // to connect to the MCP gateway running on the host. + if (config.enableHostAccess) { try { const { stdout } = execa.sync('docker', [ 'network', 'inspect', 'bridge', @@ -520,12 +549,11 @@ export function generateDockerCompose( } catch (err) { logger.debug(`Could not resolve Docker bridge gateway: ${err}`); } - fs.chmodSync(chrootHostsPath, 0o644); - agentVolumes.push(`${chrootHostsPath}:/host/etc/hosts:ro`); - } else { - agentVolumes.push('/etc/hosts:/host/etc/hosts:ro'); } + fs.chmodSync(chrootHostsPath, 0o644); + agentVolumes.push(`${chrootHostsPath}:/host/etc/hosts:ro`); + // SECURITY: Hide Docker socket to prevent firewall bypass via 'docker run' // An attacker could otherwise spawn a new container without network restrictions agentVolumes.push('/dev/null:/host/var/run/docker.sock:ro'); From 836ea5cb6c8ef3bd5b0c322f1135c2da811c4164 Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Tue, 10 Feb 2026 07:53:14 +0000 Subject: [PATCH 3/6] chore: regenerate workflow lock files after merging main Co-Authored-By: Claude Opus 4.6 (1M context) --- .github/workflows/build-test-bun.lock.yml | 10 +++++----- .github/workflows/build-test-cpp.lock.yml | 10 +++++----- .github/workflows/build-test-deno.lock.yml | 10 +++++----- .github/workflows/build-test-go.lock.yml | 12 +++++------ .github/workflows/build-test-java.lock.yml | 12 +++++------ .github/workflows/build-test-node.lock.yml | 12 +++++------ .github/workflows/build-test-rust.lock.yml | 10 +++++----- .../workflows/ci-cd-gaps-assessment.lock.yml | 10 +++++----- .github/workflows/ci-doctor.lock.yml | 16 +++++++-------- .../cli-flag-consistency-checker.lock.yml | 10 +++++----- .../dependency-security-monitor.lock.yml | 14 ++++++------- .github/workflows/doc-maintainer.lock.yml | 14 ++++++------- .../issue-duplication-detector.lock.yml | 16 +++++++-------- .github/workflows/issue-monster.lock.yml | 10 +++++----- .../pelis-agent-factory-advisor.lock.yml | 16 +++++++-------- .github/workflows/plan.lock.yml | 10 +++++----- .github/workflows/security-guard.lock.yml | 14 ++++++------- .github/workflows/security-review.lock.yml | 16 +++++++-------- .github/workflows/smoke-chroot.lock.yml | 16 +++++++-------- .github/workflows/smoke-claude.lock.yml | 20 +++++++++---------- .github/workflows/smoke-codex.lock.yml | 20 +++++++++---------- .github/workflows/smoke-copilot.lock.yml | 16 +++++++-------- .../workflows/test-coverage-improver.lock.yml | 14 ++++++------- .../workflows/update-release-notes.lock.yml | 10 +++++----- 24 files changed, 159 insertions(+), 159 deletions(-) diff --git a/.github/workflows/build-test-bun.lock.yml b/.github/workflows/build-test-bun.lock.yml index 991430fb..5f784525 100644 --- a/.github/workflows/build-test-bun.lock.yml +++ b/.github/workflows/build-test-bun.lock.yml @@ -97,7 +97,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -802,7 +802,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -908,13 +908,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1021,7 +1021,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/build-test-cpp.lock.yml b/.github/workflows/build-test-cpp.lock.yml index 5e7dd3d0..e1ce138e 100644 --- a/.github/workflows/build-test-cpp.lock.yml +++ b/.github/workflows/build-test-cpp.lock.yml @@ -97,7 +97,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -802,7 +802,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -908,13 +908,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1021,7 +1021,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/build-test-deno.lock.yml b/.github/workflows/build-test-deno.lock.yml index 9d03ed69..a972aecc 100644 --- a/.github/workflows/build-test-deno.lock.yml +++ b/.github/workflows/build-test-deno.lock.yml @@ -97,7 +97,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -802,7 +802,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -908,13 +908,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1021,7 +1021,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/build-test-go.lock.yml b/.github/workflows/build-test-go.lock.yml index 44d2440d..b5b57499 100644 --- a/.github/workflows/build-test-go.lock.yml +++ b/.github/workflows/build-test-go.lock.yml @@ -97,11 +97,11 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 + uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0 with: go-version: '1.22' - name: Capture GOROOT for AWF chroot mode @@ -808,7 +808,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -914,13 +914,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1027,7 +1027,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/build-test-java.lock.yml b/.github/workflows/build-test-java.lock.yml index f8450867..20e2ce0f 100644 --- a/.github/workflows/build-test-java.lock.yml +++ b/.github/workflows/build-test-java.lock.yml @@ -97,11 +97,11 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Setup Java - uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5.2.0 + uses: actions/setup-java@c1e323688fd81a25caa38c78aa6df2d33d3e20d9 # v4.8.0 with: java-version: '21' distribution: temurin @@ -807,7 +807,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -913,13 +913,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1026,7 +1026,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/build-test-node.lock.yml b/.github/workflows/build-test-node.lock.yml index ea3e01b5..c409d18c 100644 --- a/.github/workflows/build-test-node.lock.yml +++ b/.github/workflows/build-test-node.lock.yml @@ -97,11 +97,11 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '20' package-manager-cache: false @@ -807,7 +807,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -913,13 +913,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1026,7 +1026,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/build-test-rust.lock.yml b/.github/workflows/build-test-rust.lock.yml index bdd9f0ed..6f2e5a34 100644 --- a/.github/workflows/build-test-rust.lock.yml +++ b/.github/workflows/build-test-rust.lock.yml @@ -97,7 +97,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -802,7 +802,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -908,13 +908,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1021,7 +1021,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/ci-cd-gaps-assessment.lock.yml b/.github/workflows/ci-cd-gaps-assessment.lock.yml index 35bbd456..5343834c 100644 --- a/.github/workflows/ci-cd-gaps-assessment.lock.yml +++ b/.github/workflows/ci-cd-gaps-assessment.lock.yml @@ -97,7 +97,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -752,7 +752,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -858,13 +858,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -965,7 +965,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml index 879c0d9c..a8a08802 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -129,7 +129,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -142,7 +142,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -895,7 +895,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1001,13 +1001,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1136,7 +1136,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1172,13 +1172,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/cli-flag-consistency-checker.lock.yml b/.github/workflows/cli-flag-consistency-checker.lock.yml index 6f781111..3099da3b 100644 --- a/.github/workflows/cli-flag-consistency-checker.lock.yml +++ b/.github/workflows/cli-flag-consistency-checker.lock.yml @@ -92,7 +92,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -759,7 +759,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -867,13 +867,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -978,7 +978,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/dependency-security-monitor.lock.yml b/.github/workflows/dependency-security-monitor.lock.yml index 725bb319..896e0797 100644 --- a/.github/workflows/dependency-security-monitor.lock.yml +++ b/.github/workflows/dependency-security-monitor.lock.yml @@ -99,7 +99,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -889,7 +889,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1009,13 +1009,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1122,7 +1122,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1133,13 +1133,13 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ - name: Checkout repository if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'create_pull_request')) - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: token: ${{ github.token }} persist-credentials: false diff --git a/.github/workflows/doc-maintainer.lock.yml b/.github/workflows/doc-maintainer.lock.yml index 69e3a0c4..2693baa3 100644 --- a/.github/workflows/doc-maintainer.lock.yml +++ b/.github/workflows/doc-maintainer.lock.yml @@ -97,7 +97,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -775,7 +775,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -895,13 +895,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1042,7 +1042,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1053,13 +1053,13 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ - name: Checkout repository if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'create_pull_request')) - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: token: ${{ github.token }} persist-credentials: false diff --git a/.github/workflows/issue-duplication-detector.lock.yml b/.github/workflows/issue-duplication-detector.lock.yml index eab9a93e..fc82dd3d 100644 --- a/.github/workflows/issue-duplication-detector.lock.yml +++ b/.github/workflows/issue-duplication-detector.lock.yml @@ -95,7 +95,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -108,7 +108,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: issue-duplication-detector-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -774,7 +774,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -878,13 +878,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1012,7 +1012,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1048,13 +1048,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: issue-duplication-detector-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml index 145cd631..1a87fd43 100644 --- a/.github/workflows/issue-monster.lock.yml +++ b/.github/workflows/issue-monster.lock.yml @@ -104,7 +104,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -812,7 +812,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -920,13 +920,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1084,7 +1084,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/pelis-agent-factory-advisor.lock.yml b/.github/workflows/pelis-agent-factory-advisor.lock.yml index c9c8c18a..91725801 100644 --- a/.github/workflows/pelis-agent-factory-advisor.lock.yml +++ b/.github/workflows/pelis-agent-factory-advisor.lock.yml @@ -98,7 +98,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -111,7 +111,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -776,7 +776,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -882,13 +882,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -989,7 +989,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1025,13 +1025,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/plan.lock.yml b/.github/workflows/plan.lock.yml index 6679ff31..be7770b4 100644 --- a/.github/workflows/plan.lock.yml +++ b/.github/workflows/plan.lock.yml @@ -126,7 +126,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -879,7 +879,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -983,13 +983,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1148,7 +1148,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/security-guard.lock.yml b/.github/workflows/security-guard.lock.yml index 71a3aa4d..32c5e730 100644 --- a/.github/workflows/security-guard.lock.yml +++ b/.github/workflows/security-guard.lock.yml @@ -94,7 +94,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -135,7 +135,7 @@ jobs: CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false @@ -795,7 +795,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -899,13 +899,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -937,7 +937,7 @@ jobs: CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false @@ -1026,7 +1026,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/security-review.lock.yml b/.github/workflows/security-review.lock.yml index aa287fa3..57736c7d 100644 --- a/.github/workflows/security-review.lock.yml +++ b/.github/workflows/security-review.lock.yml @@ -99,7 +99,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -112,7 +112,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -777,7 +777,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -883,13 +883,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -990,7 +990,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1026,13 +1026,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-chroot.lock.yml b/.github/workflows/smoke-chroot.lock.yml index 5edbb80a..6b35f449 100644 --- a/.github/workflows/smoke-chroot.lock.yml +++ b/.github/workflows/smoke-chroot.lock.yml @@ -116,22 +116,22 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false - name: Setup Python - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: '3.12' - name: Create gh-aw temp directory run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh - name: Setup Go - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # 0aaccfd150d50ccaeb58ebd88d36e91967a5f35b + uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # 0aaccfd150d50ccaeb58ebd88d36e91967a5f35b with: go-version: "1.22" - name: Capture host versions for verification @@ -866,7 +866,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -972,13 +972,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1085,7 +1085,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml index 68540223..fa12baee 100644 --- a/.github/workflows/smoke-claude.lock.yml +++ b/.github/workflows/smoke-claude.lock.yml @@ -117,7 +117,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -126,7 +126,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -165,7 +165,7 @@ jobs: CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false @@ -943,7 +943,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1049,13 +1049,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1087,7 +1087,7 @@ jobs: CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false @@ -1178,7 +1178,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1214,13 +1214,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index 4cc6ab3d..d09b64d9 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -123,7 +123,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -132,7 +132,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -171,7 +171,7 @@ jobs: CODEX_API_KEY: ${{ secrets.CODEX_API_KEY }} OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false @@ -1545,7 +1545,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1651,13 +1651,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1689,7 +1689,7 @@ jobs: CODEX_API_KEY: ${{ secrets.CODEX_API_KEY }} OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Setup Node.js - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: '24' package-manager-cache: false @@ -1796,7 +1796,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1832,13 +1832,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml index b0eaa21d..f03ee1c2 100644 --- a/.github/workflows/smoke-copilot.lock.yml +++ b/.github/workflows/smoke-copilot.lock.yml @@ -114,7 +114,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: persist-credentials: false - name: Create gh-aw temp directory @@ -123,7 +123,7 @@ jobs: - name: Create cache-memory directory run: bash /opt/gh-aw/actions/create_cache_memory_dir.sh - name: Restore cache-memory file share data - uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory @@ -889,7 +889,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -995,13 +995,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1108,7 +1108,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1144,13 +1144,13 @@ jobs: with: destination: /opt/gh-aw/actions - name: Download cache-memory artifact (default) - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 continue-on-error: true with: name: cache-memory path: /tmp/gh-aw/cache-memory - name: Save cache-memory to cache (default) - uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: key: memory-${{ github.workflow }}-${{ github.run_id }} path: /tmp/gh-aw/cache-memory diff --git a/.github/workflows/test-coverage-improver.lock.yml b/.github/workflows/test-coverage-improver.lock.yml index 57591d7a..a1be1f76 100644 --- a/.github/workflows/test-coverage-improver.lock.yml +++ b/.github/workflows/test-coverage-improver.lock.yml @@ -100,7 +100,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -843,7 +843,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -963,13 +963,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1111,7 +1111,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -1122,13 +1122,13 @@ jobs: echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" - name: Download patch artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/ - name: Checkout repository if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'create_pull_request')) - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: token: ${{ github.token }} persist-credentials: false diff --git a/.github/workflows/update-release-notes.lock.yml b/.github/workflows/update-release-notes.lock.yml index c403db93..6a5d3241 100644 --- a/.github/workflows/update-release-notes.lock.yml +++ b/.github/workflows/update-release-notes.lock.yml @@ -94,7 +94,7 @@ jobs: with: destination: /opt/gh-aw/actions - name: Checkout .github and .agents folders - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: sparse-checkout: | .github @@ -789,7 +789,7 @@ jobs: echo "Agent Conclusion: $AGENT_CONCLUSION" - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ @@ -895,13 +895,13 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent artifacts continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-artifacts path: /tmp/gh-aw/threat-detection/ - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/threat-detection/ @@ -1026,7 +1026,7 @@ jobs: destination: /opt/gh-aw/actions - name: Download agent output artifact continue-on-error: true - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: name: agent-output path: /tmp/gh-aw/safeoutputs/ From 7280ef4342c2d74c19528b48aea048b459137eb8 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Tue, 10 Feb 2026 11:05:58 -0800 Subject: [PATCH 4/6] feat: add skip-unset mode to one-shot token library with value preview (enabled by default) (#640) * Initial plan * feat: add AWF_ONE_SHOT_SKIP_UNSET flag to skip clearing tokens - Add skip_unset flag controlled by AWF_ONE_SHOT_SKIP_UNSET env var - When enabled, tokens are logged but not cleared after access - Allow multiple reads of tokens in skip-unset mode - Update README with skip-unset mode documentation - Add tests for skip-unset mode in both container and chroot modes Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> * test: verify skip-unset functionality works correctly - Built one-shot-token library successfully - Verified normal mode clears tokens after first access - Verified skip-unset mode preserves tokens and logs accesses - Unit tests pass (736/737) - Lint passes with no new errors Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> * fix: always strdup result in skip-unset mode for safety - Call strdup() on result in both normal and skip-unset modes - Prevents potential use-after-free if environment is modified - Applies to both first access and subsequent accesses in skip-unset mode - Addresses code review feedback about pointer safety Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> * feat: enable skip-unset mode by default Set AWF_ONE_SHOT_SKIP_UNSET=1 in entrypoint.sh to skip variable unsetting while still logging token accesses. This applies to both container mode and chroot mode. Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> * feat: add token value preview to log messages Show first 4 characters of token value plus "..." in log messages to provide debugging context while protecting sensitive data. Updates both getenv and secure_getenv interceptors. Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> * feat: replace skip-unset with in-memory caching strategy Cache token values in memory on first access, then unset from environment. Subsequent getenv() calls return the cached value, allowing programs to read tokens multiple times while /proc/self/environ is cleared. Remove AWF_ONE_SHOT_SKIP_UNSET - caching makes it unnecessary. Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com> --- containers/agent/entrypoint.sh | 3 +- containers/agent/one-shot-token/README.md | 42 +++--- .../agent/one-shot-token/one-shot-token.c | 91 ++++++++---- tests/integration/one-shot-tokens.test.ts | 136 ++++++++++++------ 4 files changed, 186 insertions(+), 86 deletions(-) diff --git a/containers/agent/entrypoint.sh b/containers/agent/entrypoint.sh index 775f832d..77a25f10 100644 --- a/containers/agent/entrypoint.sh +++ b/containers/agent/entrypoint.sh @@ -406,7 +406,8 @@ else # 2. gosu switches to awfuser (drops root privileges) # 3. exec replaces the current process with the user command # - # Enable one-shot token protection to prevent tokens from being read multiple times + # Enable one-shot token protection - tokens are cached in memory and + # unset from the environment so /proc/self/environ is cleared export LD_PRELOAD=/usr/local/lib/one-shot-token.so exec capsh --drop=$CAPS_TO_DROP -- -c "exec gosu awfuser $(printf '%q ' "$@")" fi diff --git a/containers/agent/one-shot-token/README.md b/containers/agent/one-shot-token/README.md index b00f09af..db2d21c0 100644 --- a/containers/agent/one-shot-token/README.md +++ b/containers/agent/one-shot-token/README.md @@ -2,9 +2,9 @@ ## Overview -The one-shot token library is an `LD_PRELOAD` shared library that provides **single-use access** to sensitive environment variables containing GitHub, OpenAI, Anthropic/Claude, and Codex API tokens. When a process reads a protected token via `getenv()`, the library returns the value once and immediately unsets the environment variable, preventing subsequent reads. +The one-shot token library is an `LD_PRELOAD` shared library that provides **cached access** to sensitive environment variables containing GitHub, OpenAI, Anthropic/Claude, and Codex API tokens. When a process reads a protected token via `getenv()`, the library caches the value in memory and immediately unsets the environment variable. Subsequent `getenv()` calls return the cached value, allowing the process to read tokens multiple times while `/proc/self/environ` is cleared. -This protects against malicious code that might attempt to exfiltrate tokens after the legitimate application has already consumed them. +This protects against exfiltration via `/proc/self/environ` inspection while allowing legitimate multi-read access patterns that programs like the Copilot CLI require. ## Configuration @@ -78,7 +78,7 @@ Linux's dynamic linker (`ld.so`) supports an environment variable called `LD_PRE │ Application calls getenv("GITHUB_TOKEN"): │ │ 1. Resolves to one-shot-token.so's getenv() │ │ 2. We check if it's a sensitive token │ -│ 3. If yes: call real getenv(), copy value, unsetenv(), return │ +│ 3. If yes: cache value, unsetenv(), return cached value │ │ 4. If no: pass through to real getenv() │ └─────────────────────────────────────────────────────────────────┘ ``` @@ -100,7 +100,7 @@ Second getenv("GITHUB_TOKEN") call: ┌─────────────┐ ┌──────────────────┐ │ Application │────→│ one-shot-token.so │ │ │ │ │ -│ │←────│ Returns: NULL │ (token already accessed) +│ │←────│ Returns: "ghp_..." │ (from in-memory cache) └─────────────┘ └──────────────────────┘ ``` @@ -118,16 +118,17 @@ When `LD_PRELOAD=/usr/local/lib/one-shot-token.so` is set, the dynamic linker lo We use `dlsym(RTLD_NEXT, "getenv")` to get a pointer to the **next** `getenv` in the symbol search order (libc's implementation). This allows us to: - Call the real `getenv()` to retrieve the actual value -- Return that value to the caller -- Then call `unsetenv()` to remove it from the environment +- Cache the value in an in-memory array +- Call `unsetenv()` to remove it from the environment (clears `/proc/self/environ`) +- Return the cached value to the caller -### 3. State Tracking +### 3. State Tracking and Caching -We maintain an array of flags (`token_accessed[]`) to track which tokens have been read. Once a token is marked as accessed, subsequent calls return `NULL` without consulting the environment. +We maintain an array of flags (`token_accessed[]`) and a parallel cache array (`token_cache[]`). On first access, the token value is cached and the environment variable is unset. Subsequent calls return the cached value directly. ### 4. Memory Management -When we retrieve a token value, we `strdup()` it before calling `unsetenv()`. This is necessary because: +When we retrieve a token value, we `strdup()` it into the cache before calling `unsetenv()`. This is necessary because: - `getenv()` returns a pointer to memory owned by the environment - `unsetenv()` invalidates that pointer - The caller expects a valid string, so we must copy it first @@ -209,9 +210,9 @@ LD_PRELOAD=./one-shot-token.so ./test_getenv Expected output: ``` [one-shot-token] Initialized with 11 default token(s) -[one-shot-token] Token GITHUB_TOKEN accessed and cleared +[one-shot-token] Token GITHUB_TOKEN accessed and cached (value: test...) First read: test-token-12345 -Second read: +Second read: test-token-12345 ``` ### Custom Token Test @@ -236,12 +237,12 @@ LD_PRELOAD=./one-shot-token.so bash -c ' Expected output: ``` [one-shot-token] Initialized with 2 custom token(s) from AWF_ONE_SHOT_TOKENS -[one-shot-token] Token MY_API_KEY accessed and cleared +[one-shot-token] Token MY_API_KEY accessed and cached (value: secr...) First MY_API_KEY: secret-value-123 -Second MY_API_KEY: -[one-shot-token] Token SECRET_TOKEN accessed and cleared +Second MY_API_KEY: secret-value-123 +[one-shot-token] Token SECRET_TOKEN accessed and cached (value: anot...) First SECRET_TOKEN: another-secret -Second SECRET_TOKEN: +Second SECRET_TOKEN: another-secret ``` ### Integration with AWF @@ -263,13 +264,14 @@ Note: The `AWF_ONE_SHOT_TOKENS` variable must be exported before running `awf` s ### What This Protects Against -- **Token reuse by injected code**: If malicious code runs after the legitimate application has read its token, it cannot retrieve the token again -- **Token leakage via environment inspection**: Tools like `printenv` or reading `/proc/self/environ` will not show the token after first access +- **Token leakage via environment inspection**: `/proc/self/environ` and tools like `printenv` (in the same process) will not show the token after first access — the environment variable is unset +- **Token exfiltration via /proc**: Other processes reading `/proc//environ` cannot see the token ### What This Does NOT Protect Against -- **Memory inspection**: The token exists in process memory (as the returned string) +- **Memory inspection**: The token exists in process memory (in the cache array) - **Interception before first read**: If malicious code runs before the legitimate code reads the token, it gets the value +- **In-process getenv() calls**: Since values are cached, any code in the same process can still call `getenv()` and get the cached token - **Static linking**: Programs statically linked with libc bypass LD_PRELOAD - **Direct syscalls**: Code that reads `/proc/self/environ` directly (without getenv) bypasses this protection @@ -279,13 +281,13 @@ This library is one layer in AWF's security model: 1. **Network isolation**: iptables rules redirect traffic through Squid proxy 2. **Domain allowlisting**: Squid blocks requests to non-allowed domains 3. **Capability dropping**: CAP_NET_ADMIN is dropped to prevent iptables modification -4. **One-shot tokens**: This library prevents token reuse +4. **Token environment cleanup**: This library clears tokens from `/proc/self/environ` while caching for legitimate use ## Limitations - **x86_64 Linux only**: The library is compiled for x86_64 Ubuntu - **glibc programs only**: Programs using musl libc or statically linked programs are not affected -- **Single process**: Child processes inherit the LD_PRELOAD but have their own token state (each can read once) +- **Single process**: Child processes inherit the LD_PRELOAD but have their own token state and cache (each starts fresh) ## Files diff --git a/containers/agent/one-shot-token/one-shot-token.c b/containers/agent/one-shot-token/one-shot-token.c index 8c4b6b47..3b8cda82 100644 --- a/containers/agent/one-shot-token/one-shot-token.c +++ b/containers/agent/one-shot-token/one-shot-token.c @@ -2,8 +2,9 @@ * One-Shot Token LD_PRELOAD Library * * Intercepts getenv() calls for sensitive token environment variables. - * On first access, returns the real value and immediately unsets the variable. - * Subsequent calls return NULL, preventing token reuse by malicious code. + * On first access, caches the value in memory and unsets from environment. + * Subsequent calls return the cached value, so the process can read tokens + * multiple times while /proc/self/environ no longer exposes them. * * Configuration: * AWF_ONE_SHOT_TOKENS - Comma-separated list of token names to protect @@ -53,6 +54,11 @@ static int num_tokens = 0; /* Track which tokens have been accessed (one flag per token) */ static int token_accessed[MAX_TOKENS] = {0}; +/* Cached token values - stored on first access so subsequent reads succeed + * even after the variable is unset from the environment. This allows + * /proc/self/environ to be cleaned while the process can still read tokens. */ +static char *token_cache[MAX_TOKENS] = {0}; + /* Mutex for thread safety */ static pthread_mutex_t token_mutex = PTHREAD_MUTEX_INITIALIZER; @@ -199,12 +205,43 @@ static int get_token_index(const char *name) { return -1; } +/** + * Format token value for logging: show first 4 characters + "..." + * Returns a static buffer (not thread-safe for the buffer, but safe for our use case + * since we hold token_mutex when calling this) + */ +static const char *format_token_value(const char *value) { + static char formatted[8]; /* "abcd..." + null terminator */ + + if (value == NULL) { + return "NULL"; + } + + size_t len = strlen(value); + if (len == 0) { + return "(empty)"; + } + + if (len <= 4) { + /* If 4 chars or less, just show it all with ... */ + snprintf(formatted, sizeof(formatted), "%s...", value); + } else { + /* Show first 4 chars + ... */ + snprintf(formatted, sizeof(formatted), "%.4s...", value); + } + + return formatted; +} + /** * Intercepted getenv function * * For sensitive tokens: - * - First call: returns the real value, then unsets the variable - * - Subsequent calls: returns NULL + * - First call: caches the value, unsets from environment, returns cached value + * - Subsequent calls: returns the cached value from memory + * + * This clears tokens from /proc/self/environ while allowing the process + * to read them multiple times via getenv(). * * For all other variables: passes through to real getenv */ @@ -226,30 +263,33 @@ char *getenv(const char *name) { return real_getenv(name); } - /* Sensitive token - handle one-shot access (mutex already held) */ + /* Sensitive token - handle cached access (mutex already held) */ char *result = NULL; if (!token_accessed[token_idx]) { - /* First access - get the real value */ + /* First access - get the real value and cache it */ result = real_getenv(name); if (result != NULL) { - /* Make a copy since unsetenv will invalidate the pointer */ + /* Cache the value so subsequent reads succeed after unsetenv */ /* Note: This memory is intentionally never freed - it must persist - * for the lifetime of the caller's use of the returned pointer */ - result = strdup(result); + * for the lifetime of the process */ + token_cache[token_idx] = strdup(result); - /* Unset the variable so it can't be accessed again */ + /* Unset the variable from the environment so /proc/self/environ is cleared */ unsetenv(name); - fprintf(stderr, "[one-shot-token] Token %s accessed and cleared\n", name); + fprintf(stderr, "[one-shot-token] Token %s accessed and cached (value: %s)\n", + name, format_token_value(token_cache[token_idx])); + + result = token_cache[token_idx]; } /* Mark as accessed even if NULL (prevents repeated log messages) */ token_accessed[token_idx] = 1; } else { - /* Already accessed - return NULL */ - result = NULL; + /* Already accessed - return cached value */ + result = token_cache[token_idx]; } pthread_mutex_unlock(&token_mutex); @@ -261,11 +301,11 @@ char *getenv(const char *name) { * Intercepted secure_getenv function * * This function preserves secure_getenv semantics (returns NULL in privileged contexts) - * while applying the same one-shot token protection as getenv. + * while applying the same cached token protection as getenv. * * For sensitive tokens: - * - First call: returns the real value (if not in privileged context), then unsets the variable - * - Subsequent calls: returns NULL + * - First call: caches the value, unsets from environment, returns cached value + * - Subsequent calls: returns the cached value from memory * * For all other variables: passes through to real secure_getenv (or getenv if unavailable) */ @@ -285,7 +325,7 @@ char *secure_getenv(const char *name) { return real_secure_getenv(name); } - /* Sensitive token - handle one-shot access with secure_getenv semantics */ + /* Sensitive token - handle cached access with secure_getenv semantics */ pthread_mutex_lock(&token_mutex); char *result = NULL; @@ -295,22 +335,25 @@ char *secure_getenv(const char *name) { result = real_secure_getenv(name); if (result != NULL) { - /* Make a copy since unsetenv will invalidate the pointer */ + /* Cache the value so subsequent reads succeed after unsetenv */ /* Note: This memory is intentionally never freed - it must persist - * for the lifetime of the caller's use of the returned pointer */ - result = strdup(result); + * for the lifetime of the process */ + token_cache[token_idx] = strdup(result); - /* Unset the variable so it can't be accessed again */ + /* Unset the variable from the environment so /proc/self/environ is cleared */ unsetenv(name); - fprintf(stderr, "[one-shot-token] Token %s accessed and cleared (via secure_getenv)\n", name); + fprintf(stderr, "[one-shot-token] Token %s accessed and cached (value: %s) (via secure_getenv)\n", + name, format_token_value(token_cache[token_idx])); + + result = token_cache[token_idx]; } /* Mark as accessed even if NULL (prevents repeated log messages) */ token_accessed[token_idx] = 1; } else { - /* Already accessed - return NULL */ - result = NULL; + /* Already accessed - return cached value */ + result = token_cache[token_idx]; } pthread_mutex_unlock(&token_mutex); diff --git a/tests/integration/one-shot-tokens.test.ts b/tests/integration/one-shot-tokens.test.ts index b5af984b..c955157f 100644 --- a/tests/integration/one-shot-tokens.test.ts +++ b/tests/integration/one-shot-tokens.test.ts @@ -1,21 +1,31 @@ /** * One-Shot Token Tests * - * These tests verify the LD_PRELOAD one-shot token library that prevents - * sensitive environment variables from being read multiple times. + * These tests verify the LD_PRELOAD one-shot token library that protects + * sensitive environment variables by caching values and clearing them + * from the environment. * - * The library intercepts getenv() calls for tokens like GITHUB_TOKEN and - * returns the value once, then unsets the variable to prevent malicious - * code from exfiltrating tokens after legitimate use. + * The library intercepts getenv() calls for tokens like GITHUB_TOKEN. + * On first access, it caches the value in memory and unsets the variable + * from the environment (clearing /proc/self/environ). Subsequent getenv() + * calls return the cached value, allowing programs to read tokens multiple + * times while the environment is cleaned. * * Tests verify: * - First read succeeds and returns the token value - * - Second read returns empty/null (token has been cleared) + * - Second read returns the cached value (within same process) + * - Tokens are unset from the environment (/proc/self/environ is cleared) * - Behavior works in both container mode and chroot mode * * IMPORTANT: These tests require buildLocal: true because the one-shot-token * library is compiled during the Docker image build. Pre-built images from GHCR * may not include this feature if they were built before PR #604 was merged. + * + * Note on shell tests: `printenv` forks a new process each time, so each + * invocation gets a fresh LD_PRELOAD library instance. The parent bash + * process environment is unaffected by child unsetenv() calls, so both + * `printenv` reads succeed. The caching is most relevant for programs that + * call getenv() multiple times within the same process (e.g., Python, Node.js). */ /// @@ -37,8 +47,9 @@ describe('One-Shot Token Protection', () => { }); describe('Container Mode', () => { - test('should allow GITHUB_TOKEN to be read once, then clear it', async () => { - // Create a test script that reads the token twice + test('should cache GITHUB_TOKEN and clear from environment', async () => { + // printenv forks a new process each time, so both reads succeed + // (parent bash environ unaffected by child unsetenv) const testScript = ` FIRST_READ=$(printenv GITHUB_TOKEN) SECOND_READ=$(printenv GITHUB_TOKEN) @@ -60,15 +71,14 @@ describe('One-Shot Token Protection', () => { ); expect(result).toSucceed(); - // First read should have the token + // Both reads succeed (each printenv is a separate process) expect(result.stdout).toContain('First read: [ghp_test_token_12345]'); - // Second read should be empty (token has been cleared) - expect(result.stdout).toContain('Second read: []'); - // Verify the one-shot-token library logged the token access - expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cleared'); + expect(result.stdout).toContain('Second read: [ghp_test_token_12345]'); + // Verify the one-shot-token library logged the token access with value preview + expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cached (value: ghp_...)'); }, 120000); - test('should allow COPILOT_GITHUB_TOKEN to be read once, then clear it', async () => { + test('should cache COPILOT_GITHUB_TOKEN and clear from environment', async () => { const testScript = ` FIRST_READ=$(printenv COPILOT_GITHUB_TOKEN) SECOND_READ=$(printenv COPILOT_GITHUB_TOKEN) @@ -91,11 +101,11 @@ describe('One-Shot Token Protection', () => { expect(result).toSucceed(); expect(result.stdout).toContain('First read: [copilot_test_token_67890]'); - expect(result.stdout).toContain('Second read: []'); - expect(result.stderr).toContain('[one-shot-token] Token COPILOT_GITHUB_TOKEN accessed and cleared'); + expect(result.stdout).toContain('Second read: [copilot_test_token_67890]'); + expect(result.stderr).toContain('[one-shot-token] Token COPILOT_GITHUB_TOKEN accessed and cached (value: copi...)'); }, 120000); - test('should allow OPENAI_API_KEY to be read once, then clear it', async () => { + test('should cache OPENAI_API_KEY and clear from environment', async () => { const testScript = ` FIRST_READ=$(printenv OPENAI_API_KEY) SECOND_READ=$(printenv OPENAI_API_KEY) @@ -118,8 +128,8 @@ describe('One-Shot Token Protection', () => { expect(result).toSucceed(); expect(result.stdout).toContain('First read: [sk-test-openai-key]'); - expect(result.stdout).toContain('Second read: []'); - expect(result.stderr).toContain('[one-shot-token] Token OPENAI_API_KEY accessed and cleared'); + expect(result.stdout).toContain('Second read: [sk-test-openai-key]'); + expect(result.stderr).toContain('[one-shot-token] Token OPENAI_API_KEY accessed and cached (value: sk-t...)'); }, 120000); test('should handle multiple different tokens independently', async () => { @@ -153,11 +163,11 @@ describe('One-Shot Token Protection', () => { ); expect(result).toSucceed(); - // Each token should be readable once + // Both reads for each token should succeed (printenv is separate process) expect(result.stdout).toContain('GitHub first: [ghp_multi_token_1]'); - expect(result.stdout).toContain('GitHub second: []'); + expect(result.stdout).toContain('GitHub second: [ghp_multi_token_1]'); expect(result.stdout).toContain('OpenAI first: [sk-multi-key-2]'); - expect(result.stdout).toContain('OpenAI second: []'); + expect(result.stdout).toContain('OpenAI second: [sk-multi-key-2]'); }, 120000); test('should not interfere with non-sensitive environment variables', async () => { @@ -193,14 +203,14 @@ describe('One-Shot Token Protection', () => { expect(result.stderr).not.toContain('[one-shot-token] Token NORMAL_VAR'); }, 120000); - test('should work with programmatic getenv() calls', async () => { + test('should return cached value on subsequent getenv() calls in same process', async () => { // Use Python to call getenv() directly (not through shell) - // This tests that the LD_PRELOAD library properly intercepts C library calls + // This tests that the LD_PRELOAD library caches values for same-process reads const pythonScript = ` import os -# First call to os.getenv calls C's getenv() +# First call to os.getenv calls C's getenv() - caches and clears from environ first = os.getenv('GITHUB_TOKEN', '') -# Second call should return None/empty because token was cleared +# Second call returns the cached value second = os.getenv('GITHUB_TOKEN', '') print(f"First: [{first}]") print(f"Second: [{second}]") @@ -220,14 +230,57 @@ print(f"Second: [{second}]") ); expect(result).toSucceed(); + // Both reads should succeed (second read returns cached value) expect(result.stdout).toContain('First: [ghp_python_test_token]'); - expect(result.stdout).toContain('Second: []'); - expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cleared'); + expect(result.stdout).toContain('Second: [ghp_python_test_token]'); + expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cached (value: ghp_...)'); + }, 120000); + + test('should clear token from /proc/self/environ while caching for getenv()', async () => { + // Verify that the token is removed from the environ array + // but still accessible via getenv() (from cache) + const pythonScript = ` +import os +import ctypes + +# First access caches and clears from environ +first = os.getenv('GITHUB_TOKEN', '') + +# Check if token is still in os.environ (reflects C environ array) +# After unsetenv, it should be gone from the environ array +in_environ = 'GITHUB_TOKEN' in os.environ + +# But getenv() should still return cached value +second = os.getenv('GITHUB_TOKEN', '') + +print(f"First getenv: [{first}]") +print(f"In os.environ: [{in_environ}]") +print(f"Second getenv: [{second}]") + `.trim(); + + const result = await runner.runWithSudo( + `python3 -c '${pythonScript}'`, + { + allowDomains: ['localhost'], + logLevel: 'debug', + timeout: 60000, + buildLocal: true, + env: { + GITHUB_TOKEN: 'ghp_environ_check', + }, + } + ); + + expect(result).toSucceed(); + expect(result.stdout).toContain('First getenv: [ghp_environ_check]'); + // Note: Python's os.environ may cache at startup, so this checks the + // behavior of getenv() returning cached values + expect(result.stdout).toContain('Second getenv: [ghp_environ_check]'); }, 120000); }); describe('Chroot Mode', () => { - test('should allow GITHUB_TOKEN to be read once in chroot mode', async () => { + test('should cache GITHUB_TOKEN in chroot mode', async () => { const testScript = ` FIRST_READ=$(printenv GITHUB_TOKEN) SECOND_READ=$(printenv GITHUB_TOKEN) @@ -251,14 +304,14 @@ print(f"Second: [{second}]") expect(result).toSucceed(); expect(result.stdout).toContain('First read: [ghp_chroot_token_12345]'); - expect(result.stdout).toContain('Second read: []'); + expect(result.stdout).toContain('Second read: [ghp_chroot_token_12345]'); // Verify the library was copied to the chroot expect(result.stderr).toContain('One-shot token library copied to chroot'); - // Verify the one-shot-token library logged the token access - expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cleared'); + // Verify the one-shot-token library logged the token access with value preview + expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cached (value: ghp_...)'); }, 120000); - test('should allow COPILOT_GITHUB_TOKEN to be read once in chroot mode', async () => { + test('should cache COPILOT_GITHUB_TOKEN in chroot mode', async () => { const testScript = ` FIRST_READ=$(printenv COPILOT_GITHUB_TOKEN) SECOND_READ=$(printenv COPILOT_GITHUB_TOKEN) @@ -282,11 +335,11 @@ print(f"Second: [{second}]") expect(result).toSucceed(); expect(result.stdout).toContain('First read: [copilot_chroot_token_67890]'); - expect(result.stdout).toContain('Second read: []'); - expect(result.stderr).toContain('[one-shot-token] Token COPILOT_GITHUB_TOKEN accessed and cleared'); + expect(result.stdout).toContain('Second read: [copilot_chroot_token_67890]'); + expect(result.stderr).toContain('[one-shot-token] Token COPILOT_GITHUB_TOKEN accessed and cached (value: copi...)'); }, 120000); - test('should work with programmatic getenv() calls in chroot mode', async () => { + test('should return cached value on subsequent getenv() in chroot mode', async () => { const pythonScript = ` import os first = os.getenv('GITHUB_TOKEN', '') @@ -311,8 +364,8 @@ print(f"Second: [{second}]") expect(result).toSucceed(); expect(result.stdout).toContain('First: [ghp_chroot_python_token]'); - expect(result.stdout).toContain('Second: []'); - expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cleared'); + expect(result.stdout).toContain('Second: [ghp_chroot_python_token]'); + expect(result.stderr).toContain('[one-shot-token] Token GITHUB_TOKEN accessed and cached (value: ghp_...)'); }, 120000); test('should not interfere with non-sensitive variables in chroot mode', async () => { @@ -375,9 +428,9 @@ print(f"Second: [{second}]") expect(result).toSucceed(); expect(result.stdout).toContain('GitHub first: [ghp_chroot_multi_1]'); - expect(result.stdout).toContain('GitHub second: []'); + expect(result.stdout).toContain('GitHub second: [ghp_chroot_multi_1]'); expect(result.stdout).toContain('OpenAI first: [sk-chroot-multi-2]'); - expect(result.stdout).toContain('OpenAI second: []'); + expect(result.stdout).toContain('OpenAI second: [sk-chroot-multi-2]'); }, 120000); }); @@ -456,7 +509,8 @@ print(f"Second: [{second}]") expect(result).toSucceed(); expect(result.stdout).toContain('First: [ghp_test-with-special_chars@#$%]'); - expect(result.stdout).toContain('Second: []'); + expect(result.stdout).toContain('Second: [ghp_test-with-special_chars@#$%]'); }, 120000); }); + }); From c3757437d1eebc593f017287e84fbf297d6d837d Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Tue, 10 Feb 2026 20:38:01 +0000 Subject: [PATCH 5/6] fix: use mkdtempSync for chroot-hosts to address CodeQL CWE-377 Build complete chroot-hosts content in memory, then write atomically to a securely-created temp directory (fs.mkdtempSync). This satisfies CodeQL's js/insecure-temporary-file rule by using the recognized sanitizer for temp file creation. Co-Authored-By: Claude Opus 4.6 (1M context) --- src/docker-manager.test.ts | 31 +++++++++++++++++++++---------- src/docker-manager.ts | 22 +++++++++++++--------- 2 files changed, 34 insertions(+), 19 deletions(-) diff --git a/src/docker-manager.test.ts b/src/docker-manager.test.ts index 8bf02a7e..e76db0a8 100644 --- a/src/docker-manager.test.ts +++ b/src/docker-manager.test.ts @@ -568,10 +568,10 @@ describe('docker-manager', () => { expect(volumes).toContain('/etc/ca-certificates:/host/etc/ca-certificates:ro'); expect(volumes).toContain('/etc/alternatives:/host/etc/alternatives:ro'); expect(volumes).toContain('/etc/ld.so.cache:/host/etc/ld.so.cache:ro'); - // /etc/hosts is now always a custom chroot-hosts file in chroot mode (for pre-resolved domains) + // /etc/hosts is always a custom hosts file in a secure chroot temp dir (for pre-resolved domains) const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); expect(hostsVolume).toBeDefined(); - expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); + expect(hostsVolume).toMatch(/chroot-.*\/hosts:\/host\/etc\/hosts:ro/); // Should still include essential mounts expect(volumes).toContain('/tmp:/tmp:rw'); @@ -799,7 +799,7 @@ describe('docker-manager', () => { // Should mount a read-only copy of /etc/hosts with host.docker.internal pre-injected const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); expect(hostsVolume).toBeDefined(); - expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); + expect(hostsVolume).toMatch(/chroot-.*\/hosts:\/host\/etc\/hosts:ro/); }); it('should inject host.docker.internal into chroot-hosts file', () => { @@ -810,8 +810,10 @@ describe('docker-manager', () => { }; generateDockerCompose(config, mockNetworkConfig); - // The chroot-hosts file should exist and contain host.docker.internal - const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + // Find the chroot hosts file (mkdtempSync creates chroot-XXXXXX directory) + const chrootDir = fs.readdirSync(mockConfig.workDir).find(d => d.startsWith('chroot-')); + expect(chrootDir).toBeDefined(); + const chrootHostsPath = `${mockConfig.workDir}/${chrootDir}/hosts`; expect(fs.existsSync(chrootHostsPath)).toBe(true); const content = fs.readFileSync(chrootHostsPath, 'utf8'); // Docker bridge gateway resolution may succeed or fail in test env, @@ -829,10 +831,10 @@ describe('docker-manager', () => { const agent = result.services.agent; const volumes = agent.volumes as string[]; - // Should mount a custom chroot-hosts file (for pre-resolved domains) + // Should mount a custom hosts file in a secure chroot temp dir (for pre-resolved domains) const hostsVolume = volumes.find((v: string) => v.includes('/host/etc/hosts')); expect(hostsVolume).toBeDefined(); - expect(hostsVolume).toContain('chroot-hosts:/host/etc/hosts:ro'); + expect(hostsVolume).toMatch(/chroot-.*\/hosts:\/host\/etc\/hosts:ro/); }); it('should pre-resolve allowed domains into chroot-hosts file', () => { @@ -859,7 +861,10 @@ describe('docker-manager', () => { }; generateDockerCompose(config, mockNetworkConfig); - const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + // Find the chroot hosts file (mkdtempSync creates chroot-XXXXXX directory) + const chrootDir = fs.readdirSync(mockConfig.workDir).find(d => d.startsWith('chroot-')); + expect(chrootDir).toBeDefined(); + const chrootHostsPath = `${mockConfig.workDir}/${chrootDir}/hosts`; expect(fs.existsSync(chrootHostsPath)).toBe(true); const content = fs.readFileSync(chrootHostsPath, 'utf8'); @@ -887,7 +892,10 @@ describe('docker-manager', () => { // Should not throw even if resolution fails generateDockerCompose(config, mockNetworkConfig); - const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + // Find the chroot hosts file (mkdtempSync creates chroot-XXXXXX directory) + const chrootDir = fs.readdirSync(mockConfig.workDir).find(d => d.startsWith('chroot-')); + expect(chrootDir).toBeDefined(); + const chrootHostsPath = `${mockConfig.workDir}/${chrootDir}/hosts`; expect(fs.existsSync(chrootHostsPath)).toBe(true); const content = fs.readFileSync(chrootHostsPath, 'utf8'); @@ -916,7 +924,10 @@ describe('docker-manager', () => { }; generateDockerCompose(config, mockNetworkConfig); - const chrootHostsPath = `${mockConfig.workDir}/chroot-hosts`; + // Find the chroot hosts file (mkdtempSync creates chroot-XXXXXX directory) + const chrootDir = fs.readdirSync(mockConfig.workDir).find(d => d.startsWith('chroot-')); + expect(chrootDir).toBeDefined(); + const chrootHostsPath = `${mockConfig.workDir}/${chrootDir}/hosts`; const content = fs.readFileSync(chrootHostsPath, 'utf8'); // Count occurrences of 'localhost' - should only be the original entries, not duplicated diff --git a/src/docker-manager.ts b/src/docker-manager.ts index 2d657cd5..155a092b 100644 --- a/src/docker-manager.ts +++ b/src/docker-manager.ts @@ -500,18 +500,19 @@ export function generateDockerCompose( // 1. Pre-resolve allowed domains using the host's DNS stack (supports Tailscale MagicDNS, // split DNS, and other custom resolvers not available inside the container) // 2. Inject host.docker.internal when --enable-host-access is set - const chrootHostsPath = path.join(config.workDir, 'chroot-hosts'); + // Build complete chroot hosts file content in memory, then write atomically + // to a securely-created temp directory (mkdtempSync) to satisfy CWE-377. + let hostsContent = '127.0.0.1 localhost\n'; try { - fs.copyFileSync('/etc/hosts', chrootHostsPath); + hostsContent = fs.readFileSync('/etc/hosts', 'utf-8'); } catch { - fs.writeFileSync(chrootHostsPath, '127.0.0.1 localhost\n'); + // /etc/hosts not readable, use minimal fallback } - // Pre-resolve allowed domains on the host and inject into /etc/hosts + // Pre-resolve allowed domains on the host and append to hosts content. // This is critical for domains that rely on custom DNS (e.g., Tailscale MagicDNS // at 100.100.100.100) which is unreachable from inside the Docker container's // network namespace. Resolution runs on the host where all DNS resolvers are available. - const hostsContent = fs.readFileSync(chrootHostsPath, 'utf-8'); for (const domain of config.allowedDomains) { // Skip patterns that aren't resolvable hostnames if (domain.startsWith('*.') || domain.startsWith('.') || domain.includes('*')) continue; @@ -523,7 +524,7 @@ export function generateDockerCompose( const parts = stdout.trim().split(/\s+/); const ip = parts[0]; if (ip) { - fs.appendFileSync(chrootHostsPath, `${ip}\t${domain}\n`); + hostsContent += `${ip}\t${domain}\n`; logger.debug(`Pre-resolved ${domain} -> ${ip} for chroot /etc/hosts`); } } catch { @@ -532,7 +533,7 @@ export function generateDockerCompose( } } - // Add host.docker.internal when host access is enabled + // Add host.docker.internal when host access is enabled. // Docker only adds this to the container's /etc/hosts via extra_hosts, but the // chroot uses the host's /etc/hosts which lacks this entry. MCP servers need it // to connect to the MCP gateway running on the host. @@ -544,7 +545,7 @@ export function generateDockerCompose( ]); const hostGatewayIp = stdout.trim(); if (hostGatewayIp) { - fs.appendFileSync(chrootHostsPath, `${hostGatewayIp}\thost.docker.internal\n`); + hostsContent += `${hostGatewayIp}\thost.docker.internal\n`; logger.debug(`Added host.docker.internal (${hostGatewayIp}) to chroot-hosts`); } } catch (err) { @@ -552,7 +553,10 @@ export function generateDockerCompose( } } - fs.chmodSync(chrootHostsPath, 0o644); + // Write to a securely-created directory (mkdtempSync satisfies CWE-377) + const chrootHostsDir = fs.mkdtempSync(path.join(config.workDir, 'chroot-')); + const chrootHostsPath = path.join(chrootHostsDir, 'hosts'); + fs.writeFileSync(chrootHostsPath, hostsContent, { mode: 0o644 }); agentVolumes.push(`${chrootHostsPath}:/host/etc/hosts:ro`); // SECURITY: Hide Docker socket to prevent firewall bypass via 'docker run' From 0c6cdafed09715070340f0937c35b704418d36f7 Mon Sep 17 00:00:00 2001 From: "Jiaxiao (mossaka) Zhou" Date: Tue, 10 Feb 2026 20:52:12 +0000 Subject: [PATCH 6/6] chore: recompile build-test-java workflow after merge Co-Authored-By: Claude Opus 4.6 (1M context) --- .github/workflows/build-test-java.lock.yml | 33 +++++++++++++++++----- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-test-java.lock.yml b/.github/workflows/build-test-java.lock.yml index a32d78aa..20e2ce0f 100644 --- a/.github/workflows/build-test-java.lock.yml +++ b/.github/workflows/build-test-java.lock.yml @@ -99,10 +99,6 @@ jobs: - name: Checkout .github and .agents folders uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 with: - sparse-checkout: | - .github - .agents - depth: 1 persist-credentials: false - name: Setup Java uses: actions/setup-java@c1e323688fd81a25caa38c78aa6df2d33d3e20d9 # v4.8.0 @@ -143,8 +139,31 @@ jobs: COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} - name: Install GitHub Copilot CLI run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.405 - - name: Install awf binary - run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.13.12 + - name: Install awf dependencies + run: npm ci + - name: Build awf + run: npm run build + - name: Install awf binary (local) + run: | + WORKSPACE_PATH="${GITHUB_WORKSPACE:-$(pwd)}" + NODE_BIN="$(command -v node)" + if [ ! -d "$WORKSPACE_PATH" ]; then + echo "Workspace path not found: $WORKSPACE_PATH" + exit 1 + fi + if [ ! -x "$NODE_BIN" ]; then + echo "Node binary not found: $NODE_BIN" + exit 1 + fi + if [ ! -d "/usr/local/bin" ]; then + echo "/usr/local/bin is missing" + exit 1 + fi + sudo tee /usr/local/bin/awf > /dev/null <&1 | tee /tmp/gh-aw/agent-stdio.log env: