From 007421f82ec98ceec34984f63bbdf8c62baf4f89 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Mon, 12 Jan 2026 13:00:42 +0000 Subject: [PATCH 01/13] feat: add AI Bridge configuration support. --- registry/coder-labs/modules/codex/main.tf | 26 +++++++++++++++++++ .../modules/codex/scripts/install.sh | 11 ++++++++ 2 files changed, 37 insertions(+) diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index 203518395..93341d713 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -71,6 +71,12 @@ variable "cli_app_display_name" { default = "Codex CLI" } +variable "enable_coder_aibridge" { + type = bool + description = "Use AI Bridge for Codex. https://coder.com/docs/ai-coder/ai-bridge" + default = false +} + variable "install_codex" { type = bool description = "Whether to install Codex." @@ -155,12 +161,31 @@ resource "coder_env" "openai_api_key" { value = var.openai_api_key } +resource "coder_env" "coder_aibridge_session_token" { + count = var.enable_coder_aibridge ? 1 : 0 + agent_id = var.agent_id + name = "CODER_AIBRIDGE_SESSION_TOKEN" + value = data.coder_workspace_owner.me.session_token +} + locals { workdir = trimsuffix(var.workdir, "/") app_slug = "codex" install_script = file("${path.module}/scripts/install.sh") start_script = file("${path.module}/scripts/start.sh") module_dir_name = ".codex-module" + aibridge_config = <<-EOF + [model_providers.aibridge] + name = "AI Bridge" + base_url = "${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1" + env_key = "CODER_AIBRIDGE_SESSION_TOKEN" # can be injected as `coder_env` with value `data.coder_workspace_owner.me.session_token` + wire_api = "responses" + + [profiles.aibridge] + model_provider = "aibridge" + model = "${var.codex_model}" + model_reasoning_effort = "medium" # this can also be extracted as a module input + EOF } module "agentapi" { @@ -211,6 +236,7 @@ module "agentapi" { ARG_INSTALL='${var.install_codex}' \ ARG_CODEX_VERSION='${var.codex_version}' \ ARG_BASE_CONFIG_TOML='${base64encode(var.base_config_toml)}' \ + ARG_AIBRIDGE_CONFIG='${base64encode(var.enable_coder_aibridge ? 
local.aibridge_config : "")}' \ ARG_ADDITIONAL_MCP_SERVERS='${base64encode(var.additional_mcp_servers)}' \ ARG_CODER_MCP_APP_STATUS_SLUG='${local.app_slug}' \ ARG_CODEX_START_DIRECTORY='${local.workdir}' \ diff --git a/registry/coder-labs/modules/codex/scripts/install.sh b/registry/coder-labs/modules/codex/scripts/install.sh index 62842165e..09e40a581 100644 --- a/registry/coder-labs/modules/codex/scripts/install.sh +++ b/registry/coder-labs/modules/codex/scripts/install.sh @@ -13,6 +13,7 @@ set -o nounset ARG_BASE_CONFIG_TOML=$(echo -n "$ARG_BASE_CONFIG_TOML" | base64 -d) ARG_ADDITIONAL_MCP_SERVERS=$(echo -n "$ARG_ADDITIONAL_MCP_SERVERS" | base64 -d) ARG_CODEX_INSTRUCTION_PROMPT=$(echo -n "$ARG_CODEX_INSTRUCTION_PROMPT" | base64 -d) +ARG_AIBRIDGE_CONFIG=$(echo -n "$ARG_AIBRIDGE_CONFIG" | base64 -d) echo "=== Codex Module Configuration ===" printf "Install Codex: %s\n" "$ARG_INSTALL" @@ -127,6 +128,15 @@ EOF fi } +append_aibridge_config_section() { + local config_path="$1" + + if [ -n "$ARG_AIBRIDGE_CONFIG" ]; then + printf "Adding AI Bridge configuration\n" + echo -e "\n# AI Bridge Configuration\n$ARG_AIBRIDGE_CONFIG" >> "$config_path" + fi +} + function populate_config_toml() { CONFIG_PATH="$HOME/.codex/config.toml" mkdir -p "$(dirname "$CONFIG_PATH")" @@ -140,6 +150,7 @@ function populate_config_toml() { fi append_mcp_servers_section "$CONFIG_PATH" + append_aibridge_config_section "$CONFIG_PATH" } function add_instruction_prompt_if_exists() { From 667965ee5fa1808a8be80aa1a2b459b8a1c2a899 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Mon, 12 Jan 2026 13:16:53 +0000 Subject: [PATCH 02/13] wip --- registry/coder-labs/modules/codex/main.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index 93341d713..d87951e5d 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -178,13 +178,13 @@ locals { [model_providers.aibridge] name = "AI Bridge" base_url = "${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1" - env_key = "CODER_AIBRIDGE_SESSION_TOKEN" # can be injected as `coder_env` with value `data.coder_workspace_owner.me.session_token` + env_key = "CODER_AIBRIDGE_SESSION_TOKEN" wire_api = "responses" [profiles.aibridge] model_provider = "aibridge" model = "${var.codex_model}" - model_reasoning_effort = "medium" # this can also be extracted as a module input + model_reasoning_effort = "medium" EOF } From 5eecdbbc69829e476a862bb2b606622a9c7235ff Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Mon, 12 Jan 2026 13:41:56 +0000 Subject: [PATCH 03/13] feat: add support for enabling Coder AI Bridge in configuration --- registry/coder-labs/modules/codex/main.tf | 16 ++++++++++++++-- .../coder-labs/modules/codex/scripts/install.sh | 8 +++++++- .../coder-labs/modules/codex/scripts/start.sh | 8 +++++++- 3 files changed, 28 insertions(+), 4 deletions(-) diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index d87951e5d..a4ebac169 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -77,6 +77,16 @@ variable "enable_coder_aibridge" { default = false } +variable "model_reasoning_effort" { + type = string + description = "The reasoning effort for the AI Bridge model. One of: none, low, medium, high." 
+ default = "medium" + validation { + condition = contains(["none", "low", "medium", "high"], var.model_reasoning_effort) + error_message = "model_reasoning_effort must be one of: none, low, medium, high." + } +} + variable "install_codex" { type = bool description = "Whether to install Codex." @@ -122,7 +132,7 @@ variable "agentapi_version" { variable "codex_model" { type = string description = "The model for Codex to use. Defaults to gpt-5.1-codex-max." - default = "" + default = "gpt-5.1-codex-max" } variable "pre_install_script" { @@ -184,7 +194,7 @@ locals { [profiles.aibridge] model_provider = "aibridge" model = "${var.codex_model}" - model_reasoning_effort = "medium" + model_reasoning_effort = "${var.model_reasoning_effort}" EOF } @@ -221,6 +231,7 @@ module "agentapi" { ARG_CODEX_START_DIRECTORY='${local.workdir}' \ ARG_CODEX_TASK_PROMPT='${base64encode(var.ai_prompt)}' \ ARG_CONTINUE='${var.continue}' \ + ARG_ENABLE_CODER_AIBRIDGE='${var.enable_coder_aibridge}' \ /tmp/start.sh EOT @@ -236,6 +247,7 @@ module "agentapi" { ARG_INSTALL='${var.install_codex}' \ ARG_CODEX_VERSION='${var.codex_version}' \ ARG_BASE_CONFIG_TOML='${base64encode(var.base_config_toml)}' \ + ARG_ENABLE_CODER_AIBRIDGE='${var.enable_coder_aibridge}' \ ARG_AIBRIDGE_CONFIG='${base64encode(var.enable_coder_aibridge ? local.aibridge_config : "")}' \ ARG_ADDITIONAL_MCP_SERVERS='${base64encode(var.additional_mcp_servers)}' \ ARG_CODER_MCP_APP_STATUS_SLUG='${local.app_slug}' \ diff --git a/registry/coder-labs/modules/codex/scripts/install.sh b/registry/coder-labs/modules/codex/scripts/install.sh index 09e40a581..56452c3af 100644 --- a/registry/coder-labs/modules/codex/scripts/install.sh +++ b/registry/coder-labs/modules/codex/scripts/install.sh @@ -13,6 +13,7 @@ set -o nounset ARG_BASE_CONFIG_TOML=$(echo -n "$ARG_BASE_CONFIG_TOML" | base64 -d) ARG_ADDITIONAL_MCP_SERVERS=$(echo -n "$ARG_ADDITIONAL_MCP_SERVERS" | base64 -d) ARG_CODEX_INSTRUCTION_PROMPT=$(echo -n "$ARG_CODEX_INSTRUCTION_PROMPT" | base64 -d) +ARG_ENABLE_CODER_AIBRIDGE=${ARG_ENABLE_CODER_AIBRIDGE:-false} ARG_AIBRIDGE_CONFIG=$(echo -n "$ARG_AIBRIDGE_CONFIG" | base64 -d) echo "=== Codex Module Configuration ===" @@ -25,6 +26,7 @@ printf "Has Additional MCP: %s\n" "$([ -n "$ARG_ADDITIONAL_MCP_SERVERS" ] && ech printf "Has System Prompt: %s\n" "$([ -n "$ARG_CODEX_INSTRUCTION_PROMPT" ] && echo "Yes" || echo "No")" printf "OpenAI API Key: %s\n" "$([ -n "$ARG_OPENAI_API_KEY" ] && echo "Provided" || echo "Not provided")" printf "Report Tasks: %s\n" "$ARG_REPORT_TASKS" +printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_CODER_AIBRIDGE" echo "======================================" set +o nounset @@ -150,7 +152,11 @@ function populate_config_toml() { fi append_mcp_servers_section "$CONFIG_PATH" - append_aibridge_config_section "$CONFIG_PATH" + + if [ "$ARG_ENABLE_CODER_AIBRIDGE" = "true" ]; then + printf "Coder AI Bridge is enabled\n" + append_aibridge_config_section "$CONFIG_PATH" + fi } function add_instruction_prompt_if_exists() { diff --git a/registry/coder-labs/modules/codex/scripts/start.sh b/registry/coder-labs/modules/codex/scripts/start.sh index e77436f13..e85b69e96 100644 --- a/registry/coder-labs/modules/codex/scripts/start.sh +++ b/registry/coder-labs/modules/codex/scripts/start.sh @@ -18,6 +18,8 @@ printf "Version: %s\n" "$(codex --version)" set -o nounset ARG_CODEX_TASK_PROMPT=$(echo -n "$ARG_CODEX_TASK_PROMPT" | base64 -d) ARG_CONTINUE=${ARG_CONTINUE:-true} +ARG_ENABLE_CODER_AIBRIDGE=${ARG_ENABLE_CODER_AIBRIDGE:-false} + echo "=== Codex Launch 
Configuration ===" printf "OpenAI API Key: %s\n" "$([ -n "$ARG_OPENAI_API_KEY" ] && echo "Provided" || echo "Not provided")" @@ -26,6 +28,7 @@ printf "Start Directory: %s\n" "$ARG_CODEX_START_DIRECTORY" printf "Has Task Prompt: %s\n" "$([ -n "$ARG_CODEX_TASK_PROMPT" ] && echo "Yes" || echo "No")" printf "Report Tasks: %s\n" "$ARG_REPORT_TASKS" printf "Continue Sessions: %s\n" "$ARG_CONTINUE" +printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_CODER_AIBRIDGE" echo "======================================" set +o nounset @@ -153,7 +156,10 @@ setup_workdir() { build_codex_args() { CODEX_ARGS=() - if [ -n "$ARG_CODEX_MODEL" ]; then + if [ "$ARG_ENABLE_CODER_AIBRIDGE" = "true" ]; then + printf "Coder AI Bridge is enabled, using profile aibridge\n" + CODEX_ARGS+=("--profile" "aibridge") + elif [ -n "$ARG_CODEX_MODEL" ]; then CODEX_ARGS+=("--model" "$ARG_CODEX_MODEL") fi From e9c9c7d6feeb4510526aec7dcab9d4dedd679b61 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Mon, 12 Jan 2026 16:03:53 +0000 Subject: [PATCH 04/13] feat: add AI Bridge test and update documentation --- registry/coder-labs/modules/codex/README.md | 51 +++++++++++++++++-- .../coder-labs/modules/codex/main.test.ts | 46 ++++++++++++++--- registry/coder-labs/modules/codex/main.tf | 6 +-- .../coder-labs/modules/codex/scripts/start.sh | 1 - 4 files changed, 88 insertions(+), 16 deletions(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index f98f9882c..345ab2479 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -3,7 +3,7 @@ display_name: Codex CLI icon: ../../../../.icons/openai.svg description: Run Codex CLI in your workspace with AgentAPI integration verified: true -tags: [agent, codex, ai, openai, tasks] +tags: [agent, codex, ai, openai, tasks, aibridge] --- # Codex CLI @@ -13,7 +13,7 @@ Run Codex CLI in your workspace to access OpenAI's models through the Codex inte ```tf module "codex" { source = "registry.coder.com/coder-labs/codex/coder" - version = "4.0.0" + version = "4.1.0" agent_id = coder_agent.example.id openai_api_key = var.openai_api_key workdir = "/home/coder/project" @@ -32,7 +32,7 @@ module "codex" { module "codex" { count = data.coder_workspace.me.start_count source = "registry.coder.com/coder-labs/codex/coder" - version = "4.0.0" + version = "4.1.0" agent_id = coder_agent.example.id openai_api_key = "..." workdir = "/home/coder/project" @@ -52,7 +52,7 @@ data "coder_task" "me" {} module "codex" { source = "registry.coder.com/coder-labs/codex/coder" - version = "4.0.0" + version = "4.1.0" agent_id = coder_agent.example.id openai_api_key = "..." ai_prompt = data.coder_task.me.prompt @@ -99,7 +99,7 @@ For custom Codex configuration, use `base_config_toml` and/or `additional_mcp_se ```tf module "codex" { source = "registry.coder.com/coder-labs/codex/coder" - version = "4.0.0" + version = "4.1.0" # ... other variables ... # Override default configuration @@ -122,6 +122,46 @@ module "codex" { > [!NOTE] > If no custom configuration is provided, the module uses secure defaults. The Coder MCP server is always included automatically. For containerized workspaces (Docker/Kubernetes), you may need `sandbox_mode = "danger-full-access"` to avoid permission issues. For advanced options, see [Codex config docs](https://github.com/openai/codex/blob/main/codex-rs/config.md). +### AI Bridge Configuration + +For AI Bridge configuration set `enable_coder_aibridge` to `true`. 
[AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a Premium Coder feature that provides centralized LLM proxy management + +```tf +resource "coder_ai_task" "task" { + count = data.coder_workspace.me.start_count + app_id = module.codex.task_app_id +} + +data "coder_task" "me" {} + +module "codex" { + source = "registry.coder.com/coder-labs/codex/coder" + version = "4.1.0" + agent_id = coder_agent.example.id + openai_api_key = "..." + ai_prompt = data.coder_task.me.prompt + workdir = "/home/coder/project" + enable_coder_aibridge = true +} +``` + +This adds a new model_provider and a profile to the Codex configuration: + +```toml +[model_providers.aibridge] +name = "AI Bridge" +base_url = "https://dev.coder.com/api/v2/aibridge/openai/v1" +env_key = "CODER_AIBRIDGE_SESSION_TOKEN" +wire_api = "responses" + +[profiles.aibridge] +model_provider = "aibridge" +model = "" # as configured in the module input +model_reasoning_effort = "" # as configured in the module input +``` + +Codex then runs with `--profile aibridge` + ## Troubleshooting - Check installation and startup logs in `~/.codex-module/` @@ -137,3 +177,4 @@ module "codex" { - [Codex CLI Documentation](https://github.com/openai/codex) - [AgentAPI Documentation](https://github.com/coder/agentapi) - [Coder AI Agents Guide](https://coder.com/docs/tutorials/ai-agents) +- [AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) diff --git a/registry/coder-labs/modules/codex/main.test.ts b/registry/coder-labs/modules/codex/main.test.ts index 2041e36e6..58286778b 100644 --- a/registry/coder-labs/modules/codex/main.test.ts +++ b/registry/coder-labs/modules/codex/main.test.ts @@ -39,9 +39,11 @@ interface SetupProps { agentapiMockScript?: string; } -const setup = async (props?: SetupProps): Promise<{ id: string }> => { +const setup = async ( + props?: SetupProps, +): Promise<{ id: string; coderEnvVars: Record }> => { const projectDir = "/home/coder/project"; - const { id } = await setupUtil({ + const { id, coderEnvVars } = await setupUtil({ moduleDir: import.meta.dir, moduleVariables: { install_codex: props?.skipCodexMock ? 
"true" : "false", @@ -62,7 +64,7 @@ const setup = async (props?: SetupProps): Promise<{ id: string }> => { content: await loadTestFile(import.meta.dir, "codex-mock.sh"), }); } - return { id }; + return { id, coderEnvVars }; }; setDefaultTimeout(60 * 1000); @@ -113,7 +115,7 @@ describe("codex", async () => { sandbox_mode = "danger-full-access" approval_policy = "never" preferred_auth_method = "apikey" - + [custom_section] new_feature = true `.trim(); @@ -189,7 +191,7 @@ describe("codex", async () => { args = ["-y", "@modelcontextprotocol/server-github"] type = "stdio" description = "GitHub integration" - + [mcp_servers.FileSystem] command = "npx" args = ["-y", "@modelcontextprotocol/server-filesystem", "/workspace"] @@ -215,7 +217,7 @@ describe("codex", async () => { approval_policy = "untrusted" preferred_auth_method = "chatgpt" custom_setting = "test-value" - + [advanced_settings] timeout = 30000 debug = true @@ -228,7 +230,7 @@ describe("codex", async () => { args = ["--serve", "--port", "8080"] type = "stdio" description = "Custom development tool" - + [mcp_servers.DatabaseMCP] command = "python" args = ["-m", "database_mcp_server"] @@ -454,4 +456,34 @@ describe("codex", async () => { ); expect(startLog.stdout).not.toContain("test prompt"); }); + + test("codex-with-aibridge", async () => { + const { id } = await setup({ + moduleVariables: { + enable_coder_aibridge: "true", + model_reasoning_effort: "none", + }, + }); + + await execModuleScript(id); + + const startLog = await readFileContainer( + id, + "/home/coder/.codex-module/agentapi-start.log", + ); + + const configToml = await readFileContainer( + id, + "/home/coder/.codex/config.toml", + ); + expect(startLog).toContain( + "Coder AI Bridge is enabled, using profile aibridge", + ); + expect(startLog).toContain( + "Starting Codex with arguments: --profile aibridge", + ); + expect(configToml).toContain( + "[profiles.aibridge]\n" + 'model_provider = "aibridge"', + ); + }); }); diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index a4ebac169..fd1dad01c 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -78,9 +78,9 @@ variable "enable_coder_aibridge" { } variable "model_reasoning_effort" { - type = string - description = "The reasoning effort for the AI Bridge model. One of: none, low, medium, high." - default = "medium" + type = string + description = "The reasoning effort for the AI Bridge model. One of: none, low, medium, high. https://platform.openai.com/docs/guides/latest-model#lower-reasoning-effort" + default = "medium" validation { condition = contains(["none", "low", "medium", "high"], var.model_reasoning_effort) error_message = "model_reasoning_effort must be one of: none, low, medium, high." 
diff --git a/registry/coder-labs/modules/codex/scripts/start.sh b/registry/coder-labs/modules/codex/scripts/start.sh index e85b69e96..dab703e1f 100644 --- a/registry/coder-labs/modules/codex/scripts/start.sh +++ b/registry/coder-labs/modules/codex/scripts/start.sh @@ -20,7 +20,6 @@ ARG_CODEX_TASK_PROMPT=$(echo -n "$ARG_CODEX_TASK_PROMPT" | base64 -d) ARG_CONTINUE=${ARG_CONTINUE:-true} ARG_ENABLE_CODER_AIBRIDGE=${ARG_ENABLE_CODER_AIBRIDGE:-false} - echo "=== Codex Launch Configuration ===" printf "OpenAI API Key: %s\n" "$([ -n "$ARG_OPENAI_API_KEY" ] && echo "Provided" || echo "Not provided")" printf "Codex Model: %s\n" "${ARG_CODEX_MODEL:-"Default"}" From 6c26ff849ddac3da8070a9dffac494c5f6e40b23 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Mon, 12 Jan 2026 16:14:20 +0000 Subject: [PATCH 05/13] wip --- registry/coder-labs/modules/codex/main.test.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/registry/coder-labs/modules/codex/main.test.ts b/registry/coder-labs/modules/codex/main.test.ts index 58286778b..5fbb02f89 100644 --- a/registry/coder-labs/modules/codex/main.test.ts +++ b/registry/coder-labs/modules/codex/main.test.ts @@ -39,11 +39,9 @@ interface SetupProps { agentapiMockScript?: string; } -const setup = async ( - props?: SetupProps, -): Promise<{ id: string; coderEnvVars: Record }> => { +const setup = async (props?: SetupProps): Promise<{ id: string }> => { const projectDir = "/home/coder/project"; - const { id, coderEnvVars } = await setupUtil({ + const { id } = await setupUtil({ moduleDir: import.meta.dir, moduleVariables: { install_codex: props?.skipCodexMock ? "true" : "false", @@ -64,7 +62,7 @@ const setup = async ( content: await loadTestFile(import.meta.dir, "codex-mock.sh"), }); } - return { id, coderEnvVars }; + return { id }; }; setDefaultTimeout(60 * 1000); From b31c79343b24a7e0bd67ab7fb0e64b58ea8e4cbf Mon Sep 17 00:00:00 2001 From: 35C4n0r <70096901+35C4n0r@users.noreply.github.com> Date: Tue, 13 Jan 2026 20:34:11 +0530 Subject: [PATCH 06/13] Update registry/coder-labs/modules/codex/README.md Co-authored-by: Atif Ali --- registry/coder-labs/modules/codex/README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index 345ab2479..e2b80b856 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -124,7 +124,9 @@ module "codex" { ### AI Bridge Configuration -For AI Bridge configuration set `enable_coder_aibridge` to `true`. [AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a Premium Coder feature that provides centralized LLM proxy management +[AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a centralized AI gateway that securely intermediates between users’ coding tools and AI providers, managing authentication, auditing, and usage attribution. + +To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_coder_aibridge` to `true`. 
```tf resource "coder_ai_task" "task" { From 5ee430a5be15a1ec460986bb157ba7cb0423c7f4 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Thu, 15 Jan 2026 06:05:55 +0000 Subject: [PATCH 07/13] ref --- registry/coder-labs/modules/codex/README.md | 4 ++-- registry/coder-labs/modules/codex/main.test.ts | 4 ++-- registry/coder-labs/modules/codex/main.tf | 10 +++++----- registry/coder-labs/modules/codex/scripts/install.sh | 8 ++++---- registry/coder-labs/modules/codex/scripts/start.sh | 8 ++++---- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index e2b80b856..c51026f6a 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -126,7 +126,7 @@ module "codex" { [AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a centralized AI gateway that securely intermediates between users’ coding tools and AI providers, managing authentication, auditing, and usage attribution. -To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_coder_aibridge` to `true`. +To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_aibridge` to `true`. ```tf resource "coder_ai_task" "task" { @@ -143,7 +143,7 @@ module "codex" { openai_api_key = "..." ai_prompt = data.coder_task.me.prompt workdir = "/home/coder/project" - enable_coder_aibridge = true + enable_aibridge = true } ``` diff --git a/registry/coder-labs/modules/codex/main.test.ts b/registry/coder-labs/modules/codex/main.test.ts index 5fbb02f89..333465bf5 100644 --- a/registry/coder-labs/modules/codex/main.test.ts +++ b/registry/coder-labs/modules/codex/main.test.ts @@ -458,7 +458,7 @@ describe("codex", async () => { test("codex-with-aibridge", async () => { const { id } = await setup({ moduleVariables: { - enable_coder_aibridge: "true", + enable_aibridge: "true", model_reasoning_effort: "none", }, }); @@ -475,7 +475,7 @@ describe("codex", async () => { "/home/coder/.codex/config.toml", ); expect(startLog).toContain( - "Coder AI Bridge is enabled, using profile aibridge", + "AI Bridge is enabled, using profile aibridge", ); expect(startLog).toContain( "Starting Codex with arguments: --profile aibridge", diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index fd1dad01c..ab1651b0e 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -71,7 +71,7 @@ variable "cli_app_display_name" { default = "Codex CLI" } -variable "enable_coder_aibridge" { +variable "enable_aibridge" { type = bool description = "Use AI Bridge for Codex. https://coder.com/docs/ai-coder/ai-bridge" default = false @@ -172,7 +172,7 @@ resource "coder_env" "openai_api_key" { } resource "coder_env" "coder_aibridge_session_token" { - count = var.enable_coder_aibridge ? 1 : 0 + count = var.enable_aibridge ? 
1 : 0 agent_id = var.agent_id name = "CODER_AIBRIDGE_SESSION_TOKEN" value = data.coder_workspace_owner.me.session_token @@ -231,7 +231,7 @@ module "agentapi" { ARG_CODEX_START_DIRECTORY='${local.workdir}' \ ARG_CODEX_TASK_PROMPT='${base64encode(var.ai_prompt)}' \ ARG_CONTINUE='${var.continue}' \ - ARG_ENABLE_CODER_AIBRIDGE='${var.enable_coder_aibridge}' \ + ARG_ENABLE_AIBRIDGE='${var.enable_aibridge}' \ /tmp/start.sh EOT @@ -247,8 +247,8 @@ module "agentapi" { ARG_INSTALL='${var.install_codex}' \ ARG_CODEX_VERSION='${var.codex_version}' \ ARG_BASE_CONFIG_TOML='${base64encode(var.base_config_toml)}' \ - ARG_ENABLE_CODER_AIBRIDGE='${var.enable_coder_aibridge}' \ - ARG_AIBRIDGE_CONFIG='${base64encode(var.enable_coder_aibridge ? local.aibridge_config : "")}' \ + ARG_ENABLE_AIBRIDGE='${var.enable_aibridge}' \ + ARG_AIBRIDGE_CONFIG='${base64encode(var.enable_aibridge ? local.aibridge_config : "")}' \ ARG_ADDITIONAL_MCP_SERVERS='${base64encode(var.additional_mcp_servers)}' \ ARG_CODER_MCP_APP_STATUS_SLUG='${local.app_slug}' \ ARG_CODEX_START_DIRECTORY='${local.workdir}' \ diff --git a/registry/coder-labs/modules/codex/scripts/install.sh b/registry/coder-labs/modules/codex/scripts/install.sh index 56452c3af..90229e97e 100644 --- a/registry/coder-labs/modules/codex/scripts/install.sh +++ b/registry/coder-labs/modules/codex/scripts/install.sh @@ -13,7 +13,7 @@ set -o nounset ARG_BASE_CONFIG_TOML=$(echo -n "$ARG_BASE_CONFIG_TOML" | base64 -d) ARG_ADDITIONAL_MCP_SERVERS=$(echo -n "$ARG_ADDITIONAL_MCP_SERVERS" | base64 -d) ARG_CODEX_INSTRUCTION_PROMPT=$(echo -n "$ARG_CODEX_INSTRUCTION_PROMPT" | base64 -d) -ARG_ENABLE_CODER_AIBRIDGE=${ARG_ENABLE_CODER_AIBRIDGE:-false} +ARG_ENABLE_AIBRIDGE=${ARG_ENABLE_AIBRIDGE:-false} ARG_AIBRIDGE_CONFIG=$(echo -n "$ARG_AIBRIDGE_CONFIG" | base64 -d) echo "=== Codex Module Configuration ===" @@ -26,7 +26,7 @@ printf "Has Additional MCP: %s\n" "$([ -n "$ARG_ADDITIONAL_MCP_SERVERS" ] && ech printf "Has System Prompt: %s\n" "$([ -n "$ARG_CODEX_INSTRUCTION_PROMPT" ] && echo "Yes" || echo "No")" printf "OpenAI API Key: %s\n" "$([ -n "$ARG_OPENAI_API_KEY" ] && echo "Provided" || echo "Not provided")" printf "Report Tasks: %s\n" "$ARG_REPORT_TASKS" -printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_CODER_AIBRIDGE" +printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_AIBRIDGE" echo "======================================" set +o nounset @@ -153,8 +153,8 @@ function populate_config_toml() { append_mcp_servers_section "$CONFIG_PATH" - if [ "$ARG_ENABLE_CODER_AIBRIDGE" = "true" ]; then - printf "Coder AI Bridge is enabled\n" + if [ "$ARG_ENABLE_AIBRIDGE" = "true" ]; then + printf "AI Bridge is enabled\n" append_aibridge_config_section "$CONFIG_PATH" fi } diff --git a/registry/coder-labs/modules/codex/scripts/start.sh b/registry/coder-labs/modules/codex/scripts/start.sh index dab703e1f..3e55dc70f 100644 --- a/registry/coder-labs/modules/codex/scripts/start.sh +++ b/registry/coder-labs/modules/codex/scripts/start.sh @@ -18,7 +18,7 @@ printf "Version: %s\n" "$(codex --version)" set -o nounset ARG_CODEX_TASK_PROMPT=$(echo -n "$ARG_CODEX_TASK_PROMPT" | base64 -d) ARG_CONTINUE=${ARG_CONTINUE:-true} -ARG_ENABLE_CODER_AIBRIDGE=${ARG_ENABLE_CODER_AIBRIDGE:-false} +ARG_ENABLE_AIBRIDGE=${ARG_ENABLE_AIBRIDGE:-false} echo "=== Codex Launch Configuration ===" printf "OpenAI API Key: %s\n" "$([ -n "$ARG_OPENAI_API_KEY" ] && echo "Provided" || echo "Not provided")" @@ -27,7 +27,7 @@ printf "Start Directory: %s\n" "$ARG_CODEX_START_DIRECTORY" printf "Has Task Prompt: %s\n" "$([ -n 
"$ARG_CODEX_TASK_PROMPT" ] && echo "Yes" || echo "No")" printf "Report Tasks: %s\n" "$ARG_REPORT_TASKS" printf "Continue Sessions: %s\n" "$ARG_CONTINUE" -printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_CODER_AIBRIDGE" +printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_AIBRIDGE" echo "======================================" set +o nounset @@ -155,8 +155,8 @@ setup_workdir() { build_codex_args() { CODEX_ARGS=() - if [ "$ARG_ENABLE_CODER_AIBRIDGE" = "true" ]; then - printf "Coder AI Bridge is enabled, using profile aibridge\n" + if [ "$ARG_ENABLE_AIBRIDGE" = "true" ]; then + printf "AI Bridge is enabled, using profile aibridge\n" CODEX_ARGS+=("--profile" "aibridge") elif [ -n "$ARG_CODEX_MODEL" ]; then CODEX_ARGS+=("--model" "$ARG_CODEX_MODEL") From 9bf3761fab84840e0de574f9e81ff922af1facaa Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Thu, 15 Jan 2026 06:07:01 +0000 Subject: [PATCH 08/13] bun fmt --- registry/coder-labs/modules/codex/README.md | 14 +++++++------- registry/coder-labs/modules/codex/main.test.ts | 4 +--- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index c51026f6a..9f6ed4956 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -126,7 +126,7 @@ module "codex" { [AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a centralized AI gateway that securely intermediates between users’ coding tools and AI providers, managing authentication, auditing, and usage attribution. -To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_aibridge` to `true`. +To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_aibridge` to `true`. ```tf resource "coder_ai_task" "task" { @@ -137,12 +137,12 @@ resource "coder_ai_task" "task" { data "coder_task" "me" {} module "codex" { - source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" - agent_id = coder_agent.example.id - openai_api_key = "..." - ai_prompt = data.coder_task.me.prompt - workdir = "/home/coder/project" + source = "registry.coder.com/coder-labs/codex/coder" + version = "4.1.0" + agent_id = coder_agent.example.id + openai_api_key = "..." 
+ ai_prompt = data.coder_task.me.prompt + workdir = "/home/coder/project" enable_aibridge = true } ``` diff --git a/registry/coder-labs/modules/codex/main.test.ts b/registry/coder-labs/modules/codex/main.test.ts index 333465bf5..a4edd8185 100644 --- a/registry/coder-labs/modules/codex/main.test.ts +++ b/registry/coder-labs/modules/codex/main.test.ts @@ -474,9 +474,7 @@ describe("codex", async () => { id, "/home/coder/.codex/config.toml", ); - expect(startLog).toContain( - "AI Bridge is enabled, using profile aibridge", - ); + expect(startLog).toContain("AI Bridge is enabled, using profile aibridge"); expect(startLog).toContain( "Starting Codex with arguments: --profile aibridge", ); From 8da4a4afdf571e2d347161e28187701adca28bf3 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Thu, 15 Jan 2026 06:42:53 +0000 Subject: [PATCH 09/13] update model --- registry/coder-labs/modules/codex/main.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index ab1651b0e..8dd943c77 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -131,8 +131,8 @@ variable "agentapi_version" { variable "codex_model" { type = string - description = "The model for Codex to use. Defaults to gpt-5.1-codex-max." - default = "gpt-5.1-codex-max" + description = "The model for Codex to use. Defaults to gpt-5.2-codex." + default = "gpt-5.2-codex" } variable "pre_install_script" { From af316b6266d5a89a057852e160723355f5082555 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Thu, 15 Jan 2026 06:46:22 +0000 Subject: [PATCH 10/13] feat: add validation --- registry/coder-labs/modules/codex/main.tf | 5 +++++ registry/coder-labs/modules/codex/scripts/install.sh | 5 ++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index 8dd943c77..6511f9edd 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -75,6 +75,11 @@ variable "enable_aibridge" { type = bool description = "Use AI Bridge for Codex. https://coder.com/docs/ai-coder/ai-bridge" default = false + + validation { + condition = !(var.enable_aibridge && length(var.openai_api_key) > 0) + error_message = "openai_api_key cannot be provided when enable_aibridge is true." 
+ } } variable "model_reasoning_effort" { diff --git a/registry/coder-labs/modules/codex/scripts/install.sh b/registry/coder-labs/modules/codex/scripts/install.sh index 90229e97e..97d539a8c 100644 --- a/registry/coder-labs/modules/codex/scripts/install.sh +++ b/registry/coder-labs/modules/codex/scripts/install.sh @@ -202,4 +202,7 @@ install_codex codex --version populate_config_toml add_instruction_prompt_if_exists -add_auth_json + +if [ "$ARG_ENABLE_AIBRIDGE" = "false" ]; then + add_auth_json +fi From 628b5b6ea3bc7d7c8dd7bcda185a9a284f345897 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Thu, 15 Jan 2026 14:32:46 +0000 Subject: [PATCH 11/13] feat: update README.md --- registry/coder-labs/modules/codex/README.md | 15 ++++++++++++++- registry/coder-labs/modules/codex/main.tf | 2 +- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index 9f6ed4956..fb6b5b7d6 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -128,6 +128,8 @@ module "codex" { To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_aibridge` to `true`. +#### Usage with tasks and AI Bridge + ```tf resource "coder_ai_task" "task" { count = data.coder_workspace.me.start_count @@ -140,13 +142,24 @@ module "codex" { source = "registry.coder.com/coder-labs/codex/coder" version = "4.1.0" agent_id = coder_agent.example.id - openai_api_key = "..." ai_prompt = data.coder_task.me.prompt workdir = "/home/coder/project" enable_aibridge = true } ``` +#### Standalone usage with AI Bridge + +```tf +module "codex" { + source = "registry.coder.com/coder-labs/codex/coder" + version = "4.1.0" + agent_id = coder_agent.example.id + workdir = "/home/coder/project" + enable_aibridge = true +} +``` + This adds a new model_provider and a profile to the Codex configuration: ```toml diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index 6511f9edd..fc19f171f 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -78,7 +78,7 @@ variable "enable_aibridge" { validation { condition = !(var.enable_aibridge && length(var.openai_api_key) > 0) - error_message = "openai_api_key cannot be provided when enable_aibridge is true." + error_message = "openai_api_key cannot be provided when enable_aibridge is true. AI Bridge automatically authenticates the client using their Coder credentials." 
} } From da22f9242171f3a82820401bc9025a8b098628e1 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Wed, 21 Jan 2026 17:15:46 +0000 Subject: [PATCH 12/13] chore: standardize according to claude-code --- registry/coder-labs/modules/codex/README.md | 149 ++++++++------------ registry/coder-labs/modules/codex/main.tf | 2 +- 2 files changed, 63 insertions(+), 88 deletions(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index fb6b5b7d6..00820f35c 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -13,7 +13,7 @@ Run Codex CLI in your workspace to access OpenAI's models through the Codex inte ```tf module "codex" { source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" + version = "4.2.0" agent_id = coder_agent.example.id openai_api_key = var.openai_api_key workdir = "/home/coder/project" @@ -32,7 +32,7 @@ module "codex" { module "codex" { count = data.coder_workspace.me.start_count source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" + version = "4.2.0" agent_id = coder_agent.example.id openai_api_key = "..." workdir = "/home/coder/project" @@ -40,7 +40,35 @@ module "codex" { } ``` -### Tasks integration +### Usage with AI Bridge + +[AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a Premium Coder feature that provides centralized LLM proxy management. To use AI Bridge, set `enable_aibridge = true`. + +For tasks integration with AI Bridge, add `enable_aibridge = true` to the [Usage with Tasks](#usage-with-tasks) example below. + +#### Standalone usage with AI Bridge + +```tf +module "codex" { + source = "registry.coder.com/coder-labs/codex/coder" + version = "4.2.0" + agent_id = coder_agent.example.id + workdir = "/home/coder/project" + enable_aibridge = true +} +``` + +When `enable_aibridge = true`, the module automatically sets: + +- `CODER_AIBRIDGE_SESSION_TOKEN` to the workspace owner's session token +- Configures Codex to use the AI Bridge profile with `base_url` pointing to `${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1` + +This allows Codex to route API requests through Coder's AI Bridge instead of directly to OpenAI's API. +Template build will fail if `openai_api_key` is provided alongside `enable_aibridge = true`. + +### Usage with Tasks + +This example shows how to configure Codex with Coder tasks. ```tf resource "coder_ai_task" "task" { @@ -52,17 +80,46 @@ data "coder_task" "me" {} module "codex" { source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" + version = "4.2.0" agent_id = coder_agent.example.id openai_api_key = "..." ai_prompt = data.coder_task.me.prompt workdir = "/home/coder/project" - # Custom configuration for full auto mode + # Optional: route through AI Bridge (Premium feature) + # enable_aibridge = true +} +``` + +### Advanced Configuration + +This example shows additional configuration options for custom models, MCP servers, and base configuration. + +```tf +module "codex" { + source = "registry.coder.com/coder-labs/codex/coder" + version = "4.2.0" + agent_id = coder_agent.example.id + openai_api_key = "..." 
+ workdir = "/home/coder/project" + + codex_version = "0.1.0" # Pin to a specific version + codex_model = "gpt-4o" # Custom model + + # Override default configuration base_config_toml = <<-EOT + sandbox_mode = "danger-full-access" approval_policy = "never" preferred_auth_method = "apikey" EOT + + # Add extra MCP servers + additional_mcp_servers = <<-EOT + [mcp_servers.GitHub] + command = "npx" + args = ["-y", "@modelcontextprotocol/server-github"] + type = "stdio" + EOT } ``` @@ -92,91 +149,9 @@ preferred_auth_method = "apikey" network_access = true ``` -### Custom Configuration - -For custom Codex configuration, use `base_config_toml` and/or `additional_mcp_servers`: - -```tf -module "codex" { - source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" - # ... other variables ... - - # Override default configuration - base_config_toml = <<-EOT - sandbox_mode = "danger-full-access" - approval_policy = "never" - preferred_auth_method = "apikey" - EOT - - # Add extra MCP servers - additional_mcp_servers = <<-EOT - [mcp_servers.GitHub] - command = "npx" - args = ["-y", "@modelcontextprotocol/server-github"] - type = "stdio" - EOT -} -``` - > [!NOTE] > If no custom configuration is provided, the module uses secure defaults. The Coder MCP server is always included automatically. For containerized workspaces (Docker/Kubernetes), you may need `sandbox_mode = "danger-full-access"` to avoid permission issues. For advanced options, see [Codex config docs](https://github.com/openai/codex/blob/main/codex-rs/config.md). -### AI Bridge Configuration - -[AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a centralized AI gateway that securely intermediates between users’ coding tools and AI providers, managing authentication, auditing, and usage attribution. - -To the AI Bridge integration, first [set up AI Bridge](https://coder.com/docs/ai-coder/ai-bridge/setup) and set `enable_aibridge` to `true`. 
- -#### Usage with tasks and AI Bridge - -```tf -resource "coder_ai_task" "task" { - count = data.coder_workspace.me.start_count - app_id = module.codex.task_app_id -} - -data "coder_task" "me" {} - -module "codex" { - source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" - agent_id = coder_agent.example.id - ai_prompt = data.coder_task.me.prompt - workdir = "/home/coder/project" - enable_aibridge = true -} -``` - -#### Standalone usage with AI Bridge - -```tf -module "codex" { - source = "registry.coder.com/coder-labs/codex/coder" - version = "4.1.0" - agent_id = coder_agent.example.id - workdir = "/home/coder/project" - enable_aibridge = true -} -``` - -This adds a new model_provider and a profile to the Codex configuration: - -```toml -[model_providers.aibridge] -name = "AI Bridge" -base_url = "https://dev.coder.com/api/v2/aibridge/openai/v1" -env_key = "CODER_AIBRIDGE_SESSION_TOKEN" -wire_api = "responses" - -[profiles.aibridge] -model_provider = "aibridge" -model = "" # as configured in the module input -model_reasoning_effort = "" # as configured in the module input -``` - -Codex then runs with `--profile aibridge` - ## Troubleshooting - Check installation and startup logs in `~/.codex-module/` diff --git a/registry/coder-labs/modules/codex/main.tf b/registry/coder-labs/modules/codex/main.tf index fc19f171f..60f7548c9 100644 --- a/registry/coder-labs/modules/codex/main.tf +++ b/registry/coder-labs/modules/codex/main.tf @@ -78,7 +78,7 @@ variable "enable_aibridge" { validation { condition = !(var.enable_aibridge && length(var.openai_api_key) > 0) - error_message = "openai_api_key cannot be provided when enable_aibridge is true. AI Bridge automatically authenticates the client using their Coder credentials." + error_message = "openai_api_key cannot be provided when enable_aibridge is true. AI Bridge automatically authenticates the client using Coder credentials." } } From 535a318f48aa0f06af94e314fcbb404b049f7dff Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Wed, 21 Jan 2026 17:52:28 +0000 Subject: [PATCH 13/13] chore: qol readme updates --- registry/coder-labs/modules/codex/README.md | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/registry/coder-labs/modules/codex/README.md b/registry/coder-labs/modules/codex/README.md index 00820f35c..1ffff08b8 100644 --- a/registry/coder-labs/modules/codex/README.md +++ b/registry/coder-labs/modules/codex/README.md @@ -58,10 +58,24 @@ module "codex" { } ``` -When `enable_aibridge = true`, the module automatically sets: +When `enable_aibridge = true`, the module: -- `CODER_AIBRIDGE_SESSION_TOKEN` to the workspace owner's session token -- Configures Codex to use the AI Bridge profile with `base_url` pointing to `${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1` +- Configures Codex to use the AI Bridge profile with `base_url` pointing to `${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1` and `env_key` pointing to the workspace owner's session token + +```toml +[model_providers.aibridge] +name = "AI Bridge" +base_url = "https://example.coder.com/api/v2/aibridge/openai/v1" +env_key = "CODER_AIBRIDGE_SESSION_TOKEN" +wire_api = "responses" + +[profiles.aibridge] +model_provider = "aibridge" +model = "" # as configured in the module input +model_reasoning_effort = "" # as configured in the module input +``` + +Codex then runs with `--profile aibridge` This allows Codex to route API requests through Coder's AI Bridge instead of directly to OpenAI's API. 
The template build will fail if `openai_api_key` is provided alongside `enable_aibridge = true`.