Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions backend/api/migrations/0120_add_queued_sync_status.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Generated by Django 4.2.29 on 2026-03-25 11:53

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the 'queued' sync status and make it the default.

    Alters the ``status`` field on both ``EnvironmentSync`` and
    ``EnvironmentSyncEvent`` so newly created syncs start in the
    'queued' state rather than 'in_progress'.
    """

    # Single source of truth for the status choices; both AlterField
    # operations below must stay in lockstep with the model definition.
    _STATUS_CHOICES = [
        ('queued', 'Queued'),
        ('in_progress', 'In progress'),
        ('completed', 'Completed'),
        ('cancelled', 'cancelled'),
        ('timed_out', 'Timed out'),
        ('failed', 'Failed'),
    ]

    dependencies = [
        ('api', '0119_secretevent_type_field'),
    ]

    operations = [
        migrations.AlterField(
            model_name='environmentsync',
            name='status',
            field=models.CharField(
                choices=_STATUS_CHOICES,
                default='queued',
                max_length=16,
            ),
        ),
        migrations.AlterField(
            model_name='environmentsyncevent',
            name='status',
            field=models.CharField(
                choices=_STATUS_CHOICES,
                default='queued',
                max_length=16,
            ),
        ),
    ]
12 changes: 9 additions & 3 deletions backend/api/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from django.utils import timezone
from django.conf import settings
from api.services import Providers, ServiceConfig
from api.tasks.syncing import trigger_sync_tasks
from api.tasks.syncing import trigger_sync_tasks, trigger_syncs_for_referencing_envs
from backend.quotas import (
can_add_account,
can_add_app,
Expand Down Expand Up @@ -402,6 +402,10 @@ def save(self, *args, **kwargs):
if env_sync.is_active
]

# Trigger syncs for other environments whose secrets
# reference this one (cross-env and cross-app references)
trigger_syncs_for_referencing_envs(self)


class EnvironmentKey(models.Model):
id = models.TextField(default=uuid4, primary_key=True, editable=False)
Expand Down Expand Up @@ -470,13 +474,15 @@ class ProviderCredentials(models.Model):


class EnvironmentSync(models.Model):
QUEUED = "queued"
IN_PROGRESS = "in_progress"
COMPLETED = "completed"
CANCELLED = "cancelled"
TIMED_OUT = "timed_out"
FAILED = "failed"

STATUS_OPTIONS = [
(QUEUED, "Queued"),
(IN_PROGRESS, "In progress"),
(COMPLETED, "Completed"),
(CANCELLED, "cancelled"),
Expand All @@ -501,7 +507,7 @@ class EnvironmentSync(models.Model):
status = models.CharField(
max_length=16,
choices=STATUS_OPTIONS,
default=IN_PROGRESS,
default=QUEUED,
)


Expand All @@ -512,7 +518,7 @@ class EnvironmentSyncEvent(models.Model):
status = models.CharField(
max_length=16,
choices=EnvironmentSync.STATUS_OPTIONS,
default=EnvironmentSync.IN_PROGRESS,
default=EnvironmentSync.QUEUED,
)
created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
completed_at = models.DateTimeField(blank=True, null=True)
Expand Down
201 changes: 94 additions & 107 deletions backend/api/tasks/syncing.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,114 +58,37 @@ def trigger_sync_tasks(env_sync):

cancel_sync_tasks(env_sync) # cancel any running or queued jobs for this sync

if env_sync.service == ServiceConfig.CLOUDFLARE_PAGES["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_cloudflare_pages_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.CLOUDFLARE_WORKERS["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_cloudflare_workers_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.AWS_SECRETS_MANAGER["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_aws_sm_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.GITHUB_ACTIONS["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_github_actions_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.GITHUB_DEPENDABOT["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_github_dependabot_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.HASHICORP_VAULT["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_vault_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.HASHICORP_NOMAD["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_nomad_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.GITLAB_CI["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_gitlab_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.RAILWAY["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_railway_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.VERCEL["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()

job = perform_vercel_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.RENDER["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
env_sync.save()
SERVICE_DISPATCH = {
ServiceConfig.CLOUDFLARE_PAGES["id"]: perform_cloudflare_pages_sync,
ServiceConfig.CLOUDFLARE_WORKERS["id"]: perform_cloudflare_workers_sync,
ServiceConfig.AWS_SECRETS_MANAGER["id"]: perform_aws_sm_sync,
ServiceConfig.GITHUB_ACTIONS["id"]: perform_github_actions_sync,
ServiceConfig.GITHUB_DEPENDABOT["id"]: perform_github_dependabot_sync,
ServiceConfig.HASHICORP_VAULT["id"]: perform_vault_sync,
ServiceConfig.HASHICORP_NOMAD["id"]: perform_nomad_sync,
ServiceConfig.GITLAB_CI["id"]: perform_gitlab_sync,
ServiceConfig.RAILWAY["id"]: perform_railway_sync,
ServiceConfig.VERCEL["id"]: perform_vercel_sync,
ServiceConfig.RENDER["id"]: perform_render_service_sync,
ServiceConfig.AZURE_KEY_VAULT["id"]: perform_azure_kv_sync,
}

sync_func = SERVICE_DISPATCH.get(env_sync.service)
if sync_func is None:
return

env_sync.status = EnvironmentSync.QUEUED
env_sync.save()

job = perform_render_service_sync.delay(env_sync)
try:
job = sync_func.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)

elif env_sync.service == ServiceConfig.AZURE_KEY_VAULT["id"]:
env_sync.status = EnvironmentSync.IN_PROGRESS
except Exception as e:
logger.error(f"Failed to dispatch sync job for {env_sync.id}: {e}")
env_sync.status = EnvironmentSync.FAILED
env_sync.save()

job = perform_azure_kv_sync.delay(env_sync)
job_id = job.get_id()

EnvironmentSyncEvent.objects.create(id=job_id, env_sync=env_sync)


# try and cancel running or queued jobs for this sync
def cancel_sync_tasks(env_sync):
Expand All @@ -175,7 +98,8 @@ def cancel_sync_tasks(env_sync):
EnvironmentSyncEvent = apps.get_model("api", "EnvironmentSyncEvent")

for sync_event in EnvironmentSyncEvent.objects.filter(
env_sync=env_sync, status=EnvironmentSync.IN_PROGRESS
env_sync=env_sync,
status__in=[EnvironmentSync.IN_PROGRESS, EnvironmentSync.QUEUED],
):
try:
job = Job.fetch(sync_event.id, connection=get_queue("default").connection)
Expand All @@ -202,10 +126,14 @@ def handle_sync_event(environment_sync, sync_function, *args, **kwargs):
.first()
)

try:
EnvironmentSync = apps.get_model("api", "EnvironmentSync")
EnvironmentSyncEvent = apps.get_model("api", "EnvironmentSyncEvent")
# Mark as in-progress now that the worker has picked up the job
environment_sync.status = EnvironmentSync.IN_PROGRESS
environment_sync.save()
if sync_event:
sync_event.status = EnvironmentSync.IN_PROGRESS
sync_event.save()

try:
secrets = get_environment_secrets(
environment_sync.environment, environment_sync.path
)
Expand Down Expand Up @@ -572,3 +500,62 @@ def perform_azure_kv_sync(environment_sync):
credentials.get("client_secret"),
vault_uri,
)


def trigger_syncs_for_referencing_envs(changed_env):
    """
    Trigger active syncs for environments whose secrets reference the
    changed environment.

    This handles the case where env B has a secret like ${staging.DB_HOST}
    referencing env "staging" — when a secret in "staging" changes, env B's
    syncs need to be triggered too (cross-env and cross-app references).

    Called synchronously from Environment.save() so that sync status is
    set to QUEUED immediately by trigger_sync_tasks (the actual sync work
    is dispatched async). This ensures the UI reflects the pending sync
    right away.

    Args:
        changed_env: The Environment instance whose secrets just changed.
    """
    from collections import defaultdict

    from api.utils.secrets import env_has_references_to

    EnvironmentSync = apps.get_model("api", "EnvironmentSync")

    org = changed_env.app.organisation
    changed_env_name = changed_env.name
    changed_app_name = changed_env.app.name
    changed_app_id = changed_env.app_id

    # All active, non-deleted syncs in the org, excluding the changed
    # environment itself (its own syncs are triggered separately).
    candidate_syncs = (
        EnvironmentSync.objects.filter(
            environment__app__organisation=org,
            is_active=True,
            deleted_at=None,
        )
        .exclude(environment=changed_env)
        .select_related("environment", "environment__app")
    )

    # Group syncs by environment so the (expensive) reference check runs
    # at most once per environment.
    env_syncs_map = defaultdict(list)
    for sync in candidate_syncs:
        env_syncs_map[sync.environment_id].append(sync)

    for env_id, syncs in env_syncs_map.items():
        try:
            if env_has_references_to(
                env_id, changed_env_name, changed_app_name, changed_app_id
            ):
                logger.info(
                    f"Environment {env_id} references changed environment "
                    f"{changed_env.id}, triggering syncs"
                )
                for sync in syncs:
                    trigger_sync_tasks(sync)
        except Exception as e:
            # Best-effort: a failure while checking one environment must not
            # block syncs for the remaining environments.
            logger.warning(
                f"Failed to check references for environment {env_id}: {e}"
            )
73 changes: 73 additions & 0 deletions backend/api/utils/secrets.py
Original file line number Diff line number Diff line change
Expand Up @@ -488,3 +488,76 @@ def decrypt_secret_value(
raise SecretReferenceException("\n".join(unresolved_local_references))

return value


def env_has_references_to(source_env_id, target_env_name, target_app_name, target_app_id):
    """
    Return True if any secret in the source environment references the
    target environment.

    Used to determine which syncs need to be triggered when a referenced
    environment's secrets change.

    Args:
        source_env_id: ID of the environment to check secrets in.
        target_env_name: Name of the potentially-referenced environment.
        target_app_name: Name of the app containing the target environment.
        target_app_id: ID of the app containing the target environment.

    Returns:
        bool: True if any secret in source_env references the target environment.
    """
    Secret = apps.get_model("api", "Secret")
    ServerEnvironmentKey = apps.get_model("api", "ServerEnvironmentKey")
    Environment = apps.get_model("api", "Environment")

    try:
        source_env = Environment.objects.select_related("app").get(id=source_env_id)
    except Environment.DoesNotExist:
        return False

    # Without a server-side key we cannot decrypt this environment's
    # secrets, so we cannot detect any references.
    try:
        server_env_key = ServerEnvironmentKey.objects.get(
            environment_id=source_env_id
        )
    except ServerEnvironmentKey.DoesNotExist:
        return False

    pk, sk = get_server_keypair()

    try:
        seed = decrypt_asymmetric(server_env_key.wrapped_seed, sk.hex(), pk.hex())
        env_pubkey, env_privkey = env_keypair(seed)
    except Exception:
        return False

    in_same_app = str(source_env.app_id) == str(target_app_id)
    env_needle = target_env_name.lower()
    app_needle = target_app_name.lower()

    live_secrets = Secret.objects.filter(
        environment_id=source_env_id,
        deleted_at=None,
    )

    for secret in live_secrets:
        # Best-effort per secret: a value that fails to decrypt or parse
        # must not abort the scan of the remaining secrets.
        try:
            plaintext = decrypt_asymmetric(secret.value, env_privkey, env_pubkey)

            # Cross-app references: ${APP::ENV.KEY}
            if any(
                ref_app.lower() == app_needle and ref_env.lower() == env_needle
                for ref_app, ref_env, _ in CROSS_APP_ENV_PATTERN.findall(plaintext)
            ):
                return True

            # Cross-env references: ${ENV.KEY} — only meaningful when the
            # source and target environments live in the same app.
            if in_same_app and any(
                ref_env.lower() == env_needle
                for ref_env, _ in CROSS_ENV_PATTERN.findall(plaintext)
            ):
                return True
        except Exception:
            continue

    return False
Empty file added backend/tests/tasks/__init__.py
Empty file.
Loading
Loading