From 4f5b0f953826a4e769de16fc084c8fa5a86ff125 Mon Sep 17 00:00:00 2001 From: David Han Date: Thu, 19 Feb 2026 10:54:51 -0500 Subject: [PATCH 1/6] Use PATCH method for preexisting accounts --- aws_organizations/README.md | 4 ++++ aws_organizations/main_organizations.yaml | 12 +++++++++++- aws_quickstart/README.md | 4 ++++ aws_quickstart/datadog_integration_api_call_v2.yaml | 12 +++++++++++- 4 files changed, 30 insertions(+), 2 deletions(-) diff --git a/aws_organizations/README.md b/aws_organizations/README.md index c9b1071e..ed149c21 100644 --- a/aws_organizations/README.md +++ b/aws_organizations/README.md @@ -35,6 +35,10 @@ Before getting started, ensure you have the following prerequisites: 10. Move to the Review page and Click Submit. This launches the creation process for the Datadog StackSet. This could take a while depending on how many accounts need to be integrated. Ensure that the StackSet successfully creates all resources before proceeding. 11. After the stack is created, go back to the AWS integration tile in Datadog and click Ready! +## Pre-existing Integrations + +If you deploy this StackSet to an AWS Organization or OU that includes accounts which already have a Datadog integration configured (e.g., via a standalone stack), the template will detect the existing integration and update it rather than attempting to create a duplicate. This prevents a 409 conflict error that would otherwise trigger a rollback and delete the pre-existing integration. + ## Datadog::Integrations::AWS This CloudFormation StackSet only manages *AWS* resources required by the Datadog AWS integration. The actual integration configuration within Datadog platform can also be managed in CloudFormation using the custom resource [Datadog::Integrations::AWS](https://github.com/DataDog/datadog-cloudformation-resources/tree/master/datadog-integrations-aws-handler) if you like. diff --git a/aws_organizations/main_organizations.yaml b/aws_organizations/main_organizations.yaml index 4bb3559b..47ebe431 100644 --- a/aws_organizations/main_organizations.yaml +++ b/aws_organizations/main_organizations.yaml @@ -272,7 +272,17 @@ Resources: try: # Call Datadog API and report response back to CloudFormation uuid = "" - if event["RequestType"] != "Create": + if event["RequestType"] == "Create": + datadog_account_response = get_datadog_account(event) + code = datadog_account_response.getcode() + data = datadog_account_response.read() + if code == 200 and data: + json_response = json.loads(data) + if len(json_response["data"]) > 0: + LOGGER.info("Account already exists in Datadog. Using PATCH to update instead of POST to avoid conflict.") + uuid = json_response["data"][0]["id"] + method = "PATCH" + else: datadog_account_response = get_datadog_account(event) uuid = extract_uuid_from_account_response(event, context, datadog_account_response) if uuid is None: diff --git a/aws_quickstart/README.md b/aws_quickstart/README.md index 43f39ad0..bfe19690 100644 --- a/aws_quickstart/README.md +++ b/aws_quickstart/README.md @@ -23,6 +23,10 @@ This template creates the following AWS resources required by the Datadog AWS in - The Datadog Forwarder only deploys to the AWS region where the AWS integration CloudFormation stack is launched. If you operate in multiple AWS regions, you can deploy the Forwarder stack (without the rest of the AWS integration stack) directly to other regions as needed. 
- The Datadog Forwarder is installed with default settings as a nested stack, edit the nested stack directly to update the forwarder specific settings. +## Pre-existing Integrations + +If you deploy this CloudFormation stack to an AWS account that already has a Datadog integration configured, the stack will detect the existing integration and update it rather than attempting to create a duplicate. This prevents a 409 conflict error that would otherwise trigger a rollback and delete the pre-existing integration. + ## Updating your CloudFormation Stack As of v2.0.0 of the aws_quickstart template updates to the stack parameters are supported. Updates should generally be made to the root CloudFormation Stack (entitled DatadogIntegration by default). We do not support updating the API Key or APP Key fields to point to a different Datadog Organization - if these are updated they must point to the original Organization. You can also update the version of the template used by selecting "Replace existing template" while updating your CloudFormation Stack. You must select a version number with the same major version as your current template. diff --git a/aws_quickstart/datadog_integration_api_call_v2.yaml b/aws_quickstart/datadog_integration_api_call_v2.yaml index 18d06183..fc03c392 100644 --- a/aws_quickstart/datadog_integration_api_call_v2.yaml +++ b/aws_quickstart/datadog_integration_api_call_v2.yaml @@ -237,7 +237,17 @@ Resources: try: # Call Datadog API and report response back to CloudFormation uuid = "" - if event["RequestType"] != "Create": + if event["RequestType"] == "Create": + datadog_account_response = get_datadog_account(event) + code = datadog_account_response.getcode() + data = datadog_account_response.read() + if code == 200 and data: + json_response = json.loads(data) + if len(json_response["data"]) > 0: + LOGGER.info("Account already exists in Datadog. 
Using PATCH to update instead of POST to avoid conflict.") + uuid = json_response["data"][0]["id"] + method = "PATCH" + else: datadog_account_response = get_datadog_account(event) uuid = extract_uuid_from_account_response(event, context, datadog_account_response) if uuid is None: From 056c90f35a33b27ae7b7f651ff9d91720e01df0a Mon Sep 17 00:00:00 2001 From: David Han Date: Thu, 19 Feb 2026 11:08:50 -0500 Subject: [PATCH 2/6] Extract the api call into python file for readability and inject in release script --- .github/workflows/python-test.yml | 13 + .../datadog_integration_api_call.py | 236 ++++++++++++++ .../datadog_integration_api_call_test.py | 308 ++++++++++++++++++ .../datadog_integration_api_call_v2.yaml | 234 +------------ aws_quickstart/release.sh | 12 + 5 files changed, 570 insertions(+), 233 deletions(-) create mode 100644 aws_quickstart/datadog_integration_api_call.py create mode 100644 aws_quickstart/datadog_integration_api_call_test.py diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml index 0191c36d..056a6d30 100644 --- a/.github/workflows/python-test.yml +++ b/.github/workflows/python-test.yml @@ -21,3 +21,16 @@ jobs: run: | cd aws_quickstart python -B -S -m unittest datadog_agentless_api_call_test.py -v + + integration_api_call_test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Run Integration API Call unit tests + run: | + cd aws_quickstart + python -B -S -m unittest datadog_integration_api_call_test.py -v diff --git a/aws_quickstart/datadog_integration_api_call.py b/aws_quickstart/datadog_integration_api_call.py new file mode 100644 index 00000000..ecbc8ad7 --- /dev/null +++ b/aws_quickstart/datadog_integration_api_call.py @@ -0,0 +1,236 @@ +import json +import logging +import signal +from urllib.request import Request +import urllib.parse +import cfnresponse + +LOGGER = logging.getLogger() +LOGGER.setLevel(logging.INFO) + +API_CALL_SOURCE_HEADER_VALUE = "cfn-quick-start" + +class TimeoutError(Exception): + """Exception for timeouts""" + pass + +def call_datadog_api(uuid, event, method): + api_key = event["ResourceProperties"]["APIKey"] + app_key = event["ResourceProperties"]["APPKey"] + api_url = event["ResourceProperties"]["ApiURL"] + account_id = event["ResourceProperties"]["AccountId"] + role_name = event["ResourceProperties"]["RoleName"] + aws_partition = event["ResourceProperties"]["AWSPartition"] + account_tags = event["ResourceProperties"]["AccountTags"] + cspm = event["ResourceProperties"]["CloudSecurityPostureManagement"] + metrics_disabled = event["ResourceProperties"]["DisableMetricCollection"] + resource_collection_disabled = event['ResourceProperties']['DisableResourceCollection'] + + # Make the url Request + url = f"https://api.{api_url}/api/v2/integration/aws/accounts" + headers = { + "DD-API-KEY": api_key, + "DD-APPLICATION-KEY": app_key, + "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, + } + + if method == "PATCH" or method == "DELETE": + url = url + "/" + uuid + + if method == "DELETE": + # DELETE request has no body + request = Request(url, headers=headers) + else: + # Create the request body for POST and PATCH + values = { + "data": { + "type": "account", + "attributes": { + "aws_account_id": account_id, + "account_tags": account_tags, + "aws_partition": aws_partition, + "auth_config": {"role_name": role_name}, + "metrics_config": { + "enabled": (metrics_disabled == "false"), + }, + "resources_config": { + 
"cloud_security_posture_management_collection": ( + cspm == "true" + ), + "extended_collection": ( + resource_collection_disabled == "false" + ) + } + } + } + } + + data = json.dumps(values) + data = data.encode("utf-8") # data should be bytes + request = Request(url, data=data, headers=headers) + request.add_header("Content-Type", "application/json; charset=utf-8") + request.add_header("Content-Length", len(data)) + + # Send the request + request.get_method = lambda: method + try: + response = urllib.request.urlopen(request) + except urllib.error.HTTPError as e: + # Return error response from API + response = e + return response + +def get_datadog_account(event): + api_key = event["ResourceProperties"]["APIKey"] + app_key = event["ResourceProperties"]["APPKey"] + api_url = event["ResourceProperties"]["ApiURL"] + account_id = event["ResourceProperties"]["AccountId"] + + # Make the url Request + url = f"https://api.{api_url}/api/v2/integration/aws/accounts?aws_account_id={account_id}" + headers = { + "DD-API-KEY": api_key, + "DD-APPLICATION-KEY": app_key, + "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, + } + request = Request(url, headers=headers) + request.get_method = lambda: "GET" + try: + response = urllib.request.urlopen(request) + except urllib.error.HTTPError as e: + # Return error response from API + response = e + return response + + +def handler(event, context): + """Handle Lambda event from AWS""" + if event["RequestType"] == "Create": + LOGGER.info("Received Create request.") + method = "POST" + + elif event["RequestType"] == "Update": + LOGGER.info("Received Update request.") + method = "PATCH" + + elif event["RequestType"] == "Delete": + LOGGER.info("Received Delete request.") + method = "DELETE" + else: + LOGGER.info("Failed - received unexpected request.") + cfResponse = {"Message": "Received unexpected request type: {}".format(event["RequestType"])} + reason = json.dumps(cfResponse) + cfnresponse.send( + event, + context, + responseStatus="FAILED", + responseData=cfResponse, + reason=reason, + ) + return + + try: + # Call Datadog API and report response back to CloudFormation + uuid = "" + if event["RequestType"] == "Create": + # Check if account already exists to avoid 409 conflict. + # A failed POST triggers rollback which deletes the pre-existing integration, + # so we use PATCH instead of POST if the account is already registered. + datadog_account_response = get_datadog_account(event) + code = datadog_account_response.getcode() + data = datadog_account_response.read() + if code == 200 and data: + json_response = json.loads(data) + if len(json_response["data"]) > 0: + LOGGER.info("Account already exists in Datadog. 
Using PATCH to update instead of POST to avoid conflict.") + uuid = json_response["data"][0]["id"] + method = "PATCH" + else: + datadog_account_response = get_datadog_account(event) + uuid = extract_uuid_from_account_response(event, context, datadog_account_response) + if uuid is None: + return + response = call_datadog_api(uuid, event, method) + cfn_response_send_api_result(event, context, method, response) + + except Exception as e: + LOGGER.info("Failed - exception thrown during processing.") + cfResponse = {"Message": "Exception during processing: {}".format(e)} + reason = json.dumps(cfResponse) + cfnresponse.send( + event, + context, + "FAILED", + responseData=cfResponse, + reason=reason, + ) + +def extract_uuid_from_account_response(event, context, account_response): + json_response = "" + code = account_response.getcode() + data = account_response.read() + if data: + json_response = json.loads(data) + if code == 200 or code == 204: + if len(json_response["data"]) == 0: + if event["RequestType"] == "Delete": + # Report success so stack deletion can proceed + LOGGER.info("Account requested for deletion does not exist in Datadog. Proceeding with stack deletion.") + cfn_response_send_api_result(event, context, "DELETE", account_response) + else: + cfn_response_send_failure(event, context, "Datadog account not found.") + return None + if len(json_response["data"]) > 1: + cfn_response_send_failure(event, context, "Datadog account not unique.") + return None + return json_response["data"][0]["id"] + cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) + return None + + +def cfn_response_send_api_result(event, context, method, response): + reason = None + json_response = "" + code = response.getcode() + data = response.read() + if data: + json_response = json.loads(data) + if code == 200 or code == 204: + LOGGER.info("Success - Datadog API call was successful.") + response_status = "SUCCESS" + cfResponse = {"Message": "Datadog AWS Integration {} API request was successful.".format(method)} + + # return external ID for create and update + if method == "POST" or method == "PATCH": + external_id = json_response["data"]["attributes"]["auth_config"]["external_id"] + cfResponse["ExternalId"] = external_id + cfnresponse.send( + event, + context, + responseStatus=response_status, + responseData=cfResponse, + reason=reason, + ) + return + cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) + + +def cfn_response_send_failure(event, context, message): + LOGGER.info("Failed - Datadog API call failed.") + reason = None + response_status = "FAILED" + cfResponse = {"Message": message} + reason = json.dumps(cfResponse) + cfnresponse.send( + event, + context, + responseStatus=response_status, + responseData=cfResponse, + reason=reason, + ) + +def timeout_handler(_signal, _frame): + """Handle SIGALRM""" + raise TimeoutError("Lambda function timeout exceeded - increase the timeout set in the api_call Cloudformation template.") + +signal.signal(signal.SIGALRM, timeout_handler) diff --git a/aws_quickstart/datadog_integration_api_call_test.py b/aws_quickstart/datadog_integration_api_call_test.py new file mode 100644 index 00000000..715ad5af --- /dev/null +++ b/aws_quickstart/datadog_integration_api_call_test.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python3 + +import json +import sys +import unittest +from types import SimpleNamespace +from unittest.mock import patch, Mock, MagicMock + +# cfnresponse is only available in the 
AWS Lambda runtime, so we mock it for testing +sys.modules["cfnresponse"] = MagicMock() + +from datadog_integration_api_call import ( + call_datadog_api, + get_datadog_account, + handler, + extract_uuid_from_account_response, +) + + +def make_event(request_type, account_id="123456789012"): + return { + "RequestType": request_type, + "ResourceProperties": { + "APIKey": "0123456789abcdef0123456789abcdef", + "APPKey": "0123456789abcdef0123456789abcdef12345678", + "ApiURL": "datadoghq.com", + "AccountId": account_id, + "RoleName": "DatadogIntegrationRole", + "AWSPartition": "aws", + "AccountTags": ["aws_account:123456789012"], + "CloudSecurityPostureManagement": "false", + "DisableMetricCollection": "false", + "DisableResourceCollection": "false", + }, + "StackId": "arn:aws:cloudformation:us-east-1:123456789012:stack/test/guid", + "RequestId": "test-request-id", + "LogicalResourceId": "DatadogAWSAccountIntegration", + "ResponseURL": "https://cloudformation-custom-resource-response-useast1.s3.amazonaws.com/test", + } + + +def make_mock_response(status_code, data=None): + """Create a mock HTTP response.""" + response = Mock() + response.getcode.return_value = status_code + if data is not None: + response.read.return_value = json.dumps(data).encode("utf-8") + else: + response.read.return_value = b"" + return response + + +EXISTING_ACCOUNT_RESPONSE = { + "data": [ + { + "id": "existing-uuid-1234", + "type": "account", + "attributes": { + "aws_account_id": "123456789012", + "auth_config": {"external_id": "ext-id-1234"}, + }, + } + ] +} + +EMPTY_ACCOUNT_RESPONSE = {"data": []} + +SUCCESS_POST_RESPONSE = { + "data": { + "id": "new-uuid-5678", + "type": "account", + "attributes": { + "aws_account_id": "123456789012", + "auth_config": {"external_id": "ext-id-5678"}, + }, + } +} + +SUCCESS_PATCH_RESPONSE = { + "data": { + "id": "existing-uuid-1234", + "type": "account", + "attributes": { + "aws_account_id": "123456789012", + "auth_config": {"external_id": "ext-id-1234"}, + }, + } +} + + +class TestHandlerCreateNewAccount(unittest.TestCase): + """Test Create when account does NOT already exist in Datadog.""" + + @patch("datadog_integration_api_call.cfnresponse") + @patch("datadog_integration_api_call.urllib.request.urlopen") + def test_create_new_account_uses_post(self, mock_urlopen, mock_cfnresponse): + """When account doesn't exist, Create should POST.""" + event = make_event("Create") + context = SimpleNamespace() + + # First call: get_datadog_account returns empty list + # Second call: call_datadog_api POST succeeds + mock_urlopen.side_effect = [ + make_mock_response(200, EMPTY_ACCOUNT_RESPONSE), + make_mock_response(200, SUCCESS_POST_RESPONSE), + ] + + handler(event, context) + + # Verify POST was used (second urlopen call) + self.assertEqual(mock_urlopen.call_count, 2) + post_request = mock_urlopen.call_args_list[1][0][0] + self.assertEqual(post_request.get_method(), "POST") + self.assertNotIn("/existing-uuid", post_request.full_url) + + # Verify SUCCESS reported to CloudFormation + mock_cfnresponse.send.assert_called_once() + call_kwargs = mock_cfnresponse.send.call_args + self.assertEqual(call_kwargs[1]["responseStatus"], "SUCCESS") + + +class TestHandlerCreateExistingAccount(unittest.TestCase): + """Test Create when account ALREADY exists in Datadog (the 409 fix).""" + + @patch("datadog_integration_api_call.cfnresponse") + @patch("datadog_integration_api_call.urllib.request.urlopen") + def test_create_existing_account_uses_patch(self, mock_urlopen, mock_cfnresponse): + """When account already 
exists, Create should fall back to PATCH.""" + event = make_event("Create") + context = SimpleNamespace() + + # First call: get_datadog_account returns existing account + # Second call: call_datadog_api PATCH succeeds + mock_urlopen.side_effect = [ + make_mock_response(200, EXISTING_ACCOUNT_RESPONSE), + make_mock_response(200, SUCCESS_PATCH_RESPONSE), + ] + + handler(event, context) + + # Verify PATCH was used with the existing UUID + self.assertEqual(mock_urlopen.call_count, 2) + patch_request = mock_urlopen.call_args_list[1][0][0] + self.assertEqual(patch_request.get_method(), "PATCH") + self.assertIn("/existing-uuid-1234", patch_request.full_url) + + # Verify SUCCESS reported to CloudFormation + mock_cfnresponse.send.assert_called_once() + call_kwargs = mock_cfnresponse.send.call_args + self.assertEqual(call_kwargs[1]["responseStatus"], "SUCCESS") + + @patch("datadog_integration_api_call.cfnresponse") + @patch("datadog_integration_api_call.urllib.request.urlopen") + def test_create_get_fails_falls_through_to_post(self, mock_urlopen, mock_cfnresponse): + """When the GET check fails (non-200), Create should still try POST.""" + event = make_event("Create") + context = SimpleNamespace() + + # First call: get_datadog_account returns error + # Second call: call_datadog_api POST succeeds + mock_urlopen.side_effect = [ + make_mock_response(500, None), + make_mock_response(200, SUCCESS_POST_RESPONSE), + ] + + handler(event, context) + + # Verify POST was used (fell through because GET returned non-200) + self.assertEqual(mock_urlopen.call_count, 2) + post_request = mock_urlopen.call_args_list[1][0][0] + self.assertEqual(post_request.get_method(), "POST") + + # Verify SUCCESS reported + mock_cfnresponse.send.assert_called_once() + call_kwargs = mock_cfnresponse.send.call_args + self.assertEqual(call_kwargs[1]["responseStatus"], "SUCCESS") + + +class TestHandlerUpdate(unittest.TestCase): + """Test Update behavior (should be unchanged).""" + + @patch("datadog_integration_api_call.cfnresponse") + @patch("datadog_integration_api_call.urllib.request.urlopen") + def test_update_uses_patch(self, mock_urlopen, mock_cfnresponse): + """Update should GET the account UUID then PATCH.""" + event = make_event("Update") + context = SimpleNamespace() + + mock_urlopen.side_effect = [ + make_mock_response(200, EXISTING_ACCOUNT_RESPONSE), + make_mock_response(200, SUCCESS_PATCH_RESPONSE), + ] + + handler(event, context) + + self.assertEqual(mock_urlopen.call_count, 2) + patch_request = mock_urlopen.call_args_list[1][0][0] + self.assertEqual(patch_request.get_method(), "PATCH") + self.assertIn("/existing-uuid-1234", patch_request.full_url) + + +class TestHandlerDelete(unittest.TestCase): + """Test Delete behavior (should be unchanged).""" + + @patch("datadog_integration_api_call.cfnresponse") + @patch("datadog_integration_api_call.urllib.request.urlopen") + def test_delete_uses_delete(self, mock_urlopen, mock_cfnresponse): + """Delete should GET the account UUID then DELETE.""" + event = make_event("Delete") + context = SimpleNamespace() + + mock_urlopen.side_effect = [ + make_mock_response(200, EXISTING_ACCOUNT_RESPONSE), + make_mock_response(204, None), + ] + + handler(event, context) + + self.assertEqual(mock_urlopen.call_count, 2) + delete_request = mock_urlopen.call_args_list[1][0][0] + self.assertEqual(delete_request.get_method(), "DELETE") + self.assertIn("/existing-uuid-1234", delete_request.full_url) + + @patch("datadog_integration_api_call.cfnresponse") + 
@patch("datadog_integration_api_call.urllib.request.urlopen") + def test_delete_nonexistent_account_succeeds(self, mock_urlopen, mock_cfnresponse): + """Delete of an account that doesn't exist should report SUCCESS.""" + event = make_event("Delete") + context = SimpleNamespace() + + mock_urlopen.side_effect = [ + make_mock_response(200, EMPTY_ACCOUNT_RESPONSE), + ] + + handler(event, context) + + # Should only call GET (no DELETE needed) + self.assertEqual(mock_urlopen.call_count, 1) + + # Should still report success so stack deletion can proceed + mock_cfnresponse.send.assert_called_once() + + +class TestHandlerUnexpectedRequestType(unittest.TestCase): + """Test unexpected request type.""" + + @patch("datadog_integration_api_call.cfnresponse") + def test_unexpected_request_type_fails(self, mock_cfnresponse): + event = make_event("Create") + event["RequestType"] = "Invalid" + context = SimpleNamespace() + + handler(event, context) + + mock_cfnresponse.send.assert_called_once() + call_args = mock_cfnresponse.send.call_args + self.assertEqual(call_args[1]["responseStatus"], "FAILED") + + +class TestExtractUuidFromAccountResponse(unittest.TestCase): + """Test the extract_uuid_from_account_response helper.""" + + @patch("datadog_integration_api_call.cfnresponse") + def test_extracts_uuid_from_single_account(self, mock_cfnresponse): + event = make_event("Update") + context = SimpleNamespace() + response = make_mock_response(200, EXISTING_ACCOUNT_RESPONSE) + + uuid = extract_uuid_from_account_response(event, context, response) + + self.assertEqual(uuid, "existing-uuid-1234") + + @patch("datadog_integration_api_call.cfnresponse") + def test_returns_none_for_empty_data_on_update(self, mock_cfnresponse): + event = make_event("Update") + context = SimpleNamespace() + response = make_mock_response(200, EMPTY_ACCOUNT_RESPONSE) + + uuid = extract_uuid_from_account_response(event, context, response) + + self.assertIsNone(uuid) + mock_cfnresponse.send.assert_called_once() + + @patch("datadog_integration_api_call.cfnresponse") + def test_returns_none_for_multiple_accounts(self, mock_cfnresponse): + event = make_event("Update") + context = SimpleNamespace() + multi_response = {"data": [{"id": "uuid-1"}, {"id": "uuid-2"}]} + response = make_mock_response(200, multi_response) + + uuid = extract_uuid_from_account_response(event, context, response) + + self.assertIsNone(uuid) + mock_cfnresponse.send.assert_called_once() + + @patch("datadog_integration_api_call.cfnresponse") + def test_returns_none_for_api_error(self, mock_cfnresponse): + event = make_event("Update") + context = SimpleNamespace() + response = make_mock_response(403, {"errors": ["Forbidden"]}) + + uuid = extract_uuid_from_account_response(event, context, response) + + self.assertIsNone(uuid) + + +if __name__ == "__main__": + unittest.main() diff --git a/aws_quickstart/datadog_integration_api_call_v2.yaml b/aws_quickstart/datadog_integration_api_call_v2.yaml index fc03c392..23571c81 100644 --- a/aws_quickstart/datadog_integration_api_call_v2.yaml +++ b/aws_quickstart/datadog_integration_api_call_v2.yaml @@ -103,239 +103,7 @@ Resources: Timeout: 5 Code: ZipFile: | - import json - import logging - import signal - from urllib.request import Request - import urllib.parse - import cfnresponse - - LOGGER = logging.getLogger() - LOGGER.setLevel(logging.INFO) - - API_CALL_SOURCE_HEADER_VALUE = "cfn-quick-start" - - class TimeoutError(Exception): - """Exception for timeouts""" - pass - - def call_datadog_api(uuid, event, method): - api_key = 
event["ResourceProperties"]["APIKey"] - app_key = event["ResourceProperties"]["APPKey"] - api_url = event["ResourceProperties"]["ApiURL"] - account_id = event["ResourceProperties"]["AccountId"] - role_name = event["ResourceProperties"]["RoleName"] - aws_partition = event["ResourceProperties"]["AWSPartition"] - account_tags = event["ResourceProperties"]["AccountTags"] - cspm = event["ResourceProperties"]["CloudSecurityPostureManagement"] - metrics_disabled = event["ResourceProperties"]["DisableMetricCollection"] - resource_collection_disabled = event['ResourceProperties']['DisableResourceCollection'] - - # Make the url Request - url = f"https://api.{api_url}/api/v2/integration/aws/accounts" - headers = { - "DD-API-KEY": api_key, - "DD-APPLICATION-KEY": app_key, - "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, - } - - if method == "PATCH" or method == "DELETE": - url = url + "/" + uuid - - if method == "DELETE": - # DELETE request has no body - request = Request(url, headers=headers) - else: - # Create the request body for POST and PATCH - values = { - "data": { - "type": "account", - "attributes": { - "aws_account_id": account_id, - "account_tags": account_tags, - "aws_partition": aws_partition, - "auth_config": {"role_name": role_name}, - "metrics_config": { - "enabled": (metrics_disabled == "false"), - }, - "resources_config": { - "cloud_security_posture_management_collection": ( - cspm == "true" - ), - "extended_collection": ( - resource_collection_disabled == "false" - ) - } - } - } - } - - data = json.dumps(values) - data = data.encode("utf-8") # data should be bytes - request = Request(url, data=data, headers=headers) - request.add_header("Content-Type", "application/json; charset=utf-8") - request.add_header("Content-Length", len(data)) - - # Send the request - request.get_method = lambda: method - try: - response = urllib.request.urlopen(request) - except urllib.error.HTTPError as e: - # Return error response from API - response = e - return response - - def get_datadog_account(event): - api_key = event["ResourceProperties"]["APIKey"] - app_key = event["ResourceProperties"]["APPKey"] - api_url = event["ResourceProperties"]["ApiURL"] - account_id = event["ResourceProperties"]["AccountId"] - - # Make the url Request - url = f"https://api.{api_url}/api/v2/integration/aws/accounts?aws_account_id={account_id}" - headers = { - "DD-API-KEY": api_key, - "DD-APPLICATION-KEY": app_key, - "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, - } - request = Request(url, headers=headers) - request.get_method = lambda: "GET" - try: - response = urllib.request.urlopen(request) - except urllib.error.HTTPError as e: - # Return error response from API - response = e - return response - - - def handler(event, context): - """Handle Lambda event from AWS""" - if event["RequestType"] == "Create": - LOGGER.info("Received Create request.") - method = "POST" - - elif event["RequestType"] == "Update": - LOGGER.info("Received Update request.") - method = "PATCH" - - elif event["RequestType"] == "Delete": - LOGGER.info("Received Delete request.") - method = "DELETE" - else: - LOGGER.info("Failed - received unexpected request.") - cfResponse = {"Message": "Received unexpected request type: {}".format(event["RequestType"])} - reason = json.dumps(cfResponse) - cfnresponse.send( - event, - context, - responseStatus="FAILED", - responseData=cfResponse, - reason=reason, - ) - return - - try: - # Call Datadog API and report response back to CloudFormation - uuid = "" - if event["RequestType"] == "Create": 
- datadog_account_response = get_datadog_account(event) - code = datadog_account_response.getcode() - data = datadog_account_response.read() - if code == 200 and data: - json_response = json.loads(data) - if len(json_response["data"]) > 0: - LOGGER.info("Account already exists in Datadog. Using PATCH to update instead of POST to avoid conflict.") - uuid = json_response["data"][0]["id"] - method = "PATCH" - else: - datadog_account_response = get_datadog_account(event) - uuid = extract_uuid_from_account_response(event, context, datadog_account_response) - if uuid is None: - return - response = call_datadog_api(uuid, event, method) - cfn_response_send_api_result(event, context, method, response) - - except Exception as e: - LOGGER.info("Failed - exception thrown during processing.") - cfResponse = {"Message": "Exception during processing: {}".format(e)} - reason = json.dumps(cfResponse) - cfnresponse.send( - event, - context, - "FAILED", - responseData=cfResponse, - reason=reason, - ) - - def extract_uuid_from_account_response(event, context, account_response): - json_response = "" - code = account_response.getcode() - data = account_response.read() - if data: - json_response = json.loads(data) - if code == 200 or code == 204: - if len(json_response["data"]) == 0: - if event["RequestType"] == "Delete": - # Report success so stack deletion can proceed - LOGGER.info("Account requested for deletion does not exist in Datadog. Proceeding with stack deletion.") - cfn_response_send_api_result(event, context, "DELETE", account_response) - else: - cfn_response_send_failure(event, context, "Datadog account not found.") - return None - if len(json_response["data"]) > 1: - cfn_response_send_failure(event, context, "Datadog account not unique.") - return None - return json_response["data"][0]["id"] - cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) - return None - - - def cfn_response_send_api_result(event, context, method, response): - reason = None - json_response = "" - code = response.getcode() - data = response.read() - if data: - json_response = json.loads(data) - if code == 200 or code == 204: - LOGGER.info("Success - Datadog API call was successful.") - response_status = "SUCCESS" - cfResponse = {"Message": "Datadog AWS Integration {} API request was successful.".format(method)} - - # return external ID for create and update - if method == "POST" or method == "PATCH": - external_id = json_response["data"]["attributes"]["auth_config"]["external_id"] - cfResponse["ExternalId"] = external_id - cfnresponse.send( - event, - context, - responseStatus=response_status, - responseData=cfResponse, - reason=reason, - ) - return - cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) - - - def cfn_response_send_failure(event, context, message): - LOGGER.info("Failed - Datadog API call failed.") - reason = None - response_status = "FAILED" - cfResponse = {"Message": message} - reason = json.dumps(cfResponse) - cfnresponse.send( - event, - context, - responseStatus=response_status, - responseData=cfResponse, - reason=reason, - ) - - def timeout_handler(_signal, _frame): - """Handle SIGALRM""" - raise TimeoutError("Lambda function timeout exceeded - increase the timeout set in the api_call Cloudformation template.") - - signal.signal(signal.SIGALRM, timeout_handler) + Outputs: ExternalId: Description: Datadog AWS Integration ExternalId diff --git a/aws_quickstart/release.sh b/aws_quickstart/release.sh index 
8ef2a07a..0d6751d1 100755 --- a/aws_quickstart/release.sh +++ b/aws_quickstart/release.sh @@ -91,6 +91,7 @@ trap "rm -rf ${TEMP_DIR}" EXIT # Copy all YAML files to temp directory cp *.yaml "${TEMP_DIR}/" cp datadog_agentless_api_call.py "${TEMP_DIR}/" +cp datadog_integration_api_call.py "${TEMP_DIR}/" # Change to temp directory for processing cd "${TEMP_DIR}" @@ -118,6 +119,17 @@ for template in datadog_agentless_delegate_role.yaml datadog_agentless_scanning. ' "$template" < datadog_agentless_api_call.py done +# Process Integration API Call template +for template in datadog_integration_api_call_v2.yaml; do + # Replace ZIPFILE_PLACEHOLDER with the contents of the Python file + perl -i -pe ' + BEGIN { $p = do { local $/; } } + /^(\s+)/ && ( + $_ = join("\n", map { $1 . $_ } split(/\n/, $p)) . "\n" + ) + ' "$template" < datadog_integration_api_call.py +done + # Upload from temp directory if [ "$PRIVATE_TEMPLATE" = true ] ; then aws s3 cp . s3://${BUCKET}/aws/${VERSION} --recursive --exclude "*" --include "*.yaml" From f66d5ce49dbae66fcd9346707f08123941d438c6 Mon Sep 17 00:00:00 2001 From: David Han Date: Fri, 20 Feb 2026 15:10:19 -0500 Subject: [PATCH 3/6] Formatting --- .github/workflows/python-test.yml | 13 - .../datadog_integration_api_call_test.py | 308 ------------------ 2 files changed, 321 deletions(-) delete mode 100644 aws_quickstart/datadog_integration_api_call_test.py diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml index 056a6d30..0191c36d 100644 --- a/.github/workflows/python-test.yml +++ b/.github/workflows/python-test.yml @@ -21,16 +21,3 @@ jobs: run: | cd aws_quickstart python -B -S -m unittest datadog_agentless_api_call_test.py -v - - integration_api_call_test: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: "3.13" - - name: Run Integration API Call unit tests - run: | - cd aws_quickstart - python -B -S -m unittest datadog_integration_api_call_test.py -v diff --git a/aws_quickstart/datadog_integration_api_call_test.py b/aws_quickstart/datadog_integration_api_call_test.py deleted file mode 100644 index 715ad5af..00000000 --- a/aws_quickstart/datadog_integration_api_call_test.py +++ /dev/null @@ -1,308 +0,0 @@ -#!/usr/bin/env python3 - -import json -import sys -import unittest -from types import SimpleNamespace -from unittest.mock import patch, Mock, MagicMock - -# cfnresponse is only available in the AWS Lambda runtime, so we mock it for testing -sys.modules["cfnresponse"] = MagicMock() - -from datadog_integration_api_call import ( - call_datadog_api, - get_datadog_account, - handler, - extract_uuid_from_account_response, -) - - -def make_event(request_type, account_id="123456789012"): - return { - "RequestType": request_type, - "ResourceProperties": { - "APIKey": "0123456789abcdef0123456789abcdef", - "APPKey": "0123456789abcdef0123456789abcdef12345678", - "ApiURL": "datadoghq.com", - "AccountId": account_id, - "RoleName": "DatadogIntegrationRole", - "AWSPartition": "aws", - "AccountTags": ["aws_account:123456789012"], - "CloudSecurityPostureManagement": "false", - "DisableMetricCollection": "false", - "DisableResourceCollection": "false", - }, - "StackId": "arn:aws:cloudformation:us-east-1:123456789012:stack/test/guid", - "RequestId": "test-request-id", - "LogicalResourceId": "DatadogAWSAccountIntegration", - "ResponseURL": "https://cloudformation-custom-resource-response-useast1.s3.amazonaws.com/test", - } - - -def make_mock_response(status_code, data=None): - 
"""Create a mock HTTP response.""" - response = Mock() - response.getcode.return_value = status_code - if data is not None: - response.read.return_value = json.dumps(data).encode("utf-8") - else: - response.read.return_value = b"" - return response - - -EXISTING_ACCOUNT_RESPONSE = { - "data": [ - { - "id": "existing-uuid-1234", - "type": "account", - "attributes": { - "aws_account_id": "123456789012", - "auth_config": {"external_id": "ext-id-1234"}, - }, - } - ] -} - -EMPTY_ACCOUNT_RESPONSE = {"data": []} - -SUCCESS_POST_RESPONSE = { - "data": { - "id": "new-uuid-5678", - "type": "account", - "attributes": { - "aws_account_id": "123456789012", - "auth_config": {"external_id": "ext-id-5678"}, - }, - } -} - -SUCCESS_PATCH_RESPONSE = { - "data": { - "id": "existing-uuid-1234", - "type": "account", - "attributes": { - "aws_account_id": "123456789012", - "auth_config": {"external_id": "ext-id-1234"}, - }, - } -} - - -class TestHandlerCreateNewAccount(unittest.TestCase): - """Test Create when account does NOT already exist in Datadog.""" - - @patch("datadog_integration_api_call.cfnresponse") - @patch("datadog_integration_api_call.urllib.request.urlopen") - def test_create_new_account_uses_post(self, mock_urlopen, mock_cfnresponse): - """When account doesn't exist, Create should POST.""" - event = make_event("Create") - context = SimpleNamespace() - - # First call: get_datadog_account returns empty list - # Second call: call_datadog_api POST succeeds - mock_urlopen.side_effect = [ - make_mock_response(200, EMPTY_ACCOUNT_RESPONSE), - make_mock_response(200, SUCCESS_POST_RESPONSE), - ] - - handler(event, context) - - # Verify POST was used (second urlopen call) - self.assertEqual(mock_urlopen.call_count, 2) - post_request = mock_urlopen.call_args_list[1][0][0] - self.assertEqual(post_request.get_method(), "POST") - self.assertNotIn("/existing-uuid", post_request.full_url) - - # Verify SUCCESS reported to CloudFormation - mock_cfnresponse.send.assert_called_once() - call_kwargs = mock_cfnresponse.send.call_args - self.assertEqual(call_kwargs[1]["responseStatus"], "SUCCESS") - - -class TestHandlerCreateExistingAccount(unittest.TestCase): - """Test Create when account ALREADY exists in Datadog (the 409 fix).""" - - @patch("datadog_integration_api_call.cfnresponse") - @patch("datadog_integration_api_call.urllib.request.urlopen") - def test_create_existing_account_uses_patch(self, mock_urlopen, mock_cfnresponse): - """When account already exists, Create should fall back to PATCH.""" - event = make_event("Create") - context = SimpleNamespace() - - # First call: get_datadog_account returns existing account - # Second call: call_datadog_api PATCH succeeds - mock_urlopen.side_effect = [ - make_mock_response(200, EXISTING_ACCOUNT_RESPONSE), - make_mock_response(200, SUCCESS_PATCH_RESPONSE), - ] - - handler(event, context) - - # Verify PATCH was used with the existing UUID - self.assertEqual(mock_urlopen.call_count, 2) - patch_request = mock_urlopen.call_args_list[1][0][0] - self.assertEqual(patch_request.get_method(), "PATCH") - self.assertIn("/existing-uuid-1234", patch_request.full_url) - - # Verify SUCCESS reported to CloudFormation - mock_cfnresponse.send.assert_called_once() - call_kwargs = mock_cfnresponse.send.call_args - self.assertEqual(call_kwargs[1]["responseStatus"], "SUCCESS") - - @patch("datadog_integration_api_call.cfnresponse") - @patch("datadog_integration_api_call.urllib.request.urlopen") - def test_create_get_fails_falls_through_to_post(self, mock_urlopen, mock_cfnresponse): - """When the 
GET check fails (non-200), Create should still try POST.""" - event = make_event("Create") - context = SimpleNamespace() - - # First call: get_datadog_account returns error - # Second call: call_datadog_api POST succeeds - mock_urlopen.side_effect = [ - make_mock_response(500, None), - make_mock_response(200, SUCCESS_POST_RESPONSE), - ] - - handler(event, context) - - # Verify POST was used (fell through because GET returned non-200) - self.assertEqual(mock_urlopen.call_count, 2) - post_request = mock_urlopen.call_args_list[1][0][0] - self.assertEqual(post_request.get_method(), "POST") - - # Verify SUCCESS reported - mock_cfnresponse.send.assert_called_once() - call_kwargs = mock_cfnresponse.send.call_args - self.assertEqual(call_kwargs[1]["responseStatus"], "SUCCESS") - - -class TestHandlerUpdate(unittest.TestCase): - """Test Update behavior (should be unchanged).""" - - @patch("datadog_integration_api_call.cfnresponse") - @patch("datadog_integration_api_call.urllib.request.urlopen") - def test_update_uses_patch(self, mock_urlopen, mock_cfnresponse): - """Update should GET the account UUID then PATCH.""" - event = make_event("Update") - context = SimpleNamespace() - - mock_urlopen.side_effect = [ - make_mock_response(200, EXISTING_ACCOUNT_RESPONSE), - make_mock_response(200, SUCCESS_PATCH_RESPONSE), - ] - - handler(event, context) - - self.assertEqual(mock_urlopen.call_count, 2) - patch_request = mock_urlopen.call_args_list[1][0][0] - self.assertEqual(patch_request.get_method(), "PATCH") - self.assertIn("/existing-uuid-1234", patch_request.full_url) - - -class TestHandlerDelete(unittest.TestCase): - """Test Delete behavior (should be unchanged).""" - - @patch("datadog_integration_api_call.cfnresponse") - @patch("datadog_integration_api_call.urllib.request.urlopen") - def test_delete_uses_delete(self, mock_urlopen, mock_cfnresponse): - """Delete should GET the account UUID then DELETE.""" - event = make_event("Delete") - context = SimpleNamespace() - - mock_urlopen.side_effect = [ - make_mock_response(200, EXISTING_ACCOUNT_RESPONSE), - make_mock_response(204, None), - ] - - handler(event, context) - - self.assertEqual(mock_urlopen.call_count, 2) - delete_request = mock_urlopen.call_args_list[1][0][0] - self.assertEqual(delete_request.get_method(), "DELETE") - self.assertIn("/existing-uuid-1234", delete_request.full_url) - - @patch("datadog_integration_api_call.cfnresponse") - @patch("datadog_integration_api_call.urllib.request.urlopen") - def test_delete_nonexistent_account_succeeds(self, mock_urlopen, mock_cfnresponse): - """Delete of an account that doesn't exist should report SUCCESS.""" - event = make_event("Delete") - context = SimpleNamespace() - - mock_urlopen.side_effect = [ - make_mock_response(200, EMPTY_ACCOUNT_RESPONSE), - ] - - handler(event, context) - - # Should only call GET (no DELETE needed) - self.assertEqual(mock_urlopen.call_count, 1) - - # Should still report success so stack deletion can proceed - mock_cfnresponse.send.assert_called_once() - - -class TestHandlerUnexpectedRequestType(unittest.TestCase): - """Test unexpected request type.""" - - @patch("datadog_integration_api_call.cfnresponse") - def test_unexpected_request_type_fails(self, mock_cfnresponse): - event = make_event("Create") - event["RequestType"] = "Invalid" - context = SimpleNamespace() - - handler(event, context) - - mock_cfnresponse.send.assert_called_once() - call_args = mock_cfnresponse.send.call_args - self.assertEqual(call_args[1]["responseStatus"], "FAILED") - - -class 
TestExtractUuidFromAccountResponse(unittest.TestCase): - """Test the extract_uuid_from_account_response helper.""" - - @patch("datadog_integration_api_call.cfnresponse") - def test_extracts_uuid_from_single_account(self, mock_cfnresponse): - event = make_event("Update") - context = SimpleNamespace() - response = make_mock_response(200, EXISTING_ACCOUNT_RESPONSE) - - uuid = extract_uuid_from_account_response(event, context, response) - - self.assertEqual(uuid, "existing-uuid-1234") - - @patch("datadog_integration_api_call.cfnresponse") - def test_returns_none_for_empty_data_on_update(self, mock_cfnresponse): - event = make_event("Update") - context = SimpleNamespace() - response = make_mock_response(200, EMPTY_ACCOUNT_RESPONSE) - - uuid = extract_uuid_from_account_response(event, context, response) - - self.assertIsNone(uuid) - mock_cfnresponse.send.assert_called_once() - - @patch("datadog_integration_api_call.cfnresponse") - def test_returns_none_for_multiple_accounts(self, mock_cfnresponse): - event = make_event("Update") - context = SimpleNamespace() - multi_response = {"data": [{"id": "uuid-1"}, {"id": "uuid-2"}]} - response = make_mock_response(200, multi_response) - - uuid = extract_uuid_from_account_response(event, context, response) - - self.assertIsNone(uuid) - mock_cfnresponse.send.assert_called_once() - - @patch("datadog_integration_api_call.cfnresponse") - def test_returns_none_for_api_error(self, mock_cfnresponse): - event = make_event("Update") - context = SimpleNamespace() - response = make_mock_response(403, {"errors": ["Forbidden"]}) - - uuid = extract_uuid_from_account_response(event, context, response) - - self.assertIsNone(uuid) - - -if __name__ == "__main__": - unittest.main() From 68d8ec3eda3d36abddf1cb1e883cacac16f1267e Mon Sep 17 00:00:00 2001 From: David Han Date: Fri, 20 Feb 2026 15:28:03 -0500 Subject: [PATCH 4/6] Test reverting --- .../datadog_integration_api_call.py | 236 ----------------- .../datadog_integration_api_call_v2.yaml | 237 +++++++++++++++++- aws_quickstart/release.sh | 12 - 3 files changed, 236 insertions(+), 249 deletions(-) delete mode 100644 aws_quickstart/datadog_integration_api_call.py diff --git a/aws_quickstart/datadog_integration_api_call.py b/aws_quickstart/datadog_integration_api_call.py deleted file mode 100644 index ecbc8ad7..00000000 --- a/aws_quickstart/datadog_integration_api_call.py +++ /dev/null @@ -1,236 +0,0 @@ -import json -import logging -import signal -from urllib.request import Request -import urllib.parse -import cfnresponse - -LOGGER = logging.getLogger() -LOGGER.setLevel(logging.INFO) - -API_CALL_SOURCE_HEADER_VALUE = "cfn-quick-start" - -class TimeoutError(Exception): - """Exception for timeouts""" - pass - -def call_datadog_api(uuid, event, method): - api_key = event["ResourceProperties"]["APIKey"] - app_key = event["ResourceProperties"]["APPKey"] - api_url = event["ResourceProperties"]["ApiURL"] - account_id = event["ResourceProperties"]["AccountId"] - role_name = event["ResourceProperties"]["RoleName"] - aws_partition = event["ResourceProperties"]["AWSPartition"] - account_tags = event["ResourceProperties"]["AccountTags"] - cspm = event["ResourceProperties"]["CloudSecurityPostureManagement"] - metrics_disabled = event["ResourceProperties"]["DisableMetricCollection"] - resource_collection_disabled = event['ResourceProperties']['DisableResourceCollection'] - - # Make the url Request - url = f"https://api.{api_url}/api/v2/integration/aws/accounts" - headers = { - "DD-API-KEY": api_key, - "DD-APPLICATION-KEY": app_key, - 
"Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, - } - - if method == "PATCH" or method == "DELETE": - url = url + "/" + uuid - - if method == "DELETE": - # DELETE request has no body - request = Request(url, headers=headers) - else: - # Create the request body for POST and PATCH - values = { - "data": { - "type": "account", - "attributes": { - "aws_account_id": account_id, - "account_tags": account_tags, - "aws_partition": aws_partition, - "auth_config": {"role_name": role_name}, - "metrics_config": { - "enabled": (metrics_disabled == "false"), - }, - "resources_config": { - "cloud_security_posture_management_collection": ( - cspm == "true" - ), - "extended_collection": ( - resource_collection_disabled == "false" - ) - } - } - } - } - - data = json.dumps(values) - data = data.encode("utf-8") # data should be bytes - request = Request(url, data=data, headers=headers) - request.add_header("Content-Type", "application/json; charset=utf-8") - request.add_header("Content-Length", len(data)) - - # Send the request - request.get_method = lambda: method - try: - response = urllib.request.urlopen(request) - except urllib.error.HTTPError as e: - # Return error response from API - response = e - return response - -def get_datadog_account(event): - api_key = event["ResourceProperties"]["APIKey"] - app_key = event["ResourceProperties"]["APPKey"] - api_url = event["ResourceProperties"]["ApiURL"] - account_id = event["ResourceProperties"]["AccountId"] - - # Make the url Request - url = f"https://api.{api_url}/api/v2/integration/aws/accounts?aws_account_id={account_id}" - headers = { - "DD-API-KEY": api_key, - "DD-APPLICATION-KEY": app_key, - "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, - } - request = Request(url, headers=headers) - request.get_method = lambda: "GET" - try: - response = urllib.request.urlopen(request) - except urllib.error.HTTPError as e: - # Return error response from API - response = e - return response - - -def handler(event, context): - """Handle Lambda event from AWS""" - if event["RequestType"] == "Create": - LOGGER.info("Received Create request.") - method = "POST" - - elif event["RequestType"] == "Update": - LOGGER.info("Received Update request.") - method = "PATCH" - - elif event["RequestType"] == "Delete": - LOGGER.info("Received Delete request.") - method = "DELETE" - else: - LOGGER.info("Failed - received unexpected request.") - cfResponse = {"Message": "Received unexpected request type: {}".format(event["RequestType"])} - reason = json.dumps(cfResponse) - cfnresponse.send( - event, - context, - responseStatus="FAILED", - responseData=cfResponse, - reason=reason, - ) - return - - try: - # Call Datadog API and report response back to CloudFormation - uuid = "" - if event["RequestType"] == "Create": - # Check if account already exists to avoid 409 conflict. - # A failed POST triggers rollback which deletes the pre-existing integration, - # so we use PATCH instead of POST if the account is already registered. - datadog_account_response = get_datadog_account(event) - code = datadog_account_response.getcode() - data = datadog_account_response.read() - if code == 200 and data: - json_response = json.loads(data) - if len(json_response["data"]) > 0: - LOGGER.info("Account already exists in Datadog. 
Using PATCH to update instead of POST to avoid conflict.") - uuid = json_response["data"][0]["id"] - method = "PATCH" - else: - datadog_account_response = get_datadog_account(event) - uuid = extract_uuid_from_account_response(event, context, datadog_account_response) - if uuid is None: - return - response = call_datadog_api(uuid, event, method) - cfn_response_send_api_result(event, context, method, response) - - except Exception as e: - LOGGER.info("Failed - exception thrown during processing.") - cfResponse = {"Message": "Exception during processing: {}".format(e)} - reason = json.dumps(cfResponse) - cfnresponse.send( - event, - context, - "FAILED", - responseData=cfResponse, - reason=reason, - ) - -def extract_uuid_from_account_response(event, context, account_response): - json_response = "" - code = account_response.getcode() - data = account_response.read() - if data: - json_response = json.loads(data) - if code == 200 or code == 204: - if len(json_response["data"]) == 0: - if event["RequestType"] == "Delete": - # Report success so stack deletion can proceed - LOGGER.info("Account requested for deletion does not exist in Datadog. Proceeding with stack deletion.") - cfn_response_send_api_result(event, context, "DELETE", account_response) - else: - cfn_response_send_failure(event, context, "Datadog account not found.") - return None - if len(json_response["data"]) > 1: - cfn_response_send_failure(event, context, "Datadog account not unique.") - return None - return json_response["data"][0]["id"] - cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) - return None - - -def cfn_response_send_api_result(event, context, method, response): - reason = None - json_response = "" - code = response.getcode() - data = response.read() - if data: - json_response = json.loads(data) - if code == 200 or code == 204: - LOGGER.info("Success - Datadog API call was successful.") - response_status = "SUCCESS" - cfResponse = {"Message": "Datadog AWS Integration {} API request was successful.".format(method)} - - # return external ID for create and update - if method == "POST" or method == "PATCH": - external_id = json_response["data"]["attributes"]["auth_config"]["external_id"] - cfResponse["ExternalId"] = external_id - cfnresponse.send( - event, - context, - responseStatus=response_status, - responseData=cfResponse, - reason=reason, - ) - return - cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) - - -def cfn_response_send_failure(event, context, message): - LOGGER.info("Failed - Datadog API call failed.") - reason = None - response_status = "FAILED" - cfResponse = {"Message": message} - reason = json.dumps(cfResponse) - cfnresponse.send( - event, - context, - responseStatus=response_status, - responseData=cfResponse, - reason=reason, - ) - -def timeout_handler(_signal, _frame): - """Handle SIGALRM""" - raise TimeoutError("Lambda function timeout exceeded - increase the timeout set in the api_call Cloudformation template.") - -signal.signal(signal.SIGALRM, timeout_handler) diff --git a/aws_quickstart/datadog_integration_api_call_v2.yaml b/aws_quickstart/datadog_integration_api_call_v2.yaml index 23571c81..9f3b062e 100644 --- a/aws_quickstart/datadog_integration_api_call_v2.yaml +++ b/aws_quickstart/datadog_integration_api_call_v2.yaml @@ -103,7 +103,242 @@ Resources: Timeout: 5 Code: ZipFile: | - + import json + import logging + import signal + from urllib.request import Request + import urllib.parse + import 
cfnresponse + + LOGGER = logging.getLogger() + LOGGER.setLevel(logging.INFO) + + API_CALL_SOURCE_HEADER_VALUE = "cfn-quick-start" + + class TimeoutError(Exception): + """Exception for timeouts""" + pass + + def call_datadog_api(uuid, event, method): + api_key = event["ResourceProperties"]["APIKey"] + app_key = event["ResourceProperties"]["APPKey"] + api_url = event["ResourceProperties"]["ApiURL"] + account_id = event["ResourceProperties"]["AccountId"] + role_name = event["ResourceProperties"]["RoleName"] + aws_partition = event["ResourceProperties"]["AWSPartition"] + account_tags = event["ResourceProperties"]["AccountTags"] + cspm = event["ResourceProperties"]["CloudSecurityPostureManagement"] + metrics_disabled = event["ResourceProperties"]["DisableMetricCollection"] + resource_collection_disabled = event['ResourceProperties']['DisableResourceCollection'] + + # Make the url Request + url = f"https://api.{api_url}/api/v2/integration/aws/accounts" + headers = { + "DD-API-KEY": api_key, + "DD-APPLICATION-KEY": app_key, + "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, + } + + if method == "PATCH" or method == "DELETE": + url = url + "/" + uuid + + if method == "DELETE": + # DELETE request has no body + request = Request(url, headers=headers) + else: + # Create the request body for POST and PATCH + values = { + "data": { + "type": "account", + "attributes": { + "aws_account_id": account_id, + "account_tags": account_tags, + "aws_partition": aws_partition, + "auth_config": {"role_name": role_name}, + "metrics_config": { + "enabled": (metrics_disabled == "false"), + }, + "resources_config": { + "cloud_security_posture_management_collection": ( + cspm == "true" + ), + "extended_collection": ( + resource_collection_disabled == "false" + ) + } + } + } + } + + data = json.dumps(values) + data = data.encode("utf-8") # data should be bytes + request = Request(url, data=data, headers=headers) + request.add_header("Content-Type", "application/json; charset=utf-8") + request.add_header("Content-Length", len(data)) + + # Send the request + request.get_method = lambda: method + try: + response = urllib.request.urlopen(request) + except urllib.error.HTTPError as e: + # Return error response from API + response = e + return response + + def get_datadog_account(event): + api_key = event["ResourceProperties"]["APIKey"] + app_key = event["ResourceProperties"]["APPKey"] + api_url = event["ResourceProperties"]["ApiURL"] + account_id = event["ResourceProperties"]["AccountId"] + + # Make the url Request + url = f"https://api.{api_url}/api/v2/integration/aws/accounts?aws_account_id={account_id}" + headers = { + "DD-API-KEY": api_key, + "DD-APPLICATION-KEY": app_key, + "Dd-Aws-Api-Call-Source": API_CALL_SOURCE_HEADER_VALUE, + } + request = Request(url, headers=headers) + request.get_method = lambda: "GET" + try: + response = urllib.request.urlopen(request) + except urllib.error.HTTPError as e: + # Return error response from API + response = e + return response + + + def handler(event, context): + """Handle Lambda event from AWS""" + if event["RequestType"] == "Create": + LOGGER.info("Received Create request.") + method = "POST" + + elif event["RequestType"] == "Update": + LOGGER.info("Received Update request.") + method = "PATCH" + + elif event["RequestType"] == "Delete": + LOGGER.info("Received Delete request.") + method = "DELETE" + else: + LOGGER.info("Failed - received unexpected request.") + cfResponse = {"Message": "Received unexpected request type: {}".format(event["RequestType"])} + reason = 
json.dumps(cfResponse) + cfnresponse.send( + event, + context, + responseStatus="FAILED", + responseData=cfResponse, + reason=reason, + ) + return + + try: + # Call Datadog API and report response back to CloudFormation + uuid = "" + if event["RequestType"] == "Create": + # Check if account already exists to avoid 409 conflict. + # A failed POST triggers rollback which deletes the pre-existing integration, + # so we use PATCH instead of POST if the account is already registered. + datadog_account_response = get_datadog_account(event) + code = datadog_account_response.getcode() + data = datadog_account_response.read() + if code == 200 and data: + json_response = json.loads(data) + if len(json_response["data"]) > 0: + LOGGER.info("Account already exists in Datadog. Using PATCH to update instead of POST to avoid conflict.") + uuid = json_response["data"][0]["id"] + method = "PATCH" + else: + datadog_account_response = get_datadog_account(event) + uuid = extract_uuid_from_account_response(event, context, datadog_account_response) + if uuid is None: + return + response = call_datadog_api(uuid, event, method) + cfn_response_send_api_result(event, context, method, response) + + except Exception as e: + LOGGER.info("Failed - exception thrown during processing.") + cfResponse = {"Message": "Exception during processing: {}".format(e)} + reason = json.dumps(cfResponse) + cfnresponse.send( + event, + context, + "FAILED", + responseData=cfResponse, + reason=reason, + ) + + def extract_uuid_from_account_response(event, context, account_response): + json_response = "" + code = account_response.getcode() + data = account_response.read() + if data: + json_response = json.loads(data) + if code == 200 or code == 204: + if len(json_response["data"]) == 0: + if event["RequestType"] == "Delete": + # Report success so stack deletion can proceed + LOGGER.info("Account requested for deletion does not exist in Datadog. 
Proceeding with stack deletion.") + cfn_response_send_api_result(event, context, "DELETE", account_response) + else: + cfn_response_send_failure(event, context, "Datadog account not found.") + return None + if len(json_response["data"]) > 1: + cfn_response_send_failure(event, context, "Datadog account not unique.") + return None + return json_response["data"][0]["id"] + cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) + return None + + + def cfn_response_send_api_result(event, context, method, response): + reason = None + json_response = "" + code = response.getcode() + data = response.read() + if data: + json_response = json.loads(data) + if code == 200 or code == 204: + LOGGER.info("Success - Datadog API call was successful.") + response_status = "SUCCESS" + cfResponse = {"Message": "Datadog AWS Integration {} API request was successful.".format(method)} + + # return external ID for create and update + if method == "POST" or method == "PATCH": + external_id = json_response["data"]["attributes"]["auth_config"]["external_id"] + cfResponse["ExternalId"] = external_id + cfnresponse.send( + event, + context, + responseStatus=response_status, + responseData=cfResponse, + reason=reason, + ) + return + cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) + + + def cfn_response_send_failure(event, context, message): + LOGGER.info("Failed - Datadog API call failed.") + reason = None + response_status = "FAILED" + cfResponse = {"Message": message} + reason = json.dumps(cfResponse) + cfnresponse.send( + event, + context, + responseStatus=response_status, + responseData=cfResponse, + reason=reason, + ) + + def timeout_handler(_signal, _frame): + """Handle SIGALRM""" + raise TimeoutError("Lambda function timeout exceeded - increase the timeout set in the api_call Cloudformation template.") + + signal.signal(signal.SIGALRM, timeout_handler) Outputs: ExternalId: Description: Datadog AWS Integration ExternalId diff --git a/aws_quickstart/release.sh b/aws_quickstart/release.sh index 0d6751d1..8ef2a07a 100755 --- a/aws_quickstart/release.sh +++ b/aws_quickstart/release.sh @@ -91,7 +91,6 @@ trap "rm -rf ${TEMP_DIR}" EXIT # Copy all YAML files to temp directory cp *.yaml "${TEMP_DIR}/" cp datadog_agentless_api_call.py "${TEMP_DIR}/" -cp datadog_integration_api_call.py "${TEMP_DIR}/" # Change to temp directory for processing cd "${TEMP_DIR}" @@ -119,17 +118,6 @@ for template in datadog_agentless_delegate_role.yaml datadog_agentless_scanning. ' "$template" < datadog_agentless_api_call.py done -# Process Integration API Call template -for template in datadog_integration_api_call_v2.yaml; do - # Replace ZIPFILE_PLACEHOLDER with the contents of the Python file - perl -i -pe ' - BEGIN { $p = do { local $/; } } - /^(\s+)/ && ( - $_ = join("\n", map { $1 . $_ } split(/\n/, $p)) . "\n" - ) - ' "$template" < datadog_integration_api_call.py -done - # Upload from temp directory if [ "$PRIVATE_TEMPLATE" = true ] ; then aws s3 cp . 
s3://${BUCKET}/aws/${VERSION} --recursive --exclude "*" --include "*.yaml" From 56822a767f30dd3f0b6fffc96dacd9bb5e6c8184 Mon Sep 17 00:00:00 2001 From: David Han Date: Fri, 20 Feb 2026 15:33:13 -0500 Subject: [PATCH 5/6] Upload new version --- aws_organizations/version.txt | 2 +- aws_quickstart/version.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/aws_organizations/version.txt b/aws_organizations/version.txt index b913b7c6..52cad5ee 100644 --- a/aws_organizations/version.txt +++ b/aws_organizations/version.txt @@ -1 +1 @@ -v4.1.0 +v4.1.1 diff --git a/aws_quickstart/version.txt b/aws_quickstart/version.txt index 5ce26e9e..237fc3e4 100644 --- a/aws_quickstart/version.txt +++ b/aws_quickstart/version.txt @@ -1 +1 @@ -v4.5.2 +v4.5.3 From 21d4ff48e439b636e6693a75ad33cb6136651c21 Mon Sep 17 00:00:00 2001 From: David Han Date: Fri, 20 Feb 2026 16:30:24 -0500 Subject: [PATCH 6/6] Update case for deploying CFT with same role name --- aws_organizations/README.md | 2 +- aws_organizations/main_organizations.yaml | 38 ++++++++++++++----- aws_quickstart/README.md | 2 +- .../datadog_integration_api_call_v2.yaml | 35 ++++++++++++----- 4 files changed, 57 insertions(+), 20 deletions(-) diff --git a/aws_organizations/README.md b/aws_organizations/README.md index ed149c21..788d4063 100644 --- a/aws_organizations/README.md +++ b/aws_organizations/README.md @@ -37,7 +37,7 @@ Before getting started, ensure you have the following prerequisites: ## Pre-existing Integrations -If you deploy this StackSet to an AWS Organization or OU that includes accounts which already have a Datadog integration configured (e.g., via a standalone stack), the template will detect the existing integration and update it rather than attempting to create a duplicate. This prevents a 409 conflict error that would otherwise trigger a rollback and delete the pre-existing integration. +If you deploy this StackSet to an AWS Organization or OU that includes accounts which already have a Datadog integration configured (e.g., via a standalone stack), the template will detect the existing integration and update it rather than attempting to create a duplicate. This prevents a 409 conflict error that would otherwise trigger a rollback and delete the pre-existing integration. Additionally, if the stack fails for any other reason (e.g., IAM role name conflict) and rolls back, the rollback will preserve the pre-existing integration rather than deleting it. ## Datadog::Integrations::AWS diff --git a/aws_organizations/main_organizations.yaml b/aws_organizations/main_organizations.yaml index 47ebe431..5c38ddfb 100644 --- a/aws_organizations/main_organizations.yaml +++ b/aws_organizations/main_organizations.yaml @@ -272,7 +272,11 @@ Resources: try: # Call Datadog API and report response back to CloudFormation uuid = "" + preexisting = False if event["RequestType"] == "Create": + # Check if account already exists to avoid 409 conflict. + # A failed POST triggers rollback which deletes the pre-existing integration, + # so we use PATCH instead of POST if the account is already registered. datadog_account_response = get_datadog_account(event) code = datadog_account_response.getcode() data = datadog_account_response.read() @@ -282,13 +286,28 @@ Resources: LOGGER.info("Account already exists in Datadog. 
Using PATCH to update instead of POST to avoid conflict.") uuid = json_response["data"][0]["id"] method = "PATCH" + preexisting = True + elif event["RequestType"] == "Delete": + # If this resource was created by patching a pre-existing integration, + # do not delete it on stack rollback/deletion. + physical_resource_id = event.get("PhysicalResourceId", "") + if physical_resource_id == "PREEXISTING": + LOGGER.info("Integration was pre-existing. Skipping delete to preserve existing integration.") + cfResponse = {"Message": "Skipped delete for pre-existing integration."} + cfnresponse.send(event, context, responseStatus="SUCCESS", responseData=cfResponse, reason=None) + return + datadog_account_response = get_datadog_account(event) + uuid = extract_uuid_from_account_response(event, context, datadog_account_response) + if uuid is None: + return else: datadog_account_response = get_datadog_account(event) uuid = extract_uuid_from_account_response(event, context, datadog_account_response) if uuid is None: return response = call_datadog_api(uuid, event, method) - cfn_response_send_api_result(event, context, method, response) + physical_id = "PREEXISTING" if preexisting else None + cfn_response_send_api_result(event, context, method, response, physical_id) except Exception as e: LOGGER.info("Failed - exception thrown during processing.") @@ -325,7 +344,7 @@ Resources: return None - def cfn_response_send_api_result(event, context, method, response): + def cfn_response_send_api_result(event, context, method, response, physical_id=None): reason = None json_response = "" code = response.getcode() @@ -341,13 +360,14 @@ Resources: if method == "POST" or method == "PATCH": external_id = json_response["data"]["attributes"]["auth_config"]["external_id"] cfResponse["ExternalId"] = external_id - cfnresponse.send( - event, - context, - responseStatus=response_status, - responseData=cfResponse, - reason=reason, - ) + send_kwargs = { + "responseStatus": response_status, + "responseData": cfResponse, + "reason": reason, + } + if physical_id: + send_kwargs["physicalResourceId"] = physical_id + cfnresponse.send(event, context, **send_kwargs) return cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response)) diff --git a/aws_quickstart/README.md b/aws_quickstart/README.md index bfe19690..753f1673 100644 --- a/aws_quickstart/README.md +++ b/aws_quickstart/README.md @@ -25,7 +25,7 @@ This template creates the following AWS resources required by the Datadog AWS in ## Pre-existing Integrations -If you deploy this CloudFormation stack to an AWS account that already has a Datadog integration configured, the stack will detect the existing integration and update it rather than attempting to create a duplicate. This prevents a 409 conflict error that would otherwise trigger a rollback and delete the pre-existing integration. +If you deploy this CloudFormation stack to an AWS account that already has a Datadog integration configured, the stack will detect the existing integration and update it rather than attempting to create a duplicate. This prevents a 409 conflict error that would otherwise trigger a rollback and delete the pre-existing integration. Additionally, if the stack fails for any other reason (e.g., IAM role name conflict) and rolls back, the rollback will preserve the pre-existing integration rather than deleting it. 
## Updating your CloudFormation Stack diff --git a/aws_quickstart/datadog_integration_api_call_v2.yaml b/aws_quickstart/datadog_integration_api_call_v2.yaml index 9f3b062e..435c95bd 100644 --- a/aws_quickstart/datadog_integration_api_call_v2.yaml +++ b/aws_quickstart/datadog_integration_api_call_v2.yaml @@ -237,6 +237,7 @@ Resources: try: # Call Datadog API and report response back to CloudFormation uuid = "" + preexisting = False if event["RequestType"] == "Create": # Check if account already exists to avoid 409 conflict. # A failed POST triggers rollback which deletes the pre-existing integration, @@ -250,13 +251,28 @@ Resources: LOGGER.info("Account already exists in Datadog. Using PATCH to update instead of POST to avoid conflict.") uuid = json_response["data"][0]["id"] method = "PATCH" + preexisting = True + elif event["RequestType"] == "Delete": + # If this resource was created by patching a pre-existing integration, + # do not delete it on stack rollback/deletion. + physical_resource_id = event.get("PhysicalResourceId", "") + if physical_resource_id == "PREEXISTING": + LOGGER.info("Integration was pre-existing. Skipping delete to preserve existing integration.") + cfResponse = {"Message": "Skipped delete for pre-existing integration."} + cfnresponse.send(event, context, responseStatus="SUCCESS", responseData=cfResponse, reason=None) + return + datadog_account_response = get_datadog_account(event) + uuid = extract_uuid_from_account_response(event, context, datadog_account_response) + if uuid is None: + return else: datadog_account_response = get_datadog_account(event) uuid = extract_uuid_from_account_response(event, context, datadog_account_response) if uuid is None: return response = call_datadog_api(uuid, event, method) - cfn_response_send_api_result(event, context, method, response) + physical_id = "PREEXISTING" if preexisting else None + cfn_response_send_api_result(event, context, method, response, physical_id) except Exception as e: LOGGER.info("Failed - exception thrown during processing.") @@ -293,7 +309,7 @@ Resources: return None - def cfn_response_send_api_result(event, context, method, response): + def cfn_response_send_api_result(event, context, method, response, physical_id=None): reason = None json_response = "" code = response.getcode() @@ -309,13 +325,14 @@ Resources: if method == "POST" or method == "PATCH": external_id = json_response["data"]["attributes"]["auth_config"]["external_id"] cfResponse["ExternalId"] = external_id - cfnresponse.send( - event, - context, - responseStatus=response_status, - responseData=cfResponse, - reason=reason, - ) + send_kwargs = { + "responseStatus": response_status, + "responseData": cfResponse, + "reason": reason, + } + if physical_id: + send_kwargs["physicalResourceId"] = physical_id + cfnresponse.send(event, context, **send_kwargs) return cfn_response_send_failure(event, context, "Datadog API returned error: {}".format(json_response))
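
Taken together, the hunks above change the custom resource handler in two places: on Create it looks the AWS account up in Datadog first and adopts an existing integration with PATCH instead of POSTing a duplicate, and on Delete it refuses to remove an integration it only adopted. The following is a condensed, illustrative sketch of that flow, not the code shipped in the templates: the Datadog lookup, the API call, and cfnresponse.send are replaced with local stubs, and the UUID resolution the real handler performs for Update and ordinary Delete requests is omitted. It only mirrors the branches these patches add.

    import logging

    LOGGER = logging.getLogger(__name__)
    logging.basicConfig(level=logging.INFO)

    PREEXISTING_ID = "PREEXISTING"  # sentinel PhysicalResourceId used by the patches


    def get_datadog_account(event):
        # Stub for GET /api/v2/integration/aws/accounts?aws_account_id=...
        # Pretend the account is already registered so the adoption branch runs.
        return 200, {"data": [{"id": "11111111-2222-3333-4444-555555555555"}]}


    def call_datadog_api(uuid, event, method):
        # Stub for the POST/PATCH/DELETE call against the accounts endpoint.
        LOGGER.info("Datadog API call: %s (uuid=%r)", method, uuid)
        return 200


    def send(event, status, physical_resource_id=None, message=""):
        # Stub for cfnresponse.send(); the real function posts to the CFN callback URL.
        LOGGER.info("CFN response: %s physical_id=%r %s", status, physical_resource_id, message)


    def handler(event, _context):
        request_type = event["RequestType"]
        method = {"Create": "POST", "Update": "PATCH", "Delete": "DELETE"}.get(request_type)
        if method is None:
            send(event, "FAILED", message="Unexpected request type: {}".format(request_type))
            return

        uuid = ""
        preexisting = False

        if request_type == "Create":
            # Look the account up first: a POST against an existing integration returns
            # 409, and the rollback that follows would delete that integration.
            code, body = get_datadog_account(event)
            if code == 200 and body["data"]:
                uuid = body["data"][0]["id"]
                method = "PATCH"
                preexisting = True
        elif request_type == "Delete" and event.get("PhysicalResourceId") == PREEXISTING_ID:
            # The resource adopted a pre-existing integration, so stack rollback or
            # deletion must leave it in place.
            send(event, "SUCCESS", message="Skipped delete for pre-existing integration.")
            return
        # (The real handler also resolves the account UUID for Update and for ordinary
        # Delete requests before calling the API; that lookup is omitted in this sketch.)

        call_datadog_api(uuid, event, method)
        # Report the sentinel physical ID so later Delete events can recognize adoption.
        send(event, "SUCCESS",
             physical_resource_id=PREEXISTING_ID if preexisting else None)


    if __name__ == "__main__":
        handler({"RequestType": "Create"}, None)
        handler({"RequestType": "Delete", "PhysicalResourceId": PREEXISTING_ID}, None)

Returning "PREEXISTING" as the PhysicalResourceId is what makes the delete-time guard work: CloudFormation echoes the physical ID back on every subsequent Delete event, so the function can tell an adopted integration apart from one it created itself, and a rollback caused by an unrelated failure (such as an IAM role name conflict) leaves the original integration untouched.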