22import boto3
33import fnmatch
44import json
5+ import lambda_lib
56import logging
67import os
78import pypd
89import requests
9- #import slack
1010from botocore .exceptions import ClientError
1111
1212# TODO:
1616logger .setLevel (logging .DEBUG )
1717states = {}
1818
def get_config():
    """Load the Lambda configuration as a dict.

    Resolution order:
      1. If the LAMBDA_CONFIG environment variable is set and non-empty:
         - an ``s3://bucket/key`` value is fetched from S3; on a ClientError
           the bundled ``config.json`` is used instead and copied up to S3
           so it can be edited in place next time;
         - any other value is treated as a local file path.
      2. Otherwise the bundled ``config.json`` is read.

    After loading, every top-level key that has a same-named (uppercased)
    environment variable is overridden: values that parse as a non-empty
    JSON object replace the key as a dict, otherwise a non-empty raw string
    replaces it verbatim.

    Returns:
        dict: the effective configuration.
    """
    # Allow a different config file to be specified via the environment
    if 'LAMBDA_CONFIG' in os.environ and len(os.getenv('LAMBDA_CONFIG')) > 0:
        config_file = os.getenv('LAMBDA_CONFIG')
        if config_file.find("s3://") == 0:
            # s3://<bucket>/<key> -- read from S3, or seed S3 with the default
            s3_bucket = config_file[5:].split("/")[0]
            s3_object = "/".join(config_file[5:].split("/")[1:])
            s3 = boto3.client('s3')
            try:
                config_result = s3.get_object(Bucket=s3_bucket, Key=s3_object)
                logger.debug("S3 get config: {}".format(config_result))
                config_raw = config_result["Body"].read()
                config = json.loads(config_raw)
            except ClientError as e:
                # First run (or missing object): fall back to the packaged
                # default and publish it to S3 for future invocations.
                logger.warning("Config not found in S3: {}".format(e))
                logger.warning("Copying default config to S3")
                with open('config.json') as f:
                    config = json.load(f)
                s3.put_object(Bucket=s3_bucket, Key=s3_object,
                              Body=json.dumps(config, indent=2))
        else:
            logger.debug("Reading config file: {}".format(config_file))
            with open(config_file) as f:
                config = json.load(f)
    else:
        logger.debug("Reading config file: config.json")
        with open('config.json') as f:
            config = json.load(f)

    logger.debug("Config before overrides: {}".format(json.dumps(config, indent=2)))
    # Replace config data with environment data if it exists.
    # Environment variables are the config keys, uppercased.
    for key in config.keys():
        if key.upper() in os.environ:
            value = os.getenv(key.upper())
            try:
                value_json = json.loads(value)
                logger.debug("Got json: {}".format(json.dumps(value_json)))
                if len(value_json.keys()) > 0:
                    config[key] = value_json
            except (ValueError, AttributeError):
                # ValueError: not JSON at all; AttributeError: JSON but not
                # an object (no .keys()). Either way, treat as a plain
                # string override when non-empty.
                if len(value) > 0:
                    config[key] = value
    logger.debug("Config after overrides: {}".format(json.dumps(config, indent=2)))
    return config
65-
def get_secret(secret_name, endpoint_url, region_name):
    """Fetch a secret from AWS Secrets Manager.

    Args:
        secret_name: the secret's name or ARN.
        endpoint_url: Secrets Manager endpoint to talk to.
        region_name: AWS region for the client session.

    Returns:
        The ``SecretString`` when the secret is a string, the
        ``SecretBinary`` bytes otherwise, or None when the lookup fails
        (the failure reason is printed).
    """
    # Required IAM: secretsmanager:GetSecretValue (plus kms:Decrypt for
    # secrets encrypted with a custom CMK).
    client = boto3.session.Session().client(
        service_name='secretsmanager',
        region_name=region_name,
        endpoint_url=endpoint_url,
    )

    try:
        response = client.get_secret_value(SecretId=secret_name)
    except ClientError as err:
        code = err.response['Error']['Code']
        if code == 'ResourceNotFoundException':
            print("The requested secret " + secret_name + " was not found")
        elif code == 'InvalidRequestException':
            print("The request was invalid due to:", err)
        elif code == 'InvalidParameterException':
            print("The request had invalid params:", err)
        return None

    # Success path: exactly one of SecretString / SecretBinary is populated,
    # depending on whether the stored secret is text or binary.
    if 'SecretString' in response:
        return response['SecretString']
    return response['SecretBinary']
9619
9720#def get_s3_parts():
9821
@@ -113,41 +36,17 @@ def get_pagerduty_services(s3_bucket, prefix, pattern):
11336 # secretsmanager:GetSecretValue - to retrieve the encrypted part of the secret
11437 # kms:Decrypt - Only required if you used a custom KMS CMK to encrypt your secret
11538 # Return: list of channel names
116- services = []
117- s3 = boto3 .client ('s3' )
118- list = s3 .list_objects_v2 (Bucket = s3_bucket , Prefix = prefix )
119- # TODO: Handle no objects found
120- #print(list)
121- for obj in list ["Contents" ]:
122- # Pattern match on text after prefix. UNIX globbing - Could use regex
123- if fnmatch .fnmatch (obj ["Key" ], prefix + "/" + pattern ):
124- print ("Reading object: {}" .format (obj ["Key" ]))
125- try :
126- service_result = s3 .get_object (Bucket = s3_bucket , Key = obj ["Key" ])
127- logger .debug ("S3 get config: {}" .format (service_result ))
128- service_raw = service_result ["Body" ].read ()
129- service = json .loads (service_raw )
130- services .append (service )
131- except ClientError as e :
132- logger .warn ("Config not found in S3: {}" .format (e ))
133- logger .warn ("Copying default config to S3" )
39+ services = lambda_lib .get_integration_parts (s3_bucket , prefix , pattern )
13440 #print("PagerDuty services: {}".format(json.dumps(services, indent=2)))
13541 return services
13642
137- #print(json.dumps(list, indent=2))
138- #config_result = s3.get_object(Bucket=config["s3_bucket_parts"], Key=s3_object)
139-
140- #datadog_api_base = "https://api.datadoghq.com/api/v1/"
141- #integration_base = datadog_api_base + "integration/"
142- #slack = integration_base + "slack"
143-
144- def get_schedules (config ):
43+ def get_schedules (datadog_keys ):
14544 # Get from DD
14645 # Read: curl -v "https://api.datadoghq.com/api/v1/integration/pagerduty?api_key=${api_key}&application_key=${app_key}"
14746 # ["schedules"]
14847 url_base = "https://api.datadoghq.com/api/v1/integration/pagerduty"
149- api = "?api_key=" + config [ "datadog_api_key " ]
150- app = "&application_key=" + config [ "datadog_app_key " ]
48+ api = "?api_key=" + datadog_keys [ "api_key " ]
49+ app = "&application_key=" + datadog_keys [ "app_key " ]
15150 url = url_base + api + app + "&run_check=true"
15251 print ("URL: {}" .format (url ))
15352 #result = requests.delete(url_base + api + app)
@@ -159,8 +58,7 @@ def get_schedules(config):
15958 #print("Results: {}".format(json.dumps(data, indent=2)))
16059 return data ["schedules" ]
16160
162-
163- def write_datadog_pagerduty (config , services , schedules ):
61+ def write_datadog_pagerduty (config , datadog_keys , services , schedules ):
16462 # Datadog: url, api_key, app_key
16563 # POST: Create integration: Does add, not delete or update. Can have duplicate entries
16664 # PUT: Create/Update integration: updates, deletes
@@ -171,40 +69,28 @@ def write_datadog_pagerduty(config, services, schedules):
17169 data ["subdomain" ] = config ["pagerduty_subdomain" ]
17270 data ["schedules" ] = schedules
17371 data ["api_token" ] = config ["pagerduty_ro_key" ]
174- print (json .dumps (data , indent = 2 ))
175- url_base = "https://api.datadoghq.com/api/v1/integration/pagerduty"
176- api = "?api_key=" + config ["datadog_api_key" ]
177- app = "&application_key=" + config ["datadog_app_key" ]
178- url = url_base + api + app + "&run_check=true"
179- headers = {"Content-type" : "application/json" }
180- print ("URL: {}" .format (url ))
181- #result = requests.delete(url_base + api + app)
182- #print(result)
183- #result = requests.post(url, data=json.dumps(data), headers=headers)
184- result = requests .put (url , data = json .dumps (data ), headers = headers )
185- print (result )
186- if result .status_code != 204 :
187- logger .error ("Datadog Pagerduty integration update failed with status: {}" .format (result .status_code ))
188- #logger.debug("HTTP response header {}".format(json.dumps(result.headers, indent=2)))
189-
190- #
191- # datadog_integration_slack():
192- # read, write, delete
72+ #print(json.dumps(data, indent=2))
73+ lambda_lib .write_datadog_integration (datadog_keys , 'pagerduty' , data )
19374
def lambda_handler(event, context):
    """AWS Lambda entry point: sync PagerDuty services into Datadog.

    Loads the configuration, raises the logger to the configured level,
    gathers PagerDuty service definitions from S3, pulls the required
    secrets (Datadog keys, PagerDuty read-only key), then writes the
    PagerDuty integration to Datadog.

    Args:
        event: the triggering Lambda event (logged at DEBUG only for now).
        context: the Lambda runtime context object.

    Returns:
        str: 'Done' on completion.

    Raises:
        ValueError: when config["log_level"] is not a valid logging level.
    """
    global states  # module-level state; not read or written in this handler
    config = lambda_lib.get_config()

    # Wire up logging at the configured level; reject unknown level names.
    level = getattr(logging, config["log_level"].upper(), None)
    if not isinstance(level, int):
        raise ValueError('Invalid log level: {}'.format(config["log_level"]))
    logger.setLevel(level)

    # TODO: read event, if DeletedObject remove PagerDuty service
    logger.debug("Event: {}".format(json.dumps(event, indent=2)))
    logger.debug("Context: {}".format(context))

    services = get_pagerduty_services(
        config["s3_bucket_parts"], config["path_parts"], config["parts_pattern"])

    # Secrets: DD api, DD app, pagerduty RO
    datadog_keys = lambda_lib.get_secret(
        config["secrets"]["datadog_api"], config["secret_endpoint_url"],
        config["aws_region"])
    config["pagerduty_ro_key"] = lambda_lib.get_secret(
        config["secrets"]["pagerduty_ro"], config["secret_endpoint_url"],
        config["aws_region"])

    schedules = get_schedules(datadog_keys)
    write_datadog_pagerduty(config, datadog_keys, services, schedules)

    return 'Done'
21096
0 commit comments