I'm trying to build a simple CI/CD pipeline using the CDK Python library. I have the code for a stack that creates the pipeline and its constructs.
I want to pass some output values from the Lambda invoke action to the build stage. However, after defining the environment variables when creating the CodeBuild project construct, they are not being passed at all.
This is the code I have for the CDK application:
# Imports for the stack (assuming the CDK v2 / aws-cdk-lib module layout)
from aws_cdk import (
    Stack,
    aws_codebuild as codebuild,
    aws_codepipeline as codepipeline,
    aws_codepipeline_actions,
    aws_iam as iam,
    aws_lambda as _lambda,
)
from constructs import Construct

class ModelDeployCICDStack(Stack):
def __init__(self, scope: Construct, construct_id: str, envs_deployment,**kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
codebuild_lambda_role = iam.Role(
self,
"CodeBuildLambdaInvokeAction",
role_name=f"CICDPipelineCodeBuildLambdaInvokeRole",
managed_policies=[
iam.ManagedPolicy.from_aws_managed_policy_name("AWSLambda_FullAccess"),
iam.ManagedPolicy.from_aws_managed_policy_name("CloudWatchFullAccess")
],
assumed_by=iam.CompositePrincipal(
iam.ServicePrincipal("lambda.amazonaws.com"),
)
)
codebuild_lambda_role.add_to_policy(
iam.PolicyStatement(
actions=["sts:AssumeRole"],
resources=["arn:aws:iam::XXXXXXX:role/DescribeCfnStackRole"]
)
)
my_lambda = _lambda.Function(
self,
"CfnCAALambdaFunc",
runtime=_lambda.Runtime.PYTHON_3_8,
code=_lambda.Code.from_asset("ticket_irregularities_cicd_cdk/lambda"),
handler="cfn_caa_helper.lambda_handler",
role = codebuild_lambda_role
)
lambda_invoke_action = aws_codepipeline_actions.LambdaInvokeAction(
action_name="Lambda",
lambda_= my_lambda,
variables_namespace="ConfigMetadata"
)
# Create the CodePipeline pipeline
# Note: It is created up front so its artifact S3 bucket can be referenced below
pipeline = codepipeline.Pipeline(
self, "CodePipeline",
pipeline_name="ModelDeploy-CodePipeline",
)
# Create codebuild role
codebuild_role = iam.Role(
self,
"CodeBuildRole",
role_name = f"CodeBuildRoleModelDeployPipeline",
assumed_by = iam.CompositePrincipal(iam.ServicePrincipal("codebuild.amazonaws.com"),
iam.ServicePrincipal("codepipeline.amazonaws.com")
)
)
# Define the CodeBuild project that builds the model deployment code
# Its output is a set of CloudFormation templates
build_project_execute = codebuild.PipelineProject(
self, id = "CodeBuildProject",
project_name="ModelDeploy-CodeBuild",
role=codebuild_role,
build_spec = codebuild.BuildSpec.from_source_filename("buildspec.yml"),
environment= codebuild.BuildEnvironment(build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,),
environment_variables={
"SAGEMAKER_EXECUTION_ROLE_ARN_DEV": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= envs_deployment["dev"]["sagemaker"]
),
"LAMBDA_EXECUTION_ROLE_ARN_DEV": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= envs_deployment["dev"]["lambda"]
),
"DEFAULT_BUCKET_DEV": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= envs_deployment["dev"]["pipeline_bucket"]
),
"SAGEMAKER_EXECUTION_ROLE_ARN_STAGING": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "#{ConfigMetadata.SMRoleArn}"
),
"LAMBDA_EXECUTION_ROLE_ARN_STAGING": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "#{ConfigMetadata.LambdaRoleArn}"
),
"DEFAULT_BUCKET_STAGING": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "#{ConfigMetadata.S3BucketName}"
),
"ARTIFACT_BUCKET": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= pipeline.artifact_bucket.bucket_name
),
"MODEL_BUILD_S3_BUCKET": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value=envs_deployment["dev"]["modelbuild_bucket"]
),
"EXPORT_TEMPLATE_NAME": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "template-export.yml"
),
"EXPORT_TEMPLATE_DEV_CONFIG": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "dev-config-export.json"
),
"EXPORT_TEMPLATE_STAGING_CONFIG": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "staging-config-export.json"
),
"EXPORT_TEMPLATE_PROD_CONFIG": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "prod-config-export.json"
),
}
)
# Define the source action - which repository to watch for changes
# Note: The CodeStar connection needs to be set up manually once, using the console
source_code_output= codepipeline.Artifact()
source_action = aws_codepipeline_actions.CodeStarConnectionsSourceAction(
action_name="Github_Source_ModelDeploy",
owner="XXXXXXXXXX",
repo="sm-modeldeploy",
branch="main",
output=source_code_output,
connection_arn="arn:aws:codestar-connections:eu-north-1:XXXXXXXXXX:connection/XXXXXXXXXXXXXXX"
)
# Define approval action before executing codebuild project
manual_approval_for_build = aws_codepipeline_actions.ManualApprovalAction(
action_name="ApproveBuildingTemplates",
)
source_input_cfn_template = codepipeline.Artifact("artifact1")
source_input_cfn_template_1 = codepipeline.Artifact("artifact2")
source_input_cfn_template_2 = codepipeline.Artifact("artifact3")
source_input_cfn_template_3 = codepipeline.Artifact("artifact4")
# Define the CodeBuild action (which uses the CodeBuild project above and the source output)
# The underlying CodeBuild project produces output artifacts that will be used later on
build_action = aws_codepipeline_actions.CodeBuildAction(
action_name="BuildCfnTemplatesDeployment",
project=build_project_execute,
input=source_code_output,
outputs=[source_input_cfn_template, source_input_cfn_template_1, source_input_cfn_template_2, source_input_cfn_template_3]
)
# Add stage with source action
pipeline.add_stage(stage_name="Source", actions=[source_action])
# Add lambda invoke action
pipeline.add_stage(stage_name="LambdaInvoke", actions=[lambda_invoke_action])
# Add stage with manual approval action
pipeline.add_stage(stage_name="Approve", actions=[manual_approval_for_build])
# Add stage with build action
pipeline.add_stage(stage_name="Build", actions=[build_action])
And this is the code for the Lambda function used by the CodePipeline action:
import boto3
import json
CODEPIPELINE_CLIENT = boto3.client('codepipeline')
def assume_crossaccount_role(role_arn, role_session_name="cfn_lookup_outputs"):
role_info = {
'RoleArn': role_arn,
'RoleSessionName': role_session_name
}
client = boto3.client('sts')
credentials = client.assume_role(**role_info)
session = boto3.session.Session(
aws_access_key_id=credentials['Credentials']['AccessKeyId'],
aws_secret_access_key=credentials['Credentials']['SecretAccessKey'],
aws_session_token=credentials['Credentials']['SessionToken']
)
return session
boto_session = assume_crossaccount_role("arn:aws:iam::XXXXXXXXXXXXXXX:role/DescribeCfnStackRole")
CF_CLIENT = boto_session.client('cloudformation')
def lambda_handler(event, context):
# Name of the CloudFormation stack whose outputs we want to read
stack_name = 'ModelDeployStaging-ModelDeployInfra'
try:
# Describe the stack to get its information
response = CF_CLIENT.describe_stacks(StackName=stack_name)
# Extract the stack outputs
stack = response['Stacks'][0] # Assuming there is only one stack with this name
outputs = stack.get('Outputs', [])
if not outputs:
# Report failure to CodePipeline
CODEPIPELINE_CLIENT.put_job_failure_result(
jobId=event['CodePipeline.job']['id'],
failureDetails={
'type': 'JobFailed',
'message': f"No outputs found for stack '{stack_name}'"
}
)
else:
output_dict = {output['OutputKey']: output['OutputValue'] for output in outputs}
print(output_dict)
# Report success to CodePipeline with the output values
CODEPIPELINE_CLIENT.put_job_success_result(
jobId=event['CodePipeline.job']['id'],
outputVariables=output_dict
)
except Exception as e:
# Report failure to CodePipeline with the error message
CODEPIPELINE_CLIENT.put_job_failure_result(
jobId=event['CodePipeline.job']['id'],
failureDetails={
'type': 'JobFailed',
'message': f"Error: {str(e)}"
}
)
Here, the variable output_dict has the following value:
{'SMRoleArn': 'arn:aws:iam::XXXXXXX:role/SageMakerExecXXXXXXXXXXXXXX', 'LambdaRoleArn': 'arn:aws:iam::XXXXXXXXX:role/LambdaExecutioXXXXXXXXXX', 'S3BucketName': 'modeldeploystaging-XXXXXXXXXXXXXXXXXXXX'}
and I'm passing these values with the put_job_success_result method in the Lambda function.
In my build stage, the CodeBuild project uses a simple buildspec whose commands just print the values of the environment variables, but it doesn't print the correct values.
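The actual buildspec.yml isn't shown here; a minimal inline equivalent, sketched with codebuild.BuildSpec.from_object and only meant to echo the variables defined above, would look like this:
# Hypothetical inline equivalent of the buildspec.yml, only for debugging:
# it echoes the environment variables so you can see what CodeBuild actually receives.
debug_build_spec = codebuild.BuildSpec.from_object({
    "version": "0.2",
    "phases": {
        "build": {
            "commands": [
                "echo SAGEMAKER_EXECUTION_ROLE_ARN_STAGING=$SAGEMAKER_EXECUTION_ROLE_ARN_STAGING",
                "echo LAMBDA_EXECUTION_ROLE_ARN_STAGING=$LAMBDA_EXECUTION_ROLE_ARN_STAGING",
                "echo DEFAULT_BUCKET_STAGING=$DEFAULT_BUCKET_STAGING",
            ]
        }
    },
})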
Am I missing something obvious? Can someone help me out on this?
I have used this question and its accepted answer as a reference: How to fetch SSM Parameters from two different accounts using AWS CDK. But I had no luck.
I found what I was doing wrong. I was not grasping the difference between the pipeline's stages, the actions in each stage, and the action provider (for example, the CodeBuild project). The output variables from the Lambda action should be passed to the CodeBuild action's environment variables, not set on the CodeBuild project itself; from the action they are propagated to the action provider, so the CodeBuild project's build ends up with those environment variables.
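In short, this is the key change, condensed from the full stack below (only one output artifact and one variable shown):
# Reference the Lambda action's output variables on the CodeBuild *action*,
# not on the PipelineProject's environment_variables.
build_action = aws_codepipeline_actions.CodeBuildAction(
    action_name="BuildCfnTemplatesDeployment",
    project=build_project_execute,
    input=source_code_output,
    outputs=[source_input_cfn_template],
    environment_variables={
        "SAGEMAKER_EXECUTION_ROLE_ARN_STAGING": codebuild.BuildEnvironmentVariable(
            type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            value=lambda_invoke_action.variable("SMRoleArn"),
        ),
    },
)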
Here is the full code for the stack that defines the CodePipeline pipeline.
class ModelDeployCICDStack(Stack):
def __init__(self, scope: Construct, construct_id: str, envs_deployment,**kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
codebuild_lambda_role = iam.Role(
self,
"CodeBuildLambdaInvokeAction",
role_name=f"CICDPipelineCodeBuildLambdaInvokeRole",
managed_policies=[
iam.ManagedPolicy.from_aws_managed_policy_name("AWSLambda_FullAccess"),
iam.ManagedPolicy.from_aws_managed_policy_name("CloudWatchFullAccess")
],
assumed_by=iam.CompositePrincipal(
iam.ServicePrincipal("lambda.amazonaws.com"),
)
)
codebuild_lambda_role.add_to_policy(
iam.PolicyStatement(
actions=["sts:AssumeRole"],
resources=["arn:aws:iam::XXXXXXX:role/DescribeCfnStackRole"]
)
)
my_lambda = _lambda.Function(
self,
"CfnCAALambdaFunc",
runtime=_lambda.Runtime.PYTHON_3_8,
code=_lambda.Code.from_asset("ticket_irregularities_cicd_cdk/lambda"),
handler="cfn_caa_helper.lambda_handler",
role = codebuild_lambda_role
)
lambda_invoke_action = aws_codepipeline_actions.LambdaInvokeAction(
action_name="Lambda",
lambda_= my_lambda,
variables_namespace="ConfigMetadata"
)
# Create the CodePipeline pipeline
# Note: It is created up front so its artifact S3 bucket can be referenced below
pipeline = codepipeline.Pipeline(
self, "CodePipeline",
pipeline_name="ModelDeploy-CodePipeline",
)
# Create codebuild role
codebuild_role = iam.Role(
self,
"CodeBuildRole",
role_name = f"CodeBuildRoleModelDeployPipeline",
assumed_by = iam.CompositePrincipal(iam.ServicePrincipal("codebuild.amazonaws.com"),
iam.ServicePrincipal("codepipeline.amazonaws.com")
)
)
# Define the CodeBuild project that builds the model deployment code
# Its output is a set of CloudFormation templates
build_project_execute = codebuild.PipelineProject(
self, id = "CodeBuildProject",
project_name="ModelDeploy-CodeBuild",
role=codebuild_role,
build_spec = codebuild.BuildSpec.from_source_filename("buildspec.yml"),
environment= codebuild.BuildEnvironment(build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,),
environment_variables={
"SAGEMAKER_EXECUTION_ROLE_ARN_DEV": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= envs_deployment["dev"]["sagemaker"]
),
"LAMBDA_EXECUTION_ROLE_ARN_DEV": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= envs_deployment["dev"]["lambda"]
),
"DEFAULT_BUCKET_DEV": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= envs_deployment["dev"]["pipeline_bucket"]
),
"ARTIFACT_BUCKET": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= pipeline.artifact_bucket.bucket_name
),
"MODEL_BUILD_S3_BUCKET": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value=envs_deployment["dev"]["modelbuild_bucket"]
),
"EXPORT_TEMPLATE_NAME": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "template-export.yml"
),
"EXPORT_TEMPLATE_DEV_CONFIG": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "dev-config-export.json"
),
"EXPORT_TEMPLATE_STAGING_CONFIG": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "staging-config-export.json"
),
"EXPORT_TEMPLATE_PROD_CONFIG": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= "prod-config-export.json"
),
}
)
# Define the source action - which repository to watch for changes
# Note: The CodeStar connection needs to be set up manually once, using the console
source_code_output= codepipeline.Artifact()
source_action = aws_codepipeline_actions.CodeStarConnectionsSourceAction(
action_name="Github_Source_ModelDeploy",
owner="XXXXXXXXX",
repo="sm-modeldeploy",
branch="main",
output=source_code_output,
connection_arn="arn:aws:codestar-connections:eu-north-1:XXXXXXXXXXXXXX:connection/XXXXXXXXXXXXXXXXX"
)
# Define approval action before executing codebuild project
manual_approval_for_build = aws_codepipeline_actions.ManualApprovalAction(
action_name="ApproveBuildingTemplates",
run_order=1
)
# # Define approval section before deploying cloudformation template to dev
# manual_approval_for_cloudformation = aws_codepipeline_actions.ManualApprovalAction(
# action_name="ApproveDeploymentDev"
# )
source_input_cfn_template = codepipeline.Artifact("artifact1")
source_input_cfn_template_1 = codepipeline.Artifact("artifact2")
source_input_cfn_template_2 = codepipeline.Artifact("artifact3")
source_input_cfn_template_3 = codepipeline.Artifact("artifact4")
# Define the CodeBuild action (which uses the CodeBuild project above and the source output)
# The underlying CodeBuild project produces output artifacts that will be used later on
build_action = aws_codepipeline_actions.CodeBuildAction(
action_name="BuildCfnTemplatesDeployment",
project=build_project_execute,
input=source_code_output,
outputs=[source_input_cfn_template, source_input_cfn_template_1, source_input_cfn_template_2, source_input_cfn_template_3],
environment_variables={
"SAGEMAKER_EXECUTION_ROLE_ARN_STAGING": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= lambda_invoke_action.variable("SMRoleArn")
),
"LAMBDA_EXECUTION_ROLE_ARN_STAGING": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= lambda_invoke_action.variable("LambdaRoleArn")
),
"DEFAULT_BUCKET_STAGING": codebuild.BuildEnvironmentVariable(
type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
value= lambda_invoke_action.variable("S3BucketName")
),
},
run_order=2
)
# # Define deploy action in dev account - action creates/updates a cloudformation stack
# dev_deploy_action = aws_codepipeline_actions.CloudFormationCreateUpdateStackAction(
# action_name="CloudFormationDeployDev",
# admin_permissions=True,
# template_path=source_input_cfn_template.at_path("template-export.yml"),
# template_configuration=source_input_cfn_template_1.at_path("dev-config-export.json"),
# stack_name="CICDPipeline-ModelDeploy-Dev"
# )
# Add stage with source action
pipeline.add_stage(stage_name="Source", actions=[source_action])
# Add lambda invoke action
pipeline.add_stage(stage_name="LambdaInvoke", actions=[lambda_invoke_action])
# Add stage with manual approval action
pipeline.add_stage(
stage_name="BuilSageMakerPipelineDeploy",
actions=[
manual_approval_for_build,
build_action
]
)
I hope the code can be useful to others as an example of how to build a CodePipeline pipeline with a Lambda action that performs some operation and passes output variables to the following actions.