Configure SAM pipelines with Terraform
SAM (Serverless Application Model) ships with a great feature for staged deployments — you should definitely have a look if you haven't tried it yet. The one thing I don't like is the generated names: having all these random strings in IAM roles and S3 buckets drives me crazy. Below are the Terraform scripts and a GitHub Actions pipeline with two deployment stages — Staging and Production.
First, create an S3 bucket manually to hold Terraform's state, then use its name in config.tf:
# Remote state: stored encrypted in the manually created S3 bucket.
terraform {
backend "s3" {
encrypt = true
# Replace with the name of the bucket you created for Terraform state.
bucket = "<SOME_UNIQ_NAME>"
region = "eu-west-1"
key = "terraform.tfstate"
}
required_providers {
aws = {
source = "hashicorp/aws",
version = "~> 4.0"
}
}
required_version = ">= 0.13"
}
Create staging prerequisites:
# IAM user whose access keys the pipeline uses for staging deployments
# (stored as STAGING_AWS_ACCESS_KEY_ID / STAGING_AWS_SECRET_ACCESS_KEY secrets).
resource "aws_iam_user" "deploy_staging_user" {
name = "deploy-staging-user"
}
# The staging deploy user may assume only roles tagged
# Role=pipeline-execution-role (ABAC via aws:ResourceTag).
resource "aws_iam_user_policy" "deploy_staging_user" {
  name = "AssumeRoles"
  user = aws_iam_user.deploy_staging_user.name

  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Effect   = "Allow"
        Action   = ["sts:AssumeRole"]
        Resource = "*"
        Condition = {
          StringEquals = {
            "aws:ResourceTag/Role" = "pipeline-execution-role"
          }
        }
      }
    ]
  })
}
# Role CloudFormation assumes to provision stack resources; passed to
# `sam deploy` via --role-arn (STAGING_CLOUDFORMATION_EXECUTION_ROLE).
resource "aws_iam_role" "deploy_staging_cf_execution_role" {
name = "deploy-staging-cf-execution-role"
# Trusts cloudformation.amazonaws.com (see the trust-relationship data source below).
assume_role_policy = data.aws_iam_policy_document.staging_cf_execution_role_trust_relationship.json
inline_policy {
name = "GrantCloudFormationFullAccess"
policy = data.aws_iam_policy_document.staging_cf_full_access.json
}
}
# Role the pipeline assumes to drive deployments (package, change sets, S3 artifacts).
resource "aws_iam_role" "deploy_staging_pipeline_execution_role" {
  name               = "deploy-staging-pipeline-execution-role"
  assume_role_policy = data.aws_iam_policy_document.staging_pipeline_execution_role_trust_relationship.json

  # BUGFIX: the deploy user's policy only allows sts:AssumeRole on roles
  # tagged Role=pipeline-execution-role (see aws_iam_user_policy.deploy_staging_user),
  # so without this tag the assume-role call is denied.
  tags = {
    Role = "pipeline-execution-role"
  }

  inline_policy {
    name   = "PipelineExecutionRolePermissions"
    policy = data.aws_iam_policy_document.staging_pipeline_execution_role_permissions.json
  }
}
# NOTE(review): */* is extremely broad; CloudFormation provisions arbitrary
# stack resources, but consider scoping this down for your stacks.
data "aws_iam_policy_document" "staging_cf_full_access" {
statement {
actions = ["*"]
resources = ["*"]
effect = "Allow"
}
}
# Only the CloudFormation service may assume the CF execution role.
data "aws_iam_policy_document" "staging_cf_execution_role_trust_relationship" {
statement {
effect = "Allow"
actions = ["sts:AssumeRole"]
principals {
type = "Service"
identifiers = ["cloudformation.amazonaws.com"]
}
}
}
# Who may assume the staging pipeline execution role.
data "aws_iam_policy_document" "staging_pipeline_execution_role_trust_relationship" {
  # The staging deploy user.
  statement {
    effect  = "Allow"
    actions = ["sts:AssumeRole"]
    principals {
      type        = "AWS"
      identifiers = [aws_iam_user.deploy_staging_user.arn]
    }
  }
  // allow access from prod user to deploy on merge into main
  statement {
    effect  = "Allow"
    actions = ["sts:AssumeRole"]
    principals {
      type        = "AWS"
      identifiers = [aws_iam_user.deploy_production_user.arn]
    }
  }
  # Any principal in the account tagged Role=aws-sam-pipeline-codebuild-service-role.
  statement {
    effect  = "Allow"
    actions = ["sts:AssumeRole"]
    principals {
      type = "AWS"
      # BUGFIX: AWS account IDs are 12 digits (the original placeholder had 11).
      # Replace with your real account ID.
      identifiers = ["arn:aws:iam::000000000000:root"]
    }
    condition {
      test     = "StringEquals"
      variable = "aws:PrincipalTag/Role"
      values = [
        "aws-sam-pipeline-codebuild-service-role"
      ]
    }
  }
}
# Permissions the staging pipeline role needs to run `sam deploy`.
data "aws_iam_policy_document" "staging_pipeline_execution_role_permissions" {
# Hand the CloudFormation execution role to CloudFormation (--role-arn).
statement {
actions = ["iam:PassRole"]
resources = [aws_iam_role.deploy_staging_cf_execution_role.arn]
effect = "Allow"
}
# Manage CloudFormation change sets and stacks.
statement {
actions = [
"cloudformation:CreateChangeSet",
"cloudformation:DescribeChangeSet",
"cloudformation:ExecuteChangeSet",
"cloudformation:DeleteStack",
"cloudformation:DescribeStackEvents",
"cloudformation:DescribeStacks",
"cloudformation:GetTemplate",
"cloudformation:GetTemplateSummary",
"cloudformation:DescribeStackResource"
]
resources = ["*"]
effect = "Allow"
}
# Read/write deployment artifacts in the staging artifacts bucket.
statement {
actions = [
"s3:DeleteObject",
"s3:GetObject*",
"s3:PutObject*",
"s3:GetBucket*",
"s3:List*"
]
resources = [
aws_s3_bucket.staging_artifacts_bucket.arn,
"${aws_s3_bucket.staging_artifacts_bucket.arn}/*",
]
effect = "Allow"
}
}
# Bucket for SAM deployment artifacts; must match STAGING_ARTIFACTS_BUCKET
# in the GitHub Actions pipeline. Bucket names are global — pick a unique one.
resource "aws_s3_bucket" "staging_artifacts_bucket" {
bucket = "staging-artifacts-bucket-name"
lifecycle {
# Guard against accidental `terraform destroy` of deployment history.
prevent_destroy = true
}
}
# Block all forms of public access to the artifacts bucket.
resource "aws_s3_bucket_public_access_block" "staging_artifacts_bucket" {
bucket = aws_s3_bucket.staging_artifacts_bucket.id
block_public_acls = true
block_public_policy = true
ignore_public_acls = true
restrict_public_buckets = true
}
# NOTE(review): buckets created with ACLs disabled (the S3 default since
# April 2023) reject ACL writes; this may need an
# aws_s3_bucket_ownership_controls resource — verify on apply.
resource "aws_s3_bucket_acl" "staging_artifacts_bucket" {
bucket = aws_s3_bucket.staging_artifacts_bucket.id
acl = "private"
}
And production (it's basically a copy-paste of the staging configuration; you may want to extract a shared module to avoid the duplication):
# IAM user whose access keys the pipeline uses for production deployments
# (stored as PROD_AWS_ACCESS_KEY_ID / PROD_AWS_SECRET_ACCESS_KEY secrets).
resource "aws_iam_user" "deploy_production_user" {
name = "deploy-production-user"
}
# The production deploy user may assume only roles tagged
# Role=pipeline-execution-role (ABAC via aws:ResourceTag).
resource "aws_iam_user_policy" "deploy_production_user" {
  name = "AssumeRoles"
  user = aws_iam_user.deploy_production_user.name

  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Effect   = "Allow"
        Action   = ["sts:AssumeRole"]
        Resource = "*"
        Condition = {
          StringEquals = {
            "aws:ResourceTag/Role" = "pipeline-execution-role"
          }
        }
      }
    ]
  })
}
# Role CloudFormation assumes to provision stack resources; passed to
# `sam deploy` via --role-arn (PROD_CLOUDFORMATION_EXECUTION_ROLE).
resource "aws_iam_role" "deploy_production_cf_execution_role" {
name = "deploy-production-cf-execution-role"
# Trusts cloudformation.amazonaws.com (see the trust-relationship data source below).
assume_role_policy = data.aws_iam_policy_document.production_cf_execution_role_trust_relationship.json
inline_policy {
name = "GrantCloudFormationFullAccess"
policy = data.aws_iam_policy_document.production_cf_full_access.json
}
}
# Role the pipeline assumes to drive production deployments.
resource "aws_iam_role" "deploy_production_pipeline_execution_role" {
  name               = "deploy-production-pipeline-execution-role"
  assume_role_policy = data.aws_iam_policy_document.production_pipeline_execution_role_trust_relationship.json

  # BUGFIX: the deploy user's policy only allows sts:AssumeRole on roles
  # tagged Role=pipeline-execution-role (see aws_iam_user_policy.deploy_production_user),
  # so without this tag the assume-role call is denied.
  tags = {
    Role = "pipeline-execution-role"
  }

  inline_policy {
    name   = "PipelineExecutionRolePermissions"
    policy = data.aws_iam_policy_document.production_pipeline_execution_role_permissions.json
  }
}
# NOTE(review): */* is extremely broad; CloudFormation provisions arbitrary
# stack resources, but consider scoping this down for your stacks.
data "aws_iam_policy_document" "production_cf_full_access" {
statement {
actions = ["*"]
resources = ["*"]
effect = "Allow"
}
}
# Only the CloudFormation service may assume the CF execution role.
data "aws_iam_policy_document" "production_cf_execution_role_trust_relationship" {
statement {
effect = "Allow"
actions = ["sts:AssumeRole"]
principals {
type = "Service"
identifiers = ["cloudformation.amazonaws.com"]
}
}
}
# Who may assume the production pipeline execution role.
data "aws_iam_policy_document" "production_pipeline_execution_role_trust_relationship" {
  # The production deploy user.
  statement {
    effect  = "Allow"
    actions = ["sts:AssumeRole"]
    principals {
      type        = "AWS"
      identifiers = [aws_iam_user.deploy_production_user.arn]
    }
  }
  # Any principal in the account tagged Role=aws-sam-pipeline-codebuild-service-role.
  statement {
    effect  = "Allow"
    actions = ["sts:AssumeRole"]
    principals {
      type = "AWS"
      # BUGFIX: AWS account IDs are 12 digits (the original placeholder had 11).
      # Replace with your real account ID.
      identifiers = ["arn:aws:iam::000000000000:root"]
    }
    condition {
      test     = "StringEquals"
      variable = "aws:PrincipalTag/Role"
      values = [
        "aws-sam-pipeline-codebuild-service-role"
      ]
    }
  }
}
# Permissions the production pipeline role needs to run `sam deploy`.
data "aws_iam_policy_document" "production_pipeline_execution_role_permissions" {
# Hand the CloudFormation execution role to CloudFormation (--role-arn).
statement {
actions = ["iam:PassRole"]
resources = [aws_iam_role.deploy_production_cf_execution_role.arn]
effect = "Allow"
}
# Manage CloudFormation change sets and stacks.
statement {
actions = [
"cloudformation:CreateChangeSet",
"cloudformation:DescribeChangeSet",
"cloudformation:ExecuteChangeSet",
"cloudformation:DeleteStack",
"cloudformation:DescribeStackEvents",
"cloudformation:DescribeStacks",
"cloudformation:GetTemplate",
"cloudformation:GetTemplateSummary",
"cloudformation:DescribeStackResource"
]
resources = ["*"]
effect = "Allow"
}
# Read/write deployment artifacts in the production artifacts bucket.
statement {
actions = [
"s3:DeleteObject",
"s3:GetObject*",
"s3:PutObject*",
"s3:GetBucket*",
"s3:List*"
]
resources = [
aws_s3_bucket.production_artifacts_bucket.arn,
"${aws_s3_bucket.production_artifacts_bucket.arn}/*",
]
effect = "Allow"
}
}
# Bucket for SAM deployment artifacts; must match PROD_ARTIFACTS_BUCKET
# in the GitHub Actions pipeline. Bucket names are global — pick a unique one.
resource "aws_s3_bucket" "production_artifacts_bucket" {
bucket = "production-artifacts-bucket-name"
lifecycle {
# Guard against accidental `terraform destroy` of deployment history.
prevent_destroy = true
}
}
# Block all forms of public access to the artifacts bucket.
resource "aws_s3_bucket_public_access_block" "production_artifacts_bucket" {
bucket = aws_s3_bucket.production_artifacts_bucket.id
block_public_acls = true
block_public_policy = true
ignore_public_acls = true
restrict_public_buckets = true
}
# NOTE(review): buckets created with ACLs disabled (the S3 default since
# April 2023) reject ACL writes; this may need an
# aws_s3_bucket_ownership_controls resource — verify on apply.
resource "aws_s3_bucket_acl" "production_artifacts_bucket" {
bucket = aws_s3_bucket.production_artifacts_bucket.id
acl = "private"
}
The GitHub Actions pipeline is basically the one generated by SAM. Note that you need to add two sets of credentials to the repository's secrets: STAGING_AWS_ACCESS_KEY_ID / STAGING_AWS_SECRET_ACCESS_KEY and PROD_AWS_ACCESS_KEY_ID / PROD_AWS_SECRET_ACCESS_KEY.
name: Pipeline
# Runs on pushes to main and feature branches; cleans up feature stacks
# when a feature branch is deleted.
on:
push:
branches:
- 'main'
- 'feature**'
delete:
branches:
- 'feature**'
env:
# Staging deploy user keys from repository secrets.
STAGING_PIPELINE_USER_ACCESS_KEY_ID: ${{ secrets.STAGING_AWS_ACCESS_KEY_ID }}
STAGING_PIPELINE_USER_SECRET_ACCESS_KEY: ${{ secrets.STAGING_AWS_SECRET_ACCESS_KEY }}
SAM_TEMPLATE: template.yaml
STAGING_STACK_NAME: staging-stack-name
# ARNs of the roles created by the Terraform scripts above.
STAGING_PIPELINE_EXECUTION_ROLE: arn:aws:iam::00000000000:role/deploy-staging-pipeline-execution-role
STAGING_CLOUDFORMATION_EXECUTION_ROLE: arn:aws:iam::00000000000:role/deploy-staging-cf-execution-role
STAGING_ARTIFACTS_BUCKET: staging-artifacts-bucket-name
STAGING_REGION: eu-west-1
# Production deploy user keys from repository secrets.
PROD_PIPELINE_USER_ACCESS_KEY_ID: ${{ secrets.PROD_AWS_ACCESS_KEY_ID }}
PROD_PIPELINE_USER_SECRET_ACCESS_KEY: ${{ secrets.PROD_AWS_SECRET_ACCESS_KEY }}
PROD_STACK_NAME: production-stack-name
PROD_PIPELINE_EXECUTION_ROLE: arn:aws:iam::00000000000:role/deploy-production-pipeline-execution-role
PROD_CLOUDFORMATION_EXECUTION_ROLE: arn:aws:iam::00000000000:role/deploy-production-cf-execution-role
PROD_ARTIFACTS_BUCKET: production-artifacts-bucket-name
PROD_REGION: eu-west-1
jobs:
# Unit tests: runs for every push (main and feature branches).
test:
if: github.event_name == 'push'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: |
# trigger the tests here
# Deletes the stack deployed for a feature branch when the branch is deleted.
delete-feature:
if: startsWith(github.event.ref, 'feature') && github.event_name == 'delete'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: aws-actions/setup-sam@v1
- name: Assume the testing pipeline user role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ env.STAGING_PIPELINE_USER_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.STAGING_PIPELINE_USER_SECRET_ACCESS_KEY }}
aws-region: ${{ env.STAGING_REGION }}
role-to-assume: ${{ env.STAGING_PIPELINE_EXECUTION_ROLE }}
role-session-name: testing-packaging
role-duration-seconds: 3600
role-skip-session-tagging: true
# Stack name is the branch name stripped to [a-zA-Z0-9-], matching
# how build-and-deploy-feature names it on deploy.
- name: Delete feature branch stack
env:
FEATURE_BRANCH_NAME: ${{ github.event.ref }}
run: |
sam delete \
--stack-name $(echo ${FEATURE_BRANCH_NAME##*/} | tr -cd '[a-zA-Z0-9-]') \
--region ${STAGING_REGION} \
--no-prompts
build-and-deploy-feature:
# this stage is triggered only for feature branches (feature*),
# which will build the stack and deploy to a stack named with branch name.
if: startsWith(github.ref, 'refs/heads/feature')
needs: [test]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: aws-actions/setup-sam@v1
- run: sam build --template ${SAM_TEMPLATE}
- name: Assume the testing pipeline user role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ env.STAGING_PIPELINE_USER_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.STAGING_PIPELINE_USER_SECRET_ACCESS_KEY }}
aws-region: ${{ env.STAGING_REGION }}
role-to-assume: ${{ env.STAGING_PIPELINE_EXECUTION_ROLE }}
role-session-name: feature-deployment
role-duration-seconds: 3600
role-skip-session-tagging: true
# Deploys via the CF execution role created by Terraform (--role-arn).
- name: Deploy to feature stack in the testing account
shell: bash
run: |
sam deploy --stack-name $(echo ${GITHUB_REF##*/} | tr -cd '[a-zA-Z0-9-]') \
--capabilities CAPABILITY_IAM \
--region ${STAGING_REGION} \
--s3-bucket ${STAGING_ARTIFACTS_BUCKET} \
--no-fail-on-empty-changeset \
--role-arn ${STAGING_CLOUDFORMATION_EXECUTION_ROLE}
# On main: build once, package artifacts for both stages, upload the
# packaged templates for the downstream deploy jobs.
build-and-package:
if: github.ref == 'refs/heads/main'
needs: [test]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: aws-actions/setup-sam@v1
- name: Build resources
run: sam build --template ${SAM_TEMPLATE}
# The PROD user assumes the STAGING pipeline role here — the staging
# trust policy explicitly allows the prod user for main-branch deploys.
- name: Assume the testing pipeline user role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ env.PROD_PIPELINE_USER_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.PROD_PIPELINE_USER_SECRET_ACCESS_KEY }}
aws-region: ${{ env.STAGING_REGION }}
role-to-assume: ${{ env.STAGING_PIPELINE_EXECUTION_ROLE }}
role-session-name: testing-packaging
role-duration-seconds: 3600
role-skip-session-tagging: true
- name: Upload artifacts to testing artifact buckets
run: |
sam package \
--s3-bucket ${STAGING_ARTIFACTS_BUCKET} \
--region ${STAGING_REGION} \
--output-template-file packaged-testing.yaml
- uses: actions/upload-artifact@v2
with:
name: packaged-testing.yaml
path: packaged-testing.yaml
- name: Assume the prod pipeline user role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ env.PROD_PIPELINE_USER_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.PROD_PIPELINE_USER_SECRET_ACCESS_KEY }}
aws-region: ${{ env.PROD_REGION }}
role-to-assume: ${{ env.PROD_PIPELINE_EXECUTION_ROLE }}
role-session-name: prod-packaging
role-duration-seconds: 3600
role-skip-session-tagging: true
- name: Upload artifacts to production artifact buckets
run: |
sam package \
--s3-bucket ${PROD_ARTIFACTS_BUCKET} \
--region ${PROD_REGION} \
--output-template-file packaged-prod.yaml
- uses: actions/upload-artifact@v2
with:
name: packaged-prod.yaml
path: packaged-prod.yaml
# Deploys the packaged template to the staging stack on main.
deploy-testing:
if: github.ref == 'refs/heads/main'
needs: [build-and-package]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: aws-actions/setup-sam@v1
- uses: actions/download-artifact@v2
with:
name: packaged-testing.yaml
# PROD user keys + STAGING role: allowed by the staging trust policy
# for main-branch deploys (see the Terraform trust relationship).
- name: Assume the testing pipeline user role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ env.PROD_PIPELINE_USER_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.PROD_PIPELINE_USER_SECRET_ACCESS_KEY }}
aws-region: ${{ env.STAGING_REGION }}
role-to-assume: ${{ env.STAGING_PIPELINE_EXECUTION_ROLE }}
role-session-name: testing-deployment
role-duration-seconds: 3600
role-skip-session-tagging: true
- name: Deploy to testing account
run: |
sam deploy --stack-name ${STAGING_STACK_NAME} \
--template packaged-testing.yaml \
--capabilities CAPABILITY_IAM \
--region ${STAGING_REGION} \
--s3-bucket ${STAGING_ARTIFACTS_BUCKET} \
--no-fail-on-empty-changeset \
--role-arn ${STAGING_CLOUDFORMATION_EXECUTION_ROLE}
# Runs integration tests against the freshly deployed staging stack.
integration-test:
if: github.ref == 'refs/heads/main'
needs: [deploy-testing]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: |
# trigger the integration tests here
# Deploys to production after integration tests pass on main.
deploy-prod:
if: github.ref == 'refs/heads/main'
needs: [integration-test]
runs-on: ubuntu-latest
# Configure GitHub Action Environment to have a manual approval step before deployment to production
# https://docs.github.com/en/actions/reference/environments
# environment: <configured-environment>
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: aws-actions/setup-sam@v1
- uses: actions/download-artifact@v2
with:
name: packaged-prod.yaml
- name: Assume the prod pipeline user role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ env.PROD_PIPELINE_USER_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.PROD_PIPELINE_USER_SECRET_ACCESS_KEY }}
aws-region: ${{ env.PROD_REGION }}
role-to-assume: ${{ env.PROD_PIPELINE_EXECUTION_ROLE }}
role-session-name: prod-deployment
role-duration-seconds: 3600
role-skip-session-tagging: true
- name: Deploy to production account
run: |
sam deploy --stack-name ${PROD_STACK_NAME} \
--template packaged-prod.yaml \
--capabilities CAPABILITY_IAM \
--region ${PROD_REGION} \
--s3-bucket ${PROD_ARTIFACTS_BUCKET} \
--no-fail-on-empty-changeset \
--role-arn ${PROD_CLOUDFORMATION_EXECUTION_ROLE}
And after applying the Terraform scripts and triggering a build, you should see the working app with eye-friendly resource names.
That's all, folks!