# Workflow: Sonar Multi Account CLI
# (GitHub Actions run page chrome removed — it was scrape residue, not YAML)
name: 'Sonar Multi Account CLI'

# Serialize runs: all invocations share the "multi_account" Terraform workspace
# and remote state, so only one run may execute at a time.
concurrency:
  group: multi_account
# Triggers: reusable (workflow_call) and manual (workflow_dispatch).
# NOTE(review): additional_tags declares both `required: true` and a default;
# GitHub ignores defaults on required inputs — confirm which was intended.
on:
  workflow_call:
    inputs:
      branch:
        required: true
        type: string
      additional_tags:
        required: true
        type: string
        default: '{"OwnerEmail": "[email protected]", "ManagerEmail": "[email protected]", "TeamEmail": "[email protected]", "Description": "DSFKit_Tests", "Environment": "Test", "DataClassification": "Internal"}'
    secrets:
      AWS_ACCESS_KEY_ID:
        required: true
      AWS_SECRET_ACCESS_KEY:
        required: true
      AWS_ACCESS_KEY_ID_STAGE:
        required: true
      AWS_SECRET_ACCESS_KEY_STAGE:
        required: true
      SLACK_WEBHOOK_URL:
        required: true
      JUMP_SERVER_KEY:
        required: true
  workflow_dispatch:
    inputs:
      branch:
        required: true
        type: string
      delay_destroy:
        description: 'Delay the destroy step and subsequent steps to allow investigation'
        type: boolean
        default: false
        required: false
      additional_tags:
        required: true
        type: string
        default: '{"OwnerEmail": "[email protected]", "ManagerEmail": "[email protected]", "TeamEmail": "[email protected]", "Description": "DSFKit_Tests", "Environment": "Test", "DataClassification": "Internal"}'
# Workflow-wide environment. Terraform reads TF_CLI_ARGS / TF_INPUT /
# TF_VAR_* / TF_WORKSPACE directly; the AWS CLI and provider read the
# AWS_* credentials (dev account — stage credentials are set per-step below).
env:
  TF_CLI_ARGS: "-no-color"
  TF_INPUT: 0                 # fail instead of prompting for missing input
  TF_VAR_gw_count: 1
  EXAMPLE_DIR: ./examples/aws/installation/sonar_multi_account_deployment
  AWS_REGION: ap-southeast-1
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  TF_WORKSPACE: "multi_account"
  JUMP_SERVER_KEY: ${{ secrets.JUMP_SERVER_KEY }}
  AWS_KEY_PATH: "jump_server_key.cer"
  DESTROY_DELAY_SECONDS: 1800  # 30 min investigation window on failure
# Least-privilege token: checkout only needs read access to repo contents.
permissions:
  contents: read
jobs:
  terraform:
    name: 'Multi Account ${{ inputs.branch }}'
    runs-on: ubuntu-latest
    environment: test
    # Use the Bash shell regardless whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest
    defaults:
      run:
        shell: bash

    steps:
      # Checkout the repository to the GitHub Actions runner
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}

      # Rewrite module sources so the examples consume the checked-out local modules
      - name: Change the modules source to local
        run: |
          find ./examples/ -type f -exec sed -i -f sed.expr {} \;

      # S3 remote state + DynamoDB locking, shared across runs (see concurrency group)
      - name: Create terraform backend file
        run: |
          cat << EOF > $EXAMPLE_DIR/backend.tf
          terraform {
            backend "s3" {
              bucket         = "terraform-state-bucket-dsfkit-github-tests"
              key            = "states/terraform.tfstate"
              dynamodb_table = "terraform-state-lock"
              region         = "us-east-1"
            }
          }
          EOF

      - name: Create tfvars File
        run: |
          cat << EOF > $EXAMPLE_DIR/terraform.tfvars
          ${{ vars.TFVAR_PARAMETERS_MULTI_ACCOUNT_AUTOMATION_V1 }}
          EOF

      - name: Cat tfvars File
        run: cat $EXAMPLE_DIR/terraform.tfvars

      # Two AWS profiles: dev (workflow-level AWS_* env) and stage (stage secrets)
      - name: Add Profile Credentials to ~/.aws/credentials
        run: |
          aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile ${{ vars.DEV_PROFILE_NAME }}
          aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile ${{ vars.DEV_PROFILE_NAME }}
          aws configure set aws_access_key_id ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }} --profile ${{ vars.STAGE_PROFILE_NAME }}
          aws configure set aws_secret_access_key ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }} --profile ${{ vars.STAGE_PROFILE_NAME }}

      # Runner's egress IP, exported for the SSH allow-rule below
      - name: Get The Public IP
        run: echo curr_ip=$(curl -s https://ipinfo.io/ip) >> $GITHUB_ENV

      # Open SSH from the runner to the jump server; rule id is exported so the
      # "Delete Security Group" step can revoke exactly this rule.
      - name: Set IP in AWS Security Group
        env:
          AWS_REGION: ap-southeast-1
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
        run: |
          aws_sg=$(aws ec2 authorize-security-group-ingress --group-id ${{ vars.JUMP_SERVER_SG_ID }} --protocol tcp --port 22 --cidr $curr_ip/32)
          echo sg_id=$(echo $aws_sg | jq '.SecurityGroupRules[0].SecurityGroupRuleId') >> $GITHUB_ENV

      - name: Create Key File
        run: |
          echo "${{ secrets.JUMP_SERVER_KEY }}" > $EXAMPLE_DIR/$AWS_KEY_PATH
          sudo chmod 400 $EXAMPLE_DIR/$AWS_KEY_PATH

      # Install the latest version of Terraform CLI and configure the Terraform CLI configuration file with a Terraform Cloud user API token
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_wrapper: false
          terraform_version: "~1.7.0"  # quoted: plain scalars starting with ~ are fragile

      - name: Setup jq
        uses: sergeysova/jq-action@v2

      # Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc.
      - name: Terraform Init
        run: terraform -chdir=$EXAMPLE_DIR init

      # Destroy any leftovers from a previous run; main/outputs are temporarily
      # renamed so destroy works from state alone even if the config is broken.
      - name: Cleaning environment
        run: |
          mv $EXAMPLE_DIR/main.tf{,_}
          mv $EXAMPLE_DIR/outputs.tf{,_}
          terraform -chdir=$EXAMPLE_DIR destroy -auto-approve
          mv $EXAMPLE_DIR/main.tf{_,}
          mv $EXAMPLE_DIR/outputs.tf{_,}

      - name: Terraform Validate
        run: terraform -chdir=$EXAMPLE_DIR validate

      # Generates an execution plan for Terraform
      # Fix: was ${{ env.TF_VAR_additional_tags }}, which is never defined in
      # this workflow and expanded empty — the declared input is the source.
      - name: Terraform Plan
        run: terraform -chdir=$EXAMPLE_DIR plan -var additional_tags='${{ inputs.additional_tags }}'

      - name: Terraform Apply
        run: terraform -chdir=$EXAMPLE_DIR apply -var additional_tags='${{ inputs.additional_tags }}' -auto-approve

      - name: Terraform Output
        if: always()
        run: terraform -chdir=$EXAMPLE_DIR output -json

      - name: Collect Artifacts
        uses: actions/upload-artifact@v4
        with:
          name: collected-keys
          path: |
            ${{ env.EXAMPLE_DIR }}/ssh_keys

      - name: Check how was the workflow run
        id: check-trigger
        if: ${{ failure() }}
        run: |
          if [ "${{ github.event_name }}" == "schedule" ]; then
            echo "run-by=Automation" >> $GITHUB_OUTPUT
          else
            echo "run-by=${{ github.actor }}" >> $GITHUB_OUTPUT
          fi

      # This step allows time for investigation of the failed resources before destroying them
      - name: Conditional Delay
        if: ${{ failure() }}
        run: |
          echo "delay_destroy: ${{ inputs.delay_destroy }}"
          if [ "${{ inputs.delay_destroy }}" == "true" ]; then
            echo "Terraform workspace: $TF_WORKSPACE"
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ github.workflow }} ${{ env.TF_WORKSPACE }} automation Failed*\n You have ${{ env.DESTROY_DELAY_SECONDS }} seconds to investigate the environment before it is destroyed :alarm_clock:\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
            echo ""
            echo "Sleeping for $((DESTROY_DELAY_SECONDS / 60)) minutes before destroying the environment"
            sleep $DESTROY_DELAY_SECONDS
          fi

      - name: Terraform Destroy
        if: always()
        run: terraform -chdir=$EXAMPLE_DIR destroy -auto-approve

      # Revoke the SSH rule opened earlier, even if previous steps failed
      - name: Delete Security Group
        if: always()
        env:
          AWS_REGION: ap-southeast-1
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
        run: aws ec2 revoke-security-group-ingress --group-id ${{ vars.JUMP_SERVER_SG_ID }} --security-group-rule-ids ${{ env.sg_id }}

      # Send job failure to Slack
      # Fixes: branch quoted in the shell test (word-splitting / empty-operand
      # failure); was ${{ inputs.workspace }}, which is not a declared input and
      # rendered empty — env.TF_WORKSPACE carries the workspace name.
      - name: Send Slack When Failure
        if: ${{ failure() }}
        run: |
          if [ "${{ inputs.branch }}" == "master" ]; then
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*Sonar Multi Account Prod ${{ env.TF_WORKSPACE }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#dsfkit-prod"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          else
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*Sonar Multi Account Dev nightly ${{ env.TF_WORKSPACE }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          fi