adding major version 4.17.0.20.0 #87

name: 'DSF POC CLI - Azure'
on:
workflow_call:
inputs:
use_modules_from_terraform_registry:
required: true
type: boolean
explicit_ref:
required: true
type: string
secrets:
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true
SLACK_WEBHOOK_URL:
required: true
ARM_CLIENT_SECRET:
required: true
DAM_LICENSE:
required: true
workflow_dispatch:
inputs:
use_modules_from_terraform_registry:
type: boolean
required: false
delay_destroy:
description: 'Delay the destroy step and subsequent steps to allow investigation'
type: boolean
default: false
required: false
push:
branches:
- 'dev'
paths:
- 'modules/azurerm/**'
- 'modules/null/**'
- 'examples/azure/poc/dsf_deployment/*'
pull_request:
types:
- 'opened'
- 'reopened'
branches:
- 'dev'
paths:
- 'modules/azurerm/**'
- 'modules/null/**'
- 'examples/azure/poc/dsf_deployment/*'
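# The workflow_call trigger above makes this workflow reusable. A caller workflow could invoke it
# roughly as sketched below; the caller job name and this file's path are hypothetical placeholders,
# not taken from the repository:
#
#   jobs:
#     azure-poc:
#       uses: ./.github/workflows/<this-workflow-file>.yml   # hypothetical path
#       with:
#         use_modules_from_terraform_registry: false
#         explicit_ref: 'dev'
#       secrets: inherit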
env:
TF_CLI_ARGS: "-no-color"
TF_INPUT: 0
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} # aws creds are needed for s3 backend
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ARM_SUBSCRIPTION_ID: ${{ vars.ARM_SUBSCRIPTION_ID }}
ARM_CLIENT_ID: ${{ vars.ARM_CLIENT_ID }}
ARM_TENANT_ID: ${{ vars.ARM_TENANT_ID }}
ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }}
DESTROY_DELAY_SECONDS: 1800
permissions:
contents: read
jobs:
terraform:
strategy:
fail-fast: false
matrix:
include:
- name: DSF POC
workspace: azure_cli-all-
enable_sonar: true
enable_dam: true
enable_dra: true
- name: DSF POC - SONAR
workspace: azure_cli-sonar-
enable_sonar: true
enable_dam: false
enable_dra: false
- name: DSF POC - DAM
workspace: azure_cli-dam-
enable_sonar: false
enable_dam: true
enable_dra: false
- name: DSF POC - DRA
workspace: azure_cli-dra-
enable_sonar: false
enable_dam: false
enable_dra: true
name: '${{ matrix.name }}'
runs-on: ubuntu-latest
env:
EXAMPLE_DIR: ./examples/azure/poc/dsf_deployment
TF_VAR_enable_sonar: ${{ matrix.enable_sonar }}
TF_VAR_enable_dam: ${{ matrix.enable_dam }}
TF_VAR_enable_dra: ${{ matrix.enable_dra }}
environment: test
# Use the Bash shell regardless of whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest
defaults:
run:
shell: bash
steps:
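# Resolve which git ref to work against: the explicit_ref input when this workflow is called from
# another workflow, otherwise the ref that triggered the run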
- name: Pick ref
run: |
if [ -z "${{ inputs.explicit_ref }}" ]; then
echo REF=${{ github.ref }} >> $GITHUB_ENV;
else
echo REF=${{ inputs.explicit_ref }} >> $GITHUB_ENV;
fi
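# Build a unique Terraform workspace name per run (matrix prefix plus run number and attempt) so
# concurrent runs do not collide; scheduled runs reuse a fixed, ref-based workspace name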
- name: Set Workspace Name
run: |
echo "Event Name: ${{ github.event_name }}"
if [ ${{ github.event_name }} == 'schedule' ]; then
echo TF_WORKSPACE=${{ matrix.workspace }}${{ github.event_name }}-$REF >> $GITHUB_ENV
else
echo TF_WORKSPACE=${{ matrix.workspace }}${{ github.run_number }}-${{ github.run_attempt }}-${{ inputs.explicit_ref }} >> $GITHUB_ENV
echo TMP_WORKSPACE_NAME=${{ matrix.workspace }}${{ github.run_number }}-${{ github.run_attempt }}-${{ inputs.explicit_ref }} >> $GITHUB_ENV
fi
# Checkout the repository to the GitHub Actions runner
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ env.REF }}
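# Rewrite the example's module sources to point at the local checkout instead of the Terraform
# Registry, using the sed expressions kept in sed.expr at the repository root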
- name: Change the modules source to local
if: ${{ inputs.use_modules_from_terraform_registry == false }}
run: |
find $EXAMPLE_DIR -type f -exec sed -i -f sed.expr {} \;
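# Store Terraform state remotely in S3 with DynamoDB locking; this is why AWS credentials are
# required even though the deployment itself targets Azure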
- name: Create terraform backend file
run: |
cat << EOF > $EXAMPLE_DIR/backend.tf
terraform {
backend "s3" {
bucket = "terraform-state-bucket-dsfkit-github-tests"
key = "states/terraform.tfstate"
dynamodb_table = "terraform-state-lock"
region = "us-east-1"
}
}
EOF
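# Generate the variable values for the example: the Azure region, the locations of the pre-uploaded
# Sonar tarball, DAM agent installer and DRA VHDs in the "dsfinstallation" storage account, and the
# database types to simulate with an agent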
- name: Create terraform.tfvars file
run: |
cat << EOF > $EXAMPLE_DIR/terraform.tfvars
resource_group_location = "East US"
tarball_location = {
az_resource_group = "dsf-rg"
az_storage_account = "dsfinstallation"
az_container = "sonar"
az_blob = "jsonar-4.17.0.20.0.tar.gz"
}
dam_agent_installation_location = {
az_resource_group = "dsf-rg"
az_storage_account = "dsfinstallation"
az_container = "dam"
az_blob = "Imperva-ragent-UBN-px86_64-b14.6.0.60.0.636085.bsx"
}
simulation_db_types_for_agent = ["PostgreSql", "MySql"]
dra_admin_vhd_details = {
storage_account_name = "dsfinstallation"
container_name = "dra"
path_to_vhd = "DRA-4.16.0.10.0.119_30763_x86_64-Admin.vhd"
}
dra_analytics_vhd_details = {
storage_account_name = "dsfinstallation"
container_name = "dra"
path_to_vhd = "DRA-4.16.0.10.0.119_30763_x86_64-Analytics.vhd"
}
EOF
# Install the Terraform CLI (pinned to the 1.7.x series) on the runner
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
terraform_version: ~1.7.0
- name: Setup jq
uses: sergeysova/jq-action@v2
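# Decode the base64-encoded DAM license secret into license.mprv, which is later passed to
# Terraform via -var dam_license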
- name: Create License File
env:
MY_SECRET: ${{ secrets.DAM_LICENSE }}
run: |
echo "${{ secrets.DAM_LICENSE }}" | base64 -d > $EXAMPLE_DIR/license.mprv
cat $EXAMPLE_DIR/license.mprv
# Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc.
- name: Terraform Init
run: terraform -chdir=$EXAMPLE_DIR init
env:
TF_WORKSPACE: default
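# On scheduled runs, temporarily move the example's .tf files aside and run destroy against the
# now-empty configuration, so any resources left in the workspace state from a previous run are
# removed before this run applies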
- name: Cleaning environment
run: |
if [ ${{ github.event_name }} == 'schedule' ]; then
mv $EXAMPLE_DIR/main.tf{,_}
mv $EXAMPLE_DIR/outputs.tf{,_}
mv $EXAMPLE_DIR/sonar.tf{,_}
mv $EXAMPLE_DIR/dam.tf{,_}
mv $EXAMPLE_DIR/dra.tf{,_}
mv $EXAMPLE_DIR/agent_sources.tf{,_}
mv $EXAMPLE_DIR/agentless_sources.tf{,_}
mv $EXAMPLE_DIR/networking.tf{,_}
ls -la $EXAMPLE_DIR
terraform -chdir=$EXAMPLE_DIR destroy -var dam_license=license.mprv -auto-approve
mv $EXAMPLE_DIR/main.tf{_,}
mv $EXAMPLE_DIR/outputs.tf{_,}
mv $EXAMPLE_DIR/sonar.tf{_,}
mv $EXAMPLE_DIR/dam.tf{_,}
mv $EXAMPLE_DIR/dra.tf{_,}
mv $EXAMPLE_DIR/agent_sources.tf{_,}
mv $EXAMPLE_DIR/agentless_sources.tf{_,}
mv $EXAMPLE_DIR/networking.tf{_,}
fi
- name: Terraform Validate
run: terraform -chdir=$EXAMPLE_DIR validate
# Generates an execution plan for Terraform
- name: Terraform Plan
run: |
terraform -chdir=$EXAMPLE_DIR workspace list
terraform -chdir=$EXAMPLE_DIR plan -var dam_license=license.mprv
# Build or change infrastructure according to the Terraform configuration files
- name: Terraform Apply
id: apply
# if: github.ref == 'refs/heads/master' && github.event_name == 'push' || github.event_name == 'workflow_dispatch'
run: terraform -chdir=$EXAMPLE_DIR apply -var dam_license=license.mprv -auto-approve
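# Print the deployment outputs as JSON, even when a previous step failed, to aid debugging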
- name: Terraform Output
if: always()
run: terraform -chdir=$EXAMPLE_DIR output -json
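# Upload the SSH keys generated by the deployment as a run artifact so the provisioned machines
# can still be accessed if the environment needs to be investigated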
- name: Collect Artifacts
id: collect-artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: collected-keys-${{ env.TF_WORKSPACE }}
path: |
${{ env.EXAMPLE_DIR }}/ssh_keys
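# Record whether the failed run was triggered by the scheduler or by a user; the result is included
# in the Slack notifications below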
- name: Check how the workflow was run
id: check-trigger
if: ${{ failure() }}
run: |
if [ "${{ github.event_name }}" == "schedule" ]; then
echo "run-by=Automation" >> $GITHUB_OUTPUT
else
echo "run-by=${{ github.actor }}" >> $GITHUB_OUTPUT
fi
# This step allows time for investigation of the failed resources before destroying them
- name: Conditional Delay
if: ${{ failure() }}
run: |
echo "delay_destroy: ${{ inputs.delay_destroy }}"
if [ "${{ inputs.delay_destroy }}" == "true" ]; then
echo "Terraform workspace: $TF_WORKSPACE"
curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ github.workflow }} ${{ env.TF_WORKSPACE }} automation Failed*\n You have ${{ env.DESTROY_DELAY_SECONDS }} seconds to investigate the environment before it is destroyed :alarm_clock:\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
echo ""
echo "Sleeping for $((DESTROY_DELAY_SECONDS / 60)) minutes before destroying the environment"
sleep $DESTROY_DELAY_SECONDS
fi
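# Destroy the environment whenever the apply succeeded, and always for non-scheduled runs, so
# partially created resources from failed PR or manual runs are cleaned up as well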
- name: Terraform Destroy
id: destroy
if: always()
run: |
if [ '${{ steps.apply.conclusion }}' == 'success' ] || [ ${{ github.event_name }} != 'schedule' ]; then
terraform -chdir=$EXAMPLE_DIR destroy -var dam_license=license.mprv -auto-approve
fi
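# Remove the temporary workspace once destroy succeeded (non-scheduled runs only). TF_WORKSPACE is
# switched back to "default" because the currently selected workspace cannot delete itself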
- name: Terraform Delete Workspace
if: always()
run: |
if [ '${{ steps.destroy.conclusion }}' == 'success' ] && [ ${{ github.event_name }} != 'schedule' ]; then
terraform -chdir=$EXAMPLE_DIR workspace delete $TMP_WORKSPACE_NAME
fi
env:
TF_WORKSPACE: default
# Send job failure to Slack
- name: Send Slack Notification on Failure
run: |
if [ ${{ env.REF }} == 'master' ]; then
curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} Prod ${{ env.TF_WORKSPACE }} automation on Azure Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#dsfkit-prod"}' ${{ secrets.SLACK_WEBHOOK_URL }}
elif [ ${{ env.REF }} == 'dev' ]; then
curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} dev ${{ env.TF_WORKSPACE }} automation on Azure Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
else
curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ matrix.name }} private branch ${{ env.TF_WORKSPACE }} automation on Azure Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
fi
if: ${{ failure() }}