Fargate profile added and fix AWS managed EKS #57

Merged (12 commits) on Aug 10, 2023
2 changes: 1 addition & 1 deletion .github/workflows/tflint.yml
@@ -6,6 +6,6 @@ on:
workflow_dispatch:
jobs:
tf-lint:
uses: clouddrove/github-shared-workflows/.github/workflows/tf-lint.yml@master
uses: clouddrove/github-shared-workflows/.github/workflows/tf-lint.yml@tflint-test
secrets:
GITHUB: ${{ secrets.GITHUB }}
124 changes: 39 additions & 85 deletions _example/aws_managed/example.tf
@@ -3,48 +3,46 @@ provider "aws" {
}

locals {

name = "clouddrove-eks"
region = "eu-west-1"
name = "clouddrove-eks"
region = "eu-west-1"
vpc_cidr_block = module.vpc.vpc_cidr_block
additional_cidr_block = "172.16.0.0/16"
environment = "test"
label_order = ["name", "environment"]
tags = {
"kubernetes.io/cluster/${module.eks.cluster_name}" = "owned"
}
}

################################################################################
# VPC
# VPC module call
################################################################################

module "vpc" {
source = "clouddrove/vpc/aws"
version = "2.0.0"

name = "${local.name}-vpc"
environment = "test"
label_order = ["environment", "name"]

cidr_block = "10.10.0.0/16"
environment = local.environment
cidr_block = "10.10.0.0/16"
}

# ################################################################################
# # Subnets
# # Subnets module call
# ################################################################################

module "subnets" {
source = "clouddrove/subnet/aws"
version = "2.0.0"

name = "${local.name}-subnet"
environment = "test"
label_order = ["environment", "name"]

name = "${local.name}-subnet"
environment = local.environment
nat_gateway_enabled = true
single_nat_gateway = true
availability_zones = ["${local.region}a", "${local.region}b", "${local.region}c"]
vpc_id = module.vpc.vpc_id
type = "public-private"
igw_id = module.vpc.igw_id
cidr_block = module.vpc.vpc_cidr_block
cidr_block = local.vpc_cidr_block
ipv6_cidr_block = module.vpc.ipv6_cidr_block
enable_ipv6 = false

@@ -125,49 +123,31 @@ module "subnets" {
]
}

################################################################################
# Keypair
################################################################################

module "keypair" {
source = "clouddrove/keypair/aws"
version = "1.3.0"

name = "${local.name}-key"
environment = "test"
label_order = ["name", "environment"]

enable_key_pair = true
public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDc4AjHFctUATtd5of4u9bJtTgkh9bKogSDjxc9QqbylRORxUa422jO+t1ldTVdyqDRKltxQCJb4v23HZc2kssU5uROxpiF2fzgiHXRduL+RtyOtY2J+rNUdCRmHz4WQySblYpgteIJZpVo2smwdek8xSpjoHXhgxxa9hb4pQQwyjtVGEdH8vdYwtxgPZgPVaJgHVeJgVmhjTf2VGTATaeR9txzHsEPxhe/n1y34mQjX0ygEX8x0RZzlGziD1ih3KPaIHcpTVSYYk4LOoMK38vEI67SIMomskKn4yU043s+t9ZriJwk2V9+oU6tJU/5E1rd0SskXUhTypc3/Znc/rkYtLe8s6Uy26LOrBFzlhnCT7YH1XbCv3rEO+Nn184T4BSHeW2up8UJ1SOEd+WzzynXczdXoQcBN2kaz4dYFpRXchsAB6ejZrbEq7wyZvutf11OiS21XQ67+30lEL2WAO4i95e4sI8AdgwJgzrqVcicr3ImE+BRDkndMn5k1LhNGqwMD3Iuoel84xvinPAcElDLiFmL3BJVA/53bAlUmWqvUGW9SL5JpLUmZgE6kp+Tps7D9jpooGGJKmqgJLkJTzAmTSJh0gea/rT5KwI4j169TQD9xl6wFqns4BdQ4dMKHQCgDx8LbEd96l9F9ruWwQ8EAZBe4nIEKTV9ri+04JVhSQ== [email protected]"
}

# ################################################################################
# Security Groups
# Security Groups module call
################################################################################

module "ssh" {
source = "clouddrove/security-group/aws"
version = "2.0.0"

name = "${local.name}-ssh"
environment = "test"
label_order = ["environment", "name"]

vpc_id = module.vpc.vpc_id
environment = local.environment
vpc_id = module.vpc.vpc_id
new_sg_ingress_rules_with_cidr_blocks = [{
rule_count = 1
from_port = 22
protocol = "tcp"
to_port = 22
cidr_blocks = [module.vpc.vpc_cidr_block, "172.16.0.0/16"]
cidr_blocks = [local.vpc_cidr_block, local.additional_cidr_block]
description = "Allow ssh traffic."
},
{
rule_count = 2
from_port = 27017
protocol = "tcp"
to_port = 27017
cidr_blocks = ["172.16.0.0/16"]
cidr_blocks = [local.additional_cidr_block]
description = "Allow Mongodb traffic."
}
]
@@ -178,15 +158,15 @@ module "ssh" {
from_port = 22
protocol = "tcp"
to_port = 22
cidr_blocks = [module.vpc.vpc_cidr_block, "172.16.0.0/16"]
cidr_blocks = [local.vpc_cidr_block, local.additional_cidr_block]
description = "Allow ssh outbound traffic."
},
{
rule_count = 2
from_port = 27017
protocol = "tcp"
to_port = 27017
cidr_blocks = ["172.16.0.0/16"]
cidr_blocks = [local.additional_cidr_block]
description = "Allow Mongodb outbound traffic."
}]
}
@@ -196,8 +176,7 @@ module "http_https" {
version = "2.0.0"

name = "${local.name}-http-https"
environment = "test"
label_order = ["name", "environment"]
environment = local.environment

vpc_id = module.vpc.vpc_id
## INGRESS Rules
@@ -206,23 +185,23 @@ module "http_https" {
from_port = 22
protocol = "tcp"
to_port = 22
cidr_blocks = [module.vpc.vpc_cidr_block]
cidr_blocks = [local.vpc_cidr_block]
description = "Allow ssh traffic."
},
{
rule_count = 2
from_port = 80
protocol = "tcp"
to_port = 80
cidr_blocks = [module.vpc.vpc_cidr_block]
cidr_blocks = [local.vpc_cidr_block]
description = "Allow http traffic."
},
{
rule_count = 3
from_port = 443
protocol = "tcp"
to_port = 443
cidr_blocks = [module.vpc.vpc_cidr_block]
cidr_blocks = [local.vpc_cidr_block]
description = "Allow https traffic."
}
]
@@ -241,15 +220,15 @@ module "http_https" {
}

################################################################################
# KMS Module
# KMS Module call
################################################################################
module "kms" {
source = "clouddrove/kms/aws"
version = "1.3.0"

name = "${local.name}-kms"
environment = "test"
label_order = ["environment", "name"]
environment = local.environment
label_order = local.label_order
enabled = true
description = "KMS key for EBS of EKS nodes"
enable_key_rotation = false
@@ -263,51 +242,41 @@ data "aws_iam_policy_document" "kms" {
effect = "Allow"
principals {
type = "AWS"
identifiers = ["*"]
identifiers = ["arn:aws:iam::${data.aws_caller_identity.current.account_id}:root"]
}
actions = ["kms:*"]
resources = ["*"]
}

}

data "aws_caller_identity" "current" {}

################################################################################
# EKS Module
# EKS Module call
################################################################################

module "eks" {
source = "../.."
enabled = true

name = local.name
environment = "test"
label_order = ["environment", "name"]
environment = local.environment
label_order = local.label_order

# EKS
kubernetes_version = "1.27"
endpoint_private_access = true
endpoint_public_access = true
enabled_cluster_log_types = ["api", "audit", "authenticator", "controllerManager", "scheduler"]
oidc_provider_enabled = true

kubernetes_version = "1.27"
endpoint_public_access = true
# Networking
vpc_id = module.vpc.vpc_id
subnet_ids = module.subnets.private_subnet_id
allowed_security_groups = [module.ssh.security_group_id]
eks_additional_security_group_ids = ["${module.ssh.security_group_id}", "${module.http_https.security_group_id}"]
allowed_cidr_blocks = ["10.0.0.0/16"]
allowed_cidr_blocks = [local.vpc_cidr_block]

################################################################################
# AWS Managed Node Group
################################################################################
# Node group default values; these apply to all node groups
managed_node_group_defaults = {
subnet_ids = module.subnets.private_subnet_id
key_name = module.keypair.name
nodes_additional_security_group_ids = [module.ssh.security_group_id]
# ami_id = "ami-064d3e6a8ca655d19"
# ami_type = "AL2_ARM_64"
# instance_types = ["t4g.medium"]
tags = {
"kubernetes.io/cluster/${module.eks.cluster_name}" = "shared"
"k8s.io/cluster/${module.eks.cluster_name}" = "shared"
@@ -331,7 +300,7 @@ module "eks" {
name = "${module.eks.cluster_name}-critical"
capacity_type = "ON_DEMAND"
min_size = 1
max_size = 7
max_size = 2
desired_size = 2
instance_types = ["t3.medium"]
}
@@ -340,24 +309,13 @@ module "eks" {
name = "${module.eks.cluster_name}-application"
capacity_type = "SPOT"
min_size = 1
max_size = 7
max_size = 2
desired_size = 1
force_update_version = true
instance_types = ["t3.medium"]
}
}

# -- Enable Add-Ons in EKS Cluster
addons = [
{
addon_name = "coredns"
addon_version = "v1.10.1-eksbuild.1"
resolve_conflicts = "OVERWRITE"
service_account_role_arn = "${module.eks.node_group_iam_role_arn}"
}
]

# -- Set this to `true` only when you have correct iam_user details.
apply_config_map_aws_auth = true
map_additional_iam_users = [
{
@@ -367,11 +325,7 @@ module "eks" {
}
]
}

################################################################################
# Kubernetes provider configuration
################################################################################

## Kubernetes provider configuration
data "aws_eks_cluster" "this" {
depends_on = [module.eks]
name = module.eks.cluster_id
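For context, an aws_eks_cluster data source like the one above is usually paired with an aws_eks_cluster_auth data source to configure the Kubernetes provider that applies the aws-auth ConfigMap. A minimal sketch under that assumption; the elided lines of this example may differ:

data "aws_eks_cluster_auth" "this" {
  depends_on = [module.eks]
  name       = module.eks.cluster_id
}

provider "kubernetes" {
  # Endpoint and CA certificate come from the aws_eks_cluster data source shown above;
  # the token is a short-lived credential issued by aws_eks_cluster_auth.
  host                   = data.aws_eks_cluster.this.endpoint
  cluster_ca_certificate = base64decode(data.aws_eks_cluster.this.certificate_authority[0].data)
  token                  = data.aws_eks_cluster_auth.this.token
}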
8 changes: 6 additions & 2 deletions _example/aws_managed/versions.tf
@@ -1,11 +1,15 @@
# Terraform version
terraform {
required_version = ">= 1.5.0"
required_version = ">= 1.5.4"

required_providers {
aws = {
source = "hashicorp/aws"
version = ">= 5.5.0"
version = ">= 5.11.0"
}
cloudinit = {
source = "hashicorp/cloudinit"
version = ">= 2.0"
}
}
}