refactor: Refactoring to match the rest of terraform-aws-modules (#1583)

This commit is contained in:
Anton Babenko
2021-09-16 11:35:44 +02:00
committed by GitHub
parent 619b4a0d48
commit 2bdf7d7dd6
76 changed files with 1350 additions and 1037 deletions


@@ -1,76 +1,16 @@
terraform {
required_version = ">= 0.12.6"
}
provider "aws" {
region = var.region
}
data "aws_eks_cluster" "cluster" {
name = module.eks.cluster_id
}
data "aws_eks_cluster_auth" "cluster" {
name = module.eks.cluster_id
}
provider "kubernetes" {
host = data.aws_eks_cluster.cluster.endpoint
cluster_ca_certificate = base64decode(data.aws_eks_cluster.cluster.certificate_authority.0.data)
token = data.aws_eks_cluster_auth.cluster.token
load_config_file = false
}
data "aws_availability_zones" "available" {
}
locals {
cluster_name = "test-eks-${random_string.suffix.result}"
}
resource "random_string" "suffix" {
length = 8
special = false
}
module "vpc" {
source = "terraform-aws-modules/vpc/aws"
version = "~> 2.47"
name = "test-vpc"
cidr = "172.16.0.0/16"
azs = data.aws_availability_zones.available.names
private_subnets = ["172.16.1.0/24", "172.16.2.0/24", "172.16.3.0/24"]
public_subnets = ["172.16.4.0/24", "172.16.5.0/24", "172.16.6.0/24"]
enable_nat_gateway = true
single_nat_gateway = true
enable_dns_hostnames = true
public_subnet_tags = {
"kubernetes.io/cluster/${local.cluster_name}" = "shared"
"kubernetes.io/role/elb" = "1"
}
private_subnet_tags = {
"kubernetes.io/cluster/${local.cluster_name}" = "shared"
"kubernetes.io/role/internal-elb" = "1"
}
region = local.region
}
module "eks" {
source = "../.."
cluster_name = local.cluster_name
cluster_version = "1.20"
subnets = [module.vpc.private_subnets[0], module.vpc.public_subnets[1]]
fargate_subnets = [module.vpc.private_subnets[2]]
cluster_version = "1.21"
tags = {
Environment = "test"
GithubRepo = "terraform-aws-eks"
GithubOrg = "terraform-aws-modules"
}
vpc_id = module.vpc.vpc_id
vpc_id = local.vpc.vpc_id
subnets = [local.vpc.private_subnets[0], local.vpc.public_subnets[1]]
fargate_subnets = [local.vpc.private_subnets[2]]
fargate_profiles = {
default = {
@@ -84,25 +24,178 @@ module "eks" {
},
{
namespace = "default"
# Kubernetes labels for selection
# labels = {
# Environment = "test"
# GithubRepo = "terraform-aws-eks"
# GithubOrg = "terraform-aws-modules"
# }
labels = {
WorkerType = "fargate"
}
}
]
# Using specific subnets instead of all the ones configured in EKS
# subnets = ["subnet-0ca3e3d1234a56c78"]
tags = {
Owner = "default"
}
}
secondary = {
name = "secondary"
selectors = [
{
namespace = "default"
labels = {
Environment = "test"
GithubRepo = "terraform-aws-eks"
GithubOrg = "terraform-aws-modules"
}
}
]
# Using specific subnets instead of the ones configured in EKS (`subnets` and `fargate_subnets`)
subnets = [local.vpc.private_subnets[1]]
tags = {
Owner = "test"
Owner = "secondary"
}
}
}
map_roles = var.map_roles
map_users = var.map_users
map_accounts = var.map_accounts
manage_aws_auth = false
tags = {
Environment = "test"
GithubRepo = "terraform-aws-eks"
GithubOrg = "terraform-aws-modules"
}
}
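# For context (not part of this diff): the aws-auth map inputs above expect
# entries of the following shape; the role ARN, username, and group below are
# illustrative only.
#
# map_roles = [
#   {
#     rolearn  = "arn:aws:iam::111122223333:role/example"
#     username = "example"
#     groups   = ["system:masters"]
#   }
# ]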
##############################################
# Calling submodule with existing EKS cluster
##############################################
module "fargate_profile_existing_cluster" {
source = "../../modules/fargate"
cluster_name = module.barebone_eks.cluster_id
subnets = [local.vpc.private_subnets[0], local.vpc.private_subnets[1]]
fargate_profiles = {
profile1 = {
name = "profile1"
selectors = [
{
namespace = "kube-system"
labels = {
k8s-app = "kube-dns"
}
},
{
namespace = "profile"
labels = {
WorkerType = "fargate"
}
}
]
tags = {
Owner = "profile1"
}
}
profile2 = {
name = "profile2"
selectors = [
{
namespace = "default"
labels = {
Fargate = "profile2"
}
}
]
# Using specific subnets instead of the ones configured in EKS (`subnets` and `fargate_subnets`)
subnets = [local.vpc.private_subnets[1]]
tags = {
Owner = "profile2"
}
}
}
tags = {
DoYouLoveFargate = "Yes"
}
}
#############
# Kubernetes
#############
data "aws_eks_cluster" "cluster" {
name = module.eks.cluster_id
}
data "aws_eks_cluster_auth" "cluster" {
name = module.eks.cluster_id
}
provider "kubernetes" {
host = data.aws_eks_cluster.cluster.endpoint
cluster_ca_certificate = base64decode(data.aws_eks_cluster.cluster.certificate_authority[0].data)
token = data.aws_eks_cluster_auth.cluster.token
}
############################################################
# Barebone EKS Cluster where submodules can add extra stuff
############################################################
module "barebone_eks" {
source = "../.."
cluster_name = "barebone-${local.cluster_name}"
cluster_version = "1.21"
vpc_id = local.vpc.vpc_id
subnets = local.vpc.private_subnets
tags = {
Environment = "test"
Barebone = "yes_please"
}
}
#############
# Kubernetes
#############
data "aws_eks_cluster" "barebone" {
name = module.barebone_eks.cluster_id
}
data "aws_eks_cluster_auth" "barebone" {
name = module.barebone_eks.cluster_id
}
provider "kubernetes" {
alias = "barebone"
host = data.aws_eks_cluster.barebone.endpoint
cluster_ca_certificate = base64decode(data.aws_eks_cluster.barebone.certificate_authority[0].data)
token = data.aws_eks_cluster_auth.barebone.token
}
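# Usage sketch (assumed, not shown in this diff): resources targeting the barebone
# cluster must reference the aliased provider explicitly, e.g.:
#
# resource "kubernetes_namespace" "barebone_example" {
#   provider = kubernetes.barebone
#
#   metadata {
#     name = "example"
#   }
# }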
################################################################################
# Supporting resources (managed in "_bootstrap" directory)
################################################################################
data "terraform_remote_state" "bootstrap" {
backend = "local"
config = {
path = "../_bootstrap/terraform.tfstate"
}
}
locals {
region = data.terraform_remote_state.bootstrap.outputs.region
cluster_name = data.terraform_remote_state.bootstrap.outputs.cluster_name
vpc = data.terraform_remote_state.bootstrap.outputs.vpc
}
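# For reference (assumed; the "_bootstrap" configuration itself is not part of this
# diff): the bootstrap workspace is expected to expose outputs matching the locals
# above, along these lines:
#
# output "region" {
#   value = var.region
# }
#
# output "cluster_name" {
#   value = local.cluster_name
# }
#
# output "vpc" {
#   value = module.vpc
# }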