terraform-aws-eks/examples/basic/main.tf
Daniel Piddock 0c1ed0e6e9 Improvement: Require kubernetes provider >=1.11.1 (#784)
BREAKING CHANGE: The terraform-aws-eks module now requires at least kubernetes provider `1.11.1`. This may cause `terraform init` to fail for users who have pinned version = "1.10", as the examples previously did.
2020-03-18 13:56:51 +01:00
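If an existing configuration still pins the provider the way the old examples did, updating the constraint resolves the init failure. A minimal sketch of the change, mirroring the constraint this example now uses below:

provider "kubernetes" {
  # was: version = "1.10", which this module no longer accepts
  version = "~> 1.11"
}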


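# This example targets the Terraform 0.12 configuration syntax.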
terraform {
  required_version = ">= 0.12.0"
}
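
# Version pins for every provider this example touches.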
provider "aws" {
version = ">= 2.28.1"
region = var.region
}
provider "random" {
version = "~> 2.1"
}
provider "local" {
version = "~> 1.2"
}
provider "null" {
version = "~> 2.1"
}
provider "template" {
version = "~> 2.1"
}
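
# Look up endpoint and auth details for the cluster created by the eks module below.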
data "aws_eks_cluster" "cluster" {
name = module.eks.cluster_id
}
data "aws_eks_cluster_auth" "cluster" {
name = module.eks.cluster_id
}
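
# Configure the kubernetes provider to talk directly to the new cluster using
# the endpoint and token fetched above; load_config_file = false keeps it from
# reading a local kubeconfig. The module requires provider >= 1.11.1 (see the
# commit note at the top of this file).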
provider "kubernetes" {
host = data.aws_eks_cluster.cluster.endpoint
cluster_ca_certificate = base64decode(data.aws_eks_cluster.cluster.certificate_authority.0.data)
token = data.aws_eks_cluster_auth.cluster.token
load_config_file = false
version = "~> 1.11"
}
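
# Spread the subnets across every availability zone in the selected region.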
data "aws_availability_zones" "available" {
}
locals {
cluster_name = "test-eks-${random_string.suffix.result}"
}
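
# Random suffix so repeated runs of this example get unique cluster names.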
resource "random_string" "suffix" {
length = 8
special = false
}
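
# Security groups that allow SSH (port 22) to the workers: one per worker
# group from different management CIDR ranges, plus one shared by all workers.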
resource "aws_security_group" "worker_group_mgmt_one" {
name_prefix = "worker_group_mgmt_one"
vpc_id = module.vpc.vpc_id
ingress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = [
"10.0.0.0/8",
]
}
}
resource "aws_security_group" "worker_group_mgmt_two" {
name_prefix = "worker_group_mgmt_two"
vpc_id = module.vpc.vpc_id
ingress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = [
"192.168.0.0/16",
]
}
}
resource "aws_security_group" "all_worker_mgmt" {
name_prefix = "all_worker_management"
vpc_id = module.vpc.vpc_id
ingress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = [
"10.0.0.0/8",
"172.16.0.0/12",
"192.168.0.0/16",
]
}
}
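
# Dedicated VPC with the kubernetes.io subnet tags EKS uses for load balancer placement.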
module "vpc" {
source = "terraform-aws-modules/vpc/aws"
version = "2.6.0"
name = "test-vpc"
cidr = "10.0.0.0/16"
azs = data.aws_availability_zones.available.names
private_subnets = ["10.0.1.0/24", "10.0.2.0/24", "10.0.3.0/24"]
public_subnets = ["10.0.4.0/24", "10.0.5.0/24", "10.0.6.0/24"]
enable_nat_gateway = true
single_nat_gateway = true
enable_dns_hostnames = true
public_subnet_tags = {
"kubernetes.io/cluster/${local.cluster_name}" = "shared"
"kubernetes.io/role/elb" = "1"
}
private_subnet_tags = {
"kubernetes.io/cluster/${local.cluster_name}" = "shared"
"kubernetes.io/role/internal-elb" = "1"
}
}
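
# The EKS cluster itself: two worker groups in the private subnets, the shared
# management security group on every worker, and aws-auth mappings from variables.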
module "eks" {
source = "../.."
cluster_name = local.cluster_name
subnets = module.vpc.private_subnets
tags = {
Environment = "test"
GithubRepo = "terraform-aws-eks"
GithubOrg = "terraform-aws-modules"
}
vpc_id = module.vpc.vpc_id
worker_groups = [
{
name = "worker-group-1"
instance_type = "t2.small"
additional_userdata = "echo foo bar"
asg_desired_capacity = 2
additional_security_group_ids = [aws_security_group.worker_group_mgmt_one.id]
},
{
name = "worker-group-2"
instance_type = "t2.medium"
additional_userdata = "echo foo bar"
additional_security_group_ids = [aws_security_group.worker_group_mgmt_two.id]
asg_desired_capacity = 1
},
]
worker_additional_security_group_ids = [aws_security_group.all_worker_mgmt.id]
map_roles = var.map_roles
map_users = var.map_users
map_accounts = var.map_accounts
}