Mirror of https://github.com/ysoftdevs/terraform-aws-eks.git, synced 2026-01-15 08:14:12 +01:00.
* Add destroy-time flag * Update changelog Fix cluster count * Fix cluster count * Fix docs * Fix outputs * Fix unsupported attribute on cluster_certificate_authority_data output Co-Authored-By: Daniel Piddock <33028589+dpiddockcmp@users.noreply.github.com> * Remove unnecessary flatten from cluster_endpoint output Co-Authored-By: Daniel Piddock <33028589+dpiddockcmp@users.noreply.github.com> * Improve description of var.enabled * Fix errors manifesting when used on an existing-cluster * Update README.md * Renamed destroy-time flag * Revert removal of changelog addition entry * Update flag name in readme * Update flag variable name * Update cluster referencing for consistency * Update flag name to `create_eks` * Fixed incorrect count-based reference to aws_eks_cluster.this (there's only one) * Replaced all incorrect aws_eks_cluster.this[count.index] references (there will be just one, so using '[0]'). * Changelog update, explicitly mentioning flag * Fixed interpolation deprecation warning * Fixed outputs to support conditional cluster * Applied create_eks to aws_auth.tf * Removed unused variable. Updated Changelog. Formatting. * Fixed references to aws_eks_cluster.this[0] that would raise errors when setting create_eks to false whilst having launch templates or launch configurations configured. * Readme and example updates. * Revert "Readme and example updates." This reverts commit 18a0746355e136010ad54858a1b518406f6a3638. * Updated readme section of conditionally creation with provider example. * Added conditions to node_groups. * Fixed reversed map_roles check * Update aws_auth.tf Revert this due to https://github.com/terraform-aws-modules/terraform-aws-eks/pull/611
62 lines · 1.8 KiB · HCL
# Identity of the calling AWS principal; the account_id is used below when
# composing the worker IAM role ARNs for the aws-auth ConfigMap.
data "aws_caller_identity" "current" {}
|
|
|
|
# Renders one worker-role.tpl YAML fragment per launch-template worker group.
# These fragments are concatenated into the aws-auth ConfigMap's mapRoles.
# Created only when the module manages the EKS cluster (var.create_eks).
data "template_file" "launch_template_worker_role_arns" {
  count    = var.create_eks ? local.worker_group_launch_template_count : 0
  template = file("${path.module}/templates/worker-role.tpl")

  vars = {
    # Prefer the module-managed instance profile role, then any custom
    # profile's role name. The [""] fallback keeps coalescelist from
    # erroring when both splat lists are empty (e.g. worker IAM resources
    # managed outside this module) — mirrors the worker_role_arns block.
    worker_role_arn = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:role/${element(
      coalescelist(
        aws_iam_instance_profile.workers_launch_template.*.role,
        data.aws_iam_instance_profile.custom_worker_group_launch_template_iam_instance_profile.*.role_name,
        [""],
      ),
      count.index,
    )}"

    # Per-group platform, falling back to the module-wide default.
    platform = lookup(
      var.worker_groups_launch_template[count.index],
      "platform",
      local.workers_group_defaults["platform"]
    )
  }
}
|
|
|
|
# Renders one worker-role.tpl YAML fragment per plain (non-launch-template)
# worker group, for inclusion in the aws-auth ConfigMap's mapRoles.
# Created only when the module manages the EKS cluster (var.create_eks).
data "template_file" "worker_role_arns" {
  count    = var.create_eks ? local.worker_group_count : 0
  template = file("${path.module}/templates/worker-role.tpl")

  vars = {
    # Prefer the module-managed instance profile role, then any custom
    # profile's role name; [""] keeps coalescelist from erroring when
    # both splat lists are empty.
    worker_role_arn = format(
      "arn:aws:iam::%s:role/%s",
      data.aws_caller_identity.current.account_id,
      element(
        coalescelist(
          aws_iam_instance_profile.workers.*.role,
          data.aws_iam_instance_profile.custom_worker_group_iam_instance_profile.*.role_name,
          [""],
        ),
        count.index,
      ),
    )

    # Per-group platform, falling back to the module-wide default.
    platform = lookup(
      var.worker_groups[count.index],
      "platform",
      local.workers_group_defaults["platform"]
    )
  }
}
|
|
|
|
# Manages the aws-auth ConfigMap in kube-system, which maps worker-node IAM
# roles (plus any user-supplied roles, users, and accounts) to Kubernetes
# identities. Skipped when the cluster isn't created or management is opted
# out via var.manage_aws_auth.
resource "kubernetes_config_map" "aws_auth" {
  count = var.create_eks && var.manage_aws_auth ? 1 : 0

  metadata {
    name      = "aws-auth"
    namespace = "kube-system"
  }

  data = {
    # Worker-role fragments come pre-rendered from the two template_file
    # data sources above; distinct() drops duplicates when several groups
    # share a role. User-supplied map_roles are appended only when present.
    mapRoles = <<EOF
${join("", distinct(concat(data.template_file.launch_template_worker_role_arns.*.rendered, data.template_file.worker_role_arns.*.rendered)))}
%{if length(var.map_roles) != 0}${yamlencode(var.map_roles)}%{endif}
EOF

    mapUsers    = yamlencode(var.map_users)
    mapAccounts = yamlencode(var.map_accounts)
  }
}
|