From 283e9b203ce33e240a9d23eec910e24e8decbd88 Mon Sep 17 00:00:00 2001
From: brandoconnor
Date: Wed, 6 Jun 2018 21:58:12 -0700
Subject: [PATCH] cleaning up before initial release

---
 .pre-commit-config.yaml              | 14 ++++++--
 .travis.yml                          | 48 ++++++++++++++++++++++++++++
 README.md                            |  9 ++----
 cluster.tf                           | 10 +++---
 data.tf                              | 12 +++++++
 examples/eks_test_fixture/outputs.tf | 19 +++++++++--
 main.tf                              |  5 +--
 outputs.tf                           |  6 ++--
 workers.tf                           | 12 +++++--
 9 files changed, 109 insertions(+), 26 deletions(-)
 create mode 100644 .travis.yml

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1f47aa7..16cbedc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,9 +1,17 @@
 # See http://pre-commit.com for more information
 # See http://pre-commit.com/hooks.html for more hooks
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  sha: v0.9.2
-  hooks:
+- repo: git://github.com/antonbabenko/pre-commit-terraform
+  rev: v1.7.1
+  sha: 091f8b15d7b458e5a0aca642483deb2205e7db02
+  hooks:
+  - id: terraform_fmt
+  # - id: terraform_docs
+- repo: git://github.com/pre-commit/pre-commit-hooks
+  rev: v1.2.3
+  sha: 92e1570c282e3c69a1f8b5b8dd8d286fe27cfaa7
+  hooks:
+  - id: check-merge-conflict
   - id: trailing-whitespace
   # - id: end-of-file-fixer
   - id: check-yaml
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..ad4aefd
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,48 @@
+language: ruby
+sudo: required
+dist: trusty
+services:
+- docker
+rvm:
+- 2.4.2
+before_install:
+- echo "before_install"
+install:
+- echo "install"
+- gem install bundler --no-rdoc --no-ri
+- bundle install
+before_script:
+- echo 'before_script'
+- export AWS_REGION='us-west-2'
+- export TF_VAR_region=${AWS_REGION}
+- echo "using AWS_REGION=${AWS_REGION}"
+- export TF_WARN_OUTPUT_ERRORS=1
+- curl --silent --output terraform.zip https://releases.hashicorp.com/terraform/0.11.7/terraform_0.11.7_linux_amd64.zip
+- sha256sum terraform.zip | grep "6b8ce67647a59b2a3f70199c304abca0ddec0e49fd060944c26f666298e23418"
+- unzip terraform.zip ; rm -f terraform.zip; chmod +x terraform
+- mkdir -p ${HOME}/bin ; export PATH=${PATH}:${HOME}/bin; mv terraform ${HOME}/bin/
+- terraform -v
+script:
+- echo 'script'
+- terraform init
+- terraform fmt -check=true
+- terraform validate -var "region=${AWS_REGION}" -var "subnets=[]" -var "vpc_id=vpc-abcde012" -var "load_balancer_name=my-lb" -var "log_bucket_name=my-log-bucket" -var "security_groups=[]"
+- docker run --rm -v $(pwd):/app/ --workdir=/app/ -t wata727/tflint --error-with-issues
+- cd examples/eks_test_fixture
+- terraform init
+- terraform fmt -check=true
+- terraform validate
+- cd -
+- terraform -v
+- bundle exec kitchen test --destroy always
+deploy:
+  provider: script
+  script: ci/deploy.sh
+  on:
+    branch: master
+notifications:
+  email:
+    recipients:
+    - brandon@atscale.run
+    on_success: change
+    on_failure: change
diff --git a/README.md b/README.md
index ecd047d..63f0392 100644
--- a/README.md
+++ b/README.md
@@ -14,9 +14,6 @@ and its [source code](https://github.com/terraform-providers/terraform-provider-aws).
 * You want to create a set of resources around an EKS cluster: namely an autoscaling group of workers and a security group for them.
 * You've created a Virtual Private Cloud (VPC) and subnets where you intend to put this EKS.
 
-It's recommended you use this module with [terraform-aws-vpc](https://registry.terraform.io/modules/terraform-aws-modules/vpc/aws),
-and [terraform-aws-security-group](https://registry.terraform.io/modules/terraform-aws-modules/security-group/aws).
-
 ## Usage example
 
 A full example leveraging other community modules is contained in the [examples/eks_test_fixture directory](https://github.com/terraform-aws-modules/terraform-aws-eks/tree/master/examples/eks_test_fixture). Here's the gist of using it via the Terraform registry:
@@ -104,8 +101,8 @@ MIT Licensed. See [LICENSE](https://github.com/terraform-aws-modules/terraform-aws-eks/tree/master/LICENSE) for full details.
 |------|-------------|
 | cluster_certificate_authority_data | Nested attribute containing certificate-authority-data for your cluster. This is the base64 encoded certificate data required to communicate with your cluster. |
 | cluster_endpoint | The endpoint for your Kubernetes API server. |
-| cluster_id | The name of the cluster. |
+| cluster_id | The name/id of the cluster. |
 | cluster_security_group_ids | description |
 | cluster_version | The Kubernetes server version for the cluster. |
-| config_map_aws_auth | description |
-| kubeconfig | description |
+| config_map_aws_auth | |
+| kubeconfig | kubectl config file contents for this cluster. |
diff --git a/cluster.tf b/cluster.tf
index 4ad3f44..797d422 100644
--- a/cluster.tf
+++ b/cluster.tf
@@ -14,11 +14,6 @@ resource "aws_eks_cluster" "this" {
   ]
 }
 
-resource "aws_iam_role" "cluster" {
-  name_prefix        = "${var.cluster_name}"
-  assume_role_policy = "${data.aws_iam_policy_document.cluster_assume_role_policy.json}"
-}
-
 resource "aws_security_group" "cluster" {
   name_prefix = "${var.cluster_name}"
   description = "Cluster communication with worker nodes"
@@ -56,6 +51,11 @@ resource "aws_security_group_rule" "cluster_https_cidr_ingress" {
   type              = "ingress"
 }
 
+resource "aws_iam_role" "cluster" {
+  name_prefix        = "${var.cluster_name}"
+  assume_role_policy = "${data.aws_iam_policy_document.cluster_assume_role_policy.json}"
+}
+
 resource "aws_iam_role_policy_attachment" "cluster_AmazonEKSClusterPolicy" {
   policy_arn = "arn:aws:iam::aws:policy/AmazonEKSClusterPolicy"
   role       = "${aws_iam_role.cluster.name}"
diff --git a/data.tf b/data.tf
index 4f5d6fc..c1b615c 100644
--- a/data.tf
+++ b/data.tf
@@ -30,7 +30,19 @@
   }
 }
 
+resource "null_resource" "tags_as_list_of_maps" {
+  count = "${length(keys(var.tags))}"
+
+  triggers = "${map(
+    "key", "${element(keys(var.tags), count.index)}",
+    "value", "${element(values(var.tags), count.index)}",
+    "propagate_at_launch", "true"
+  )}"
+}
+
 locals {
+  asg_tags = ["${null_resource.tags_as_list_of_maps.*.triggers}"]
+
   # More information: https://amazon-eks.s3-us-west-2.amazonaws.com/1.10.3/2018-06-05/amazon-eks-nodegroup.yaml
   workers_userdata = <
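
Not part of the patch, but useful context for the `null_resource.tags_as_list_of_maps` resource added to `data.tf`: Terraform 0.11 has no `for` expressions, so a plain map of tags cannot be transformed directly into the list-of-maps shape that `aws_autoscaling_group`'s `tags` argument expects. Each `null_resource` instance stores one `{key, value, propagate_at_launch}` map in its `triggers`, and `local.asg_tags` collects them, presumably for the worker ASG touched in `workers.tf`. A minimal sketch of the transformation, using a hypothetical `tags` value:

```hcl
# Hypothetical input, for illustration only (not part of the patch).
variable "tags" {
  type = "map"

  default = {
    Environment = "test"
    Owner       = "ops"
  }
}

# Given that input, the null_resource in data.tf creates one instance per
# key, and local.asg_tags evaluates to roughly:
#
#   [
#     { key = "Environment", value = "test", propagate_at_launch = "true" },
#     { key = "Owner",       value = "ops",  propagate_at_launch = "true" },
#   ]
#
# which is the list-of-maps format aws_autoscaling_group accepts for its
# `tags` argument in Terraform 0.11.
```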
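
The README hunk above introduces a registry-usage gist that falls outside the diff context. As a sketch only: the `source` below follows the registry naming for this repository, and the inputs shown are inferred from variables exercised elsewhere in this patch (`var.cluster_name` and `var.tags` in the diffs; `subnets` and `vpc_id` in the CI validate step), not a confirmed module interface:

```hcl
# Sketch of consuming the module from the Terraform registry; input names
# are assumptions inferred from this patch, not a documented contract.
module "eks" {
  source = "terraform-aws-modules/eks/aws"

  cluster_name = "my-eks-cluster"
  subnets      = ["subnet-abcde012", "subnet-bcde012a"]
  vpc_id       = "vpc-abcde012"

  tags = {
    Environment = "test"
  }
}
```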