Fabien Gaubert 2023-06-21 14:41:43 +02:00
commit 5cf2ac0217
38 changed files with 2379 additions and 0 deletions

15
.config/.mdl_style.rb Normal file
@@ -0,0 +1,15 @@
all
rule "MD029", style: "ordered"
# our changelog does this, by design
exclude_rule 'MD024'
# Exclude line length
exclude_rule 'MD013'
# Inline HTML
exclude_rule 'MD033'
# Trailing spaces
exclude_rule 'MD009'

@@ -0,0 +1,32 @@
---
exclude: '(^.+\.log$)'
default_language_version:
  python: python3
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.2.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
        args: [--markdown-linebreak-ext=md]
      - id: check-json
      - id: check-merge-conflict
      - id: check-case-conflict
      - id: mixed-line-ending
      - id: check-added-large-files
      # - id: check-shebang-scripts-are-executable
      - id: check-symlinks
      - id: detect-private-key
      - id: pretty-format-json
        args:
          - --autofix
      - id: detect-aws-credentials
        args:
          - --allow-missing-credentials
  - repo: https://github.com/antonbabenko/pre-commit-terraform
    rev: v1.74.2
    hooks:
      - id: terraform_validate
      - id: terraform_fmt

0
.config/.shellcheckrc Normal file
@@ -0,0 +1,53 @@
---
# this is required
formatter: "markdown"
version: ""
header-from: main.tf
footer-from: ""
recursive:
  enabled: true
  path: .
sections:
  hide: []
  show: []
  # deprecated in v0.13.0, removed in v0.15.0
  hide-all: false
  # deprecated in v0.13.0, removed in v0.15.0
  show-all: true
content: ""
output:
  file: "README.md"
  mode: inject
  template: |-
    <!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
    {{ .Content }}
    <!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
output-values:
  enabled: false
  from: ""
sort:
  enabled: true
  by: name
settings:
  anchor: true
  color: true
  default: true
  description: false
  escape: true
  hide-empty: false
  html: true
  indent: 2
  lockfile: true
  read-comments: true
  required: true
  sensitive: true
  type: true

@@ -0,0 +1,4 @@
[rules]
skip-rules = [
]

23
.config/.tflint.hcl Normal file
@@ -0,0 +1,23 @@
plugin "aws" {
enabled = true
version = "0.23.0"
source = "github.com/terraform-linters/tflint-ruleset-aws"
}
rule "terraform_naming_convention" {
enabled = true
}
rule "terraform_documented_outputs" {
enabled = true
}
rule "terraform_documented_variables" {
enabled = true
}
plugin "terraform" {
enabled = true
version = "0.2.2"
source = "github.com/terraform-linters/tflint-ruleset-terraform"
}

21
.config/.trivy.yaml Normal file
@@ -0,0 +1,21 @@
---
format: table
debug: true
insecure: false
ignore-policy:
  - avd-aws-0057
  - avd-aws-0104
severity:
  - CRITICAL
  - HIGH
exclude:
  - .terraform/**/*
  - terraform-aws-modules
  - .terraform
  - .terraform/modules/**/*
skip-dirs:
  - .terraform/**/*
  - terraform-aws-modules
  - .terraform
  - .terraform/modules/**/*

33
.config/.yamllintrc Normal file
@@ -0,0 +1,33 @@
---
yaml-files:
  - '*.yaml'
  - '*.yml'
  - '.yamllint'
rules:
  braces: enable
  brackets: enable
  colons: enable
  commas: enable
  comments:
    level: warning
  comments-indentation:
    level: warning
  document-end: disable
  document-start:
    level: warning
  empty-lines: enable
  empty-values: disable
  float-values: disable
  hyphens: enable
  indentation: enable
  key-duplicates: enable
  key-ordering: disable
  line-length: disable
  new-line-at-end-of-file: enable
  new-lines: enable
  octal-values: disable
  quoted-strings: disable
  trailing-spaces: enable
  truthy: disable

21
.editorconfig Normal file
@@ -0,0 +1,21 @@
root = true
[*]
indent_style = space
indent_size = 2
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[{*.{py,md},Dockerfile}]
indent_size = 4
[*.md]
trim_trailing_whitespace = false
[{Makefile,**.mk,**.mk.j2}]
# Use tabs for indentation (Makefiles require tabs)
indent_style = tab
[.terraform-version]
insert_final_newline = false

7
.gitattributes vendored Normal file
@@ -0,0 +1,7 @@
terraform/ export-ignore
README.md export-ignore
.gitattributes export-ignore
config.yaml export-ignore
.gitignore export-ignore
get-starter-kit.sh export-ignore
remove-starter-kit.sh export-ignore

52
.gitignore-project Normal file
@@ -0,0 +1,52 @@
# Source: https://github.com/github/gitignore/blob/main/Terraform.gitignore
# Local .terraform directories
**/.terraform/*
# .tfstate files
*.tfstate
*.tfstate.*
# Crash log files
crash.log
crash.*.log
# Exclude all .tfvars files, which are likely to contain sensitive data, such as
# password, private keys, and other secrets. These should not be part of version
# control as they are data points which are potentially sensitive and subject
# to change depending on the environment.
*.tfvars
*.tfvars.json
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
override.tf.json
*_override.tf
*_override.tf.json
# Include override files you do wish to add to version control using negated pattern
# !example_override.tf
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
# example: *tfplan*
# Ignore CLI configuration files
.terraformrc
terraform.rc
# End https://github.com/github/gitignore/blob/main/Terraform.gitignore
# Additional Terraform files
tfplan.binary
tfplan.json
.terraform.lock.hcl
# TerraformStarterKit
.config/
.backup/
automation/
docker-compose-tools.yml
docker-compose.yml
.gitignore-project
configure.yaml.dist
.env
makeplan.mk

26
LICENSE Normal file
@@ -0,0 +1,26 @@
Copyright (c) 2023, Orange Business
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

349
Makefile Normal file
@@ -0,0 +1,349 @@
# import config.
# you can change the default config with `make cnf="config_special.env" build`
cnf ?= .env
ifneq ("$(wildcard $(cnf))","")
include $(cnf)
export $(shell sed 's/=.*//' $(cnf))
endif
cur_date = $(shell date '+%Y-%m-%d-%H-%M-%S')
# Make will use bash instead of sh
SHELL := /usr/bin/env bash
ifdef CICD_MODE
SHELL := /usr/bin/env sh
endif
# Interactive mode
NON_INTERACTIVE ?= 0
help: ## This help.
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
.DEFAULT_GOAL := help
# Name of the output of the terraform plan
PLAN_BINARY_FILE=tfplan.binary
PLAN_JSON_FILE=tfplan.json
# Select the config file based of the stage
CONFIG_FILE := parameters.auto.tfvars
VAR_PARAMETERS := -var-file=../common.tfvars -var-file=${CONFIG_FILE} -var="module_path=${CURRENT_DIR}"
DOCKER_COMPOSE_FILES = -f docker-compose.yml
DOCKER_COMPOSE_FILES_TOOLS = -f docker-compose-tools.yml
DOCKER_COMPOSE = docker compose ${DOCKER_COMPOSE_FILES}
DOCKER_COMPOSE_DEV_TOOLS = docker compose ${DOCKER_COMPOSE_FILES_TOOLS}
ifdef CICD_MODE
ROLE_NAME := ${CICD_ROLE_NAME}
else
ROLE_NAME := ${LOCAL_ROLE_NAME}
endif
TERRAFORM_INIT = init --upgrade \
-backend-config="bucket=${TF_VAR_backend_bucket_name}"\
-backend-config="region=${TF_VAR_backend_bucket_region}" \
-backend-config="dynamodb_table=${TF_VAR_backend_dynamodb_table}" \
-backend-config="key=${PROJECT_NAME}${subst terraform,,$(CURRENT_DIR)}.tfstate" \
-backend-config="role_arn=${TF_VAR_backend_bucket_access_role}"
ifdef CICD_MODE
TFENV_EXEC ?= $(shell which tfenv)
TERRAFORM_EXEC ?= $(shell which terraform)
TFLINT_RUN ?= $(shell which tflint) --config .config/.tflint.hcl
PRECOMMIT_RUN ?= $(shell which pre-commit)
DOTENV_LINTER ?= $(shell which dotenv-linter)
SHELL_LINT ?= $(shell which shellcheck)
YAML_LINT ?= $(shell which yamllint)
MD_LINT ?= $(shell which mdl) --style .config/.mdl_style.rb
TRIVY_RUN ?= $(shell which trivy)
TERRASCAN_RUN ?= $(shell which terrascan)
TERRAFORM_COMPLIANCE_RUN ?= $(shell which terraform-compliance)
TERRAFORM_DOCS ?= $(shell which terraform-docs)
else
TFENV_EXEC = $(DOCKER_COMPOSE) exec terraform
TERRAFORM_EXEC = $(DOCKER_COMPOSE) exec terraform
TFLINT_RUN = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm lint --config ./.config/.tflint.hcl
PRECOMMIT_RUN = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm precommit
DOTENV_LINTER = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm dotenv-linter
SHELL_LINT = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm shell_lint shellcheck
YAML_LINT = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm yaml_lint yamllint
MD_LINT = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm markdown_lint mdl --style ./.config/.mdl_style.rb
TRIVY_RUN = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm trivy
TERRASCAN_RUN = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm terrascan
TERRAFORM_COMPLIANCE_RUN = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm terraform-compliance
TERRAFORM_DOCS = $(DOCKER_COMPOSE_DEV_TOOLS) run --rm terraform-docs
endif
debug: ## Print debug logs
debug:
ifeq ($(PRINT_DEBUG),"true")
printenv
echo $(TFENV_EXEC)
echo $(TERRAFORM_EXEC)
echo $(TFLINT_RUN)
echo $(PRECOMMIT_RUN)
endif
CONFIG_FILE := parameters.auto.tfvars
ifdef CICD_MODE
VAR_PARAMETERS := -var-file=$(shell pwd)/terraform/common.tfvars \
-var-file=${CONFIG_FILE} \
-var="module_path=${CURRENT_DIR}" \
#-var="backend_bucket_key=${CURRENT_DIR}"
else
VAR_PARAMETERS := -var-file=/workdir/terraform/common.tfvars \
-var-file=${CONFIG_FILE} \
-var="module_path=${CURRENT_DIR}" \
#-var="backend_bucket_key=${CURRENT_DIR}"
endif
########################################################################################################################
# FUNCTIONS
########################################################################################################################
terraform_validate:
ifndef CICD_MODE
$(TFENV_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && tfenv install"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform validate"
else
cd ${CURRENT_DIR} && tfenv install
cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)
cd ${CURRENT_DIR} && terraform validate
endif
terraform_format:
ifndef CICD_MODE
$(TFENV_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && tfenv install"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform fmt -recursive"
else
cd ${CURRENT_DIR} && $(TFENV_EXEC) install
cd ${CURRENT_DIR} && terraform fmt -recursive
endif
# Combination of Terraform commands to install a stack layer
terraform_install_commands:
ifneq (,$(wildcard ${CURRENT_DIR}/${CONFIG_FILE}))
ifdef CICD_MODE
cd ${CURRENT_DIR} && tfenv install
cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)
cd ${CURRENT_DIR} && terraform plan ${VAR_PARAMETERS} -out ${PLAN_BINARY_FILE}
cd ${CURRENT_DIR} && terraform apply ${PLAN_BINARY_FILE}
else
$(TFENV_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && tfenv install"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform apply -compact-warnings ${VAR_PARAMETERS}"
endif
endif
# Combination of Terraform commands to install a stack layer
terraform_init_commands:
ifneq (,$(wildcard ${CURRENT_DIR}/${CONFIG_FILE}))
ifdef CICD_MODE
cd ${CURRENT_DIR} && tfenv install
cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)
else
$(TFENV_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && tfenv install"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)"
endif
endif
# Combination of Terraform commands to install a stack layer
terraform_plan_commands:
ifneq (,$(wildcard ${CURRENT_DIR}/${CONFIG_FILE}))
ifdef CICD_MODE
cd ${CURRENT_DIR} && $(TFENV_EXEC) install
cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)
cd ${CURRENT_DIR} && terraform plan ${VAR_PARAMETERS} -out ${PLAN_BINARY_FILE}
cd ${CURRENT_DIR} && terraform show -json ${PLAN_BINARY_FILE} > ${PLAN_JSON_FILE}
else
$(TFENV_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && tfenv install"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform $(TERRAFORM_INIT)"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform plan -compact-warnings ${VAR_PARAMETERS} -out ${PLAN_BINARY_FILE}"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform show -json ${PLAN_BINARY_FILE} > ${PLAN_JSON_FILE}"
endif
endif
terraform_lint:
$(TFLINT_RUN) ${CURRENT_DIR}
# Terraform commands to delete a stack layer
terraform_destroy_commands:
ifneq (,$(wildcard ${CURRENT_DIR}/${CONFIG_FILE}))
ifdef CICD_MODE
cd ${CURRENT_DIR} && tfenv install
cd ${CURRENT_DIR} && terraform destroy ${VAR_PARAMETERS}
else
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && tfenv install"
$(TERRAFORM_EXEC) /bin/sh -c "cd ${CURRENT_DIR} && terraform destroy ${VAR_PARAMETERS}"
endif
endif
########################################################################################################################
# LOCAL DEV DOCKER
########################################################################################################################
init: ## Generate .env file
init:
if [ ! -d .backup ] ; then mkdir .backup ; fi
if [ -f .env ] ; then cp .env .backup/.env-${cur_date}.bck ; else touch .env ; fi
cp configure.yaml automation/jinja2/variables/
# Hack: use only for first run
$(DOCKER_COMPOSE_DEV_TOOLS) run --rm jinja2docker .env.dist.j2 /variables/configure.yaml
$(DOCKER_COMPOSE_DEV_TOOLS) run --rm jinja2docker .env.dist.j2 /variables/configure.yaml | tee .env
generate: ## Generate from template gitlab-ci.yml and Makefile
generate:
@$(MAKE) init
@$(MAKE) generate_makefile
if [ "${GENERATE_GITLAB_CI}" == "True" ]; then "$(MAKE)" generate_gitlab_ci; fi
generate_makefile: ## Generate Makefile
generate_makefile:
if [ ! -d .backup ] ; then mkdir .backup ; fi
cp Makefile .backup/Makefile-${cur_date}.bck
# Hack: use only for first run
$(DOCKER_COMPOSE_DEV_TOOLS) run --rm jinja2docker make.mk.j2 /variables/vars.yml
$(DOCKER_COMPOSE_DEV_TOOLS) run --rm jinja2docker make.mk.j2 /variables/vars.yml | tee makeplan.mk
./automation/Makefile/delete_automatic_content.sh
cat makeplan.mk >> Makefile
generate_gitlab_ci: ## Generate GitlabCI
generate_gitlab_ci:
if [ ! -d .backup ] ; then mkdir .backup ; fi
if [ -f .gitlab-ci.yml ] ; then cp .gitlab-ci.yml .backup/.gitlab-ci.yml-${cur_date}.bck ; else touch .gitlab-ci.yml ; fi
cp configure.yaml automation/jinja2/variables/
$(DOCKER_COMPOSE_DEV_TOOLS) run jinja2docker .gitlab-ci.yml.j2 /variables/configure.yaml | tee .gitlab-ci.yml
tr -d "\r" < .gitlab-ci.yml>.gitlab-ci.yml.tmp
mv .gitlab-ci.yml.tmp .gitlab-ci.yml
start: ## Start the project
start: init generate
$(DOCKER_COMPOSE) up -d
# $(TERRAFORM_EXEC) apk add --no-cache python3 py3-pip
stop: ## Stop the project
stop:
$(DOCKER_COMPOSE) stop
down: ## stop containers
down:
$(DOCKER_COMPOSE) down -v
kill: ## Destroy all containers
kill:
$(DOCKER_COMPOSE) kill
$(DOCKER_COMPOSE) down --volumes --remove-orphans
console: ## Connect Terraform Docker
console:
$(TERRAFORM_EXEC) /bin/sh
restart: ## Restart the Terraform stack
restart: stop start
logout: ## Remove assumed role
logout:
rm -f .env
########################################################################################################################
# QUALITY CHECKS
########################################################################################################################
precommit: ## Launch precommit hooks
precommit:
$(PRECOMMIT_RUN) run -a --config=./.config/.pre-commit-config.yaml
dotenv_lint: ## Lint dotenv files
dotenv_lint:
$(DOTENV_LINTER) --skip UnorderedKey --skip LowercaseKey
markdown_lint: ## Lint Markdown files
markdown_lint:
echo $(MD_LINT)
$(MD_LINT) .
shell_lint: ## Lint shell files
shell_lint:
$(SHELL_LINT) **/*/*.sh
yaml_lint: ## Lint yaml files
yaml_lint:
$(YAML_LINT) -c ./.config/.yamllintrc --no-warnings .
terrascan_docker: ## Terrascan Docker
terrascan_docker:
$(DOCKER_COMPOSE_DEV_TOOLS) run terrascan scan -d automation -i docker --verbose --config-path=./.config/.terrascan_config.toml
powershell_lint: ## PowerShell Linter
powershell_lint:
$(DOCKER_COMPOSE_DEV_TOOLS) run powershell_lint "Invoke-ScriptAnalyzer -Recurse -Path ."
quality-checks: ## run quality checks
quality-checks: dotenv_lint format validate lint precommit markdown_lint shell_lint yaml_lint trivy terrascan_docker terraform_terrascan
########################################################################################################################
# INSTALL / DELETE PLANS
########################################################################################################################
# Automatic Content Generated
generate_documentation: ## Generate Terraform Documentation
generate_documentation:
$(DOCKER_COMPOSE_DEV_TOOLS) run --rm --remove-orphans terraform_docs terraform/demo --config=./.config/.terraform-docs.yml
terraform_terrascan: ## Terrascan Terraform
terraform_terrascan:
$(TERRASCAN_RUN) scan -i terraform --verbose --config-path=./.config/.terrascan_config.toml --iac-dir=terraform/demo
format: ## Format all Terraform files using "terraform fmt"
format:
@$(MAKE) --no-print-directory terraform_format CURRENT_DIR="terraform/demo"
trivy: ## Terraform Trivy
trivy:
$(TRIVY_RUN) config terraform/demo --config=./.config/.trivy.yaml --skip-dirs .terraform
validate: ## Validate all Terraform files using "terraform validate"
validate:
@$(MAKE) --no-print-directory terraform_validate CURRENT_DIR="terraform/demo"
lint: ## Check that good naming practices are respected in Terraform files (using tflint)
lint:
$(TFLINT_RUN) --init
@$(MAKE) --no-print-directory terraform_lint CURRENT_DIR="terraform/demo"
init_terraform_demo: ## Init AWS terraform/demo layer
init_terraform_demo:
@$(MAKE) --no-print-directory CURRENT_DIR=terraform/demo terraform_init_commands
plan_terraform_demo: ## Plan AWS terraform/demo layer
plan_terraform_demo:
@$(MAKE) --no-print-directory CURRENT_DIR=terraform/demo terraform_plan_commands
install_terraform_demo: ## Install AWS terraform/demo layer
install_terraform_demo:
@$(MAKE) --no-print-directory CURRENT_DIR=terraform/demo terraform_install_commands
destroy_terraform_demo: ## Uninstall AWS terraform/demo layer
destroy_terraform_demo:
@$(MAKE) --no-print-directory CURRENT_DIR=terraform/demo terraform_destroy_commands
init_all: ## Init all AWS layers
init_all:
@$(MAKE) --no-print-directory init_terraform_demo
plan_all: ## Plan all AWS layers
plan_all:
@$(MAKE) --no-print-directory plan_terraform_demo
install_all: ## Install all AWS layers
install_all: install_terraform_demo
destroy_all: ## Uninstall all layers
destroy_all: destroy_terraform_demo

476
README.md Normal file
@@ -0,0 +1,476 @@
# Terraform StarterKit
The StarterKit is a tool developed to simplify the process of deploying resources on Amazon Web Services (AWS) using Terraform. Terraform is an infrastructure as code (IaC) tool that allows you to define and provision infrastructure resources in a declarative manner.
The purpose of the StarterKit is to provide a pre-configured template or framework that helps users quickly get started with AWS and Terraform. It typically includes a set of predefined Terraform configurations, scripts, tools, and best practices tailored for common AWS use cases.
The StarterKit utilizes several technologies to facilitate the deployment of resources on AWS with Terraform. These technologies include:
1. Docker: Docker is a containerization platform that allows for the creation and management of lightweight, isolated environments called containers. In the context of the starter kit, Docker is used to provide a consistent and reproducible development environment. It helps ensure that the required dependencies and tools are readily available without conflicts.
2. Makefile: Makefile is a build automation tool that is commonly used to define and execute tasks in software development projects. In the context of the starter kit, Makefile is used to define and automate common tasks such as initializing the project, deploying resources, running tests, and cleaning up.
3. Jinja: Jinja is a powerful templating engine for Python. In the context of the StarterKit, Jinja is used for templating purposes to generate configuration files or scripts dynamically. It allows for the inclusion of variables, conditionals, loops, and other programming constructs within templates, enabling dynamic generation of Terraform configurations based on user-defined parameters.
These technologies work together to provide an efficient and streamlined workflow for deploying resources on AWS using Terraform. Docker ensures consistent and isolated environments, the Makefile automates common tasks, and Jinja enables flexible and dynamic templating.
## Objectives and Benefits
Here are some key objectives and benefits of an AWS StarterKit:
1. **Accelerating AWS adoption:** By providing a ready-to-use StarterKit, users can quickly onboard AWS and start deploying infrastructure resources without spending significant time on initial setup and configuration.
2. **Simplifying Terraform usage:** The StarterKit abstracts away some of the complexities and provides a simplified interface for provisioning resources (via Makefile), making it easier for users to get up and running with Terraform.
3. **Standardized infrastructure:** The StarterKit promotes standardization and consistency in infrastructure deployments. It enforces best practices, naming conventions, and predefined configurations, ensuring that resources are provisioned in a consistent and reliable manner.
4. **Modularity and reusability:** A well-designed StarterKit encourages modular and reusable infrastructure code. It may include modules or templates that can be easily customized and extended to meet specific requirements, promoting code reusability and reducing duplication.
5. **Security and compliance:** The StarterKit can include security configurations and guidelines to ensure that the provisioned resources adhere to AWS security best practices. It may also provide compliance-focused templates to help users meet specific regulatory requirements.
6. **Documentation and guidance:** Along with the StarterKit, documentation and guidance materials are often provided to assist users in understanding the tool's features, how to customize configurations, and how to troubleshoot common issues.
Overall, an AWS StarterKit aims to streamline the process of deploying resources on AWS using Terraform, reducing the learning curve and accelerating the time to value for users. It provides a solid foundation and best practices to help users start their infrastructure-as-code journey on AWS with confidence.
## Prerequisites
To install the StarterKit, several tools are required on the user's computer:
1. **Docker**: [Docker](https://docs.docker.com/engine/install/) is a platform that allows you to package, distribute, and run applications in isolated containers. It provides a consistent environment for running the starter kit's components and dependencies.
2. **Docker Compose Plugin**: [Docker Compose Plugin](https://docs.docker.com/compose/install/linux/) is an extension for Docker that simplifies the management of multi-container applications. It enables you to define and run multi-container setups required by the starter kit.
3. **Makefile**: [Makefile](https://www.gnu.org/software/make/manual/make.html) is a build automation tool that helps manage and organize complex workflows. In the context of the starter kit, Makefile provides a convenient way to define and execute common tasks and commands for setting up and deploying AWS resources.
4. **AWS CLI**: [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) (Command Line Interface) is a unified tool for interacting with various AWS services through the command line. It allows you to configure your AWS credentials, manage resources, and automate tasks required by the starter kit.
5. **jq**: [jq](https://jqlang.github.io/jq/) is a lightweight and flexible command-line JSON processor. It enables you to manipulate and extract data from JSON files and API responses, which can be useful for processing and transforming data within the starter kit.
6. **Git**: [Git](https://git-scm.com/) is a distributed version control system used for tracking changes in source code during software development.
By having these tools installed, users can seamlessly set up and utilize the StarterKit for deploying resources on AWS with Terraform.
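A quick way to confirm that these prerequisites are available is to check that each command resolves (the reported versions do not matter for this check):

```bash
# Sanity check: every required tool should print a version string
docker --version
docker compose version
make --version
aws --version
jq --version
git --version
```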
### Initialize a new project
#### Step 1
To start using the StarterKit, follow these steps:
1. Create a new folder on your computer. You can choose a suitable location for the folder based on your preference.
2. Download or clone the StarterKit repository to your local machine.
3. Locate the "terraform" folder within the StarterKit repository.
4. Copy or move the "terraform" folder into the newly created folder on your computer.
By completing these steps, you will have the necessary "terraform" folder from the StarterKit in your designated folder.
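For illustration, the same steps can be done from a shell; the folder name `my-aws-project` and the clone location are only examples:

```bash
# Create the project folder and copy the StarterKit's "terraform" folder into it
mkdir my-aws-project
git clone <starter-kit-repository-url> starter-kit
cp -r starter-kit/terraform my-aws-project/
cd my-aws-project
```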
#### Step 2
Before launching the `get-starter-kit.sh` script from the `terraform` folder, you need to set the version of the StarterKit you want to use and provide Git HTTP credentials to download it. Here's how you can do it:
1. Open the `get-starter-kit.sh` script in a text editor.
2. Look for a variable or section in the script where you can set the version of the StarterKit. Update the variable or field with the desired version. It might look like this:
```bash
# Set the version of the StarterKit
STARTER_KIT_VERSION="v1.2.3"
```
Replace `v1.2.3` with the specific version you want to use.
3. Next, you'll need to set the Git HTTP credentials for downloading the StarterKit. Look for a section in the script where you can specify the credentials. It may resemble the following:
```bash
# Set GitLab HTTP token
GITLAB_TOKEN="your_gitlab_http_token"
```
4. Save the changes to the `get-starter-kit.sh` script.
5. Open a terminal or command prompt and navigate to the `terraform` folder.
6. Run the `get-starter-kit.sh` script by executing the following command:
```bash
./get-starter-kit.sh
```
This will execute the script and initiate the process of downloading the specified version of the StarterKit using the provided Git HTTP Token.
Ensure that you have the necessary permissions and access rights to download the StarterKit from the Git repository.
#### Step 3
1. Locate the `configure.yaml.dist` file in the StarterKit directory.
2. Make a copy of the `configure.yaml.dist` file and rename it as `configure.yaml`. You can do this by executing the following command in the terminal or command prompt:
```bash
cp configure.yaml.dist configure.yaml
```
This creates a new file named `configure.yaml` with the same content as `configure.yaml.dist`.
3. Open the `configure.yaml` file in a text editor.
4. Inside the `configure.yaml` file, you'll find various parameters that need to be updated according to your requirements.
5. Update the parameters in the `configure.yaml` file with your desired values. Make sure to follow the instructions or comments provided in the file to correctly configure each parameter.
6. Save the changes to the `configure.yaml` file.
#### Step 4
[Export AWS credentials as environment variables in your path.](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html)
1. Open a terminal or command prompt.
2. Set the AWS access key ID as an environment variable by executing the following command:
```bash
export AWS_ACCESS_KEY_ID="your_access_key_id"
```
Replace `"your_access_key_id"` with your actual AWS access key ID.
3. Set the AWS secret access key as an environment variable by executing the following command:
```bash
export AWS_SECRET_ACCESS_KEY="your_secret_access_key"
```
Replace `"your_secret_access_key"` with your actual AWS secret access key.
4. (Optional) If you have an AWS session token, you can set it as an environment variable by executing the following command:
```bash
export AWS_SESSION_TOKEN="your_session_token"
```
Replace `"your_session_token"` with your actual AWS session token.
5. (Optional) If you have an AWS Default Region, you can set it as an environment variable by executing the following command:
```bash
export AWS_REGION="your_session_region"
```
Replace `"your_session_region"` with your actual AWS Region parameter.
By setting these environment variables, AWS CLI and other AWS-related tools will be able to access your AWS credentials from the environment, allowing you to interact with AWS services using those credentials.
**Please note that exporting credentials as environment variables may not be the most secure method, especially in shared environments.**
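Once the variables are exported, you can confirm that the credentials are picked up by asking AWS which identity they resolve to (this is the same check the StarterKit's drift script performs):

```bash
# Prints the account ID and ARN behind the exported credentials
aws sts get-caller-identity
```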
### Start the project
Once you have set your AWS credentials in the path and modified the `configure.yaml` file to fit your needs, you can start the StarterKit using the command `make start`. This command will execute the `make init` command, which performs the following steps:
1. It converts the `configure.yaml` file to a `.env` file. The `.env` file is used to store environment variables required by the StarterKit.
2. It generates a new `Makefile` based on the `configure.yaml` file. The `Makefile` contains predefined targets and commands that can be executed using the `make` command.
3. It generates a new `.gitlab-ci.yml` file by leveraging the information present in the `.env` file and using a GitLab CI Jinja template file as a blueprint. The resulting `.gitlab-ci.yml` file will reflect the specific configurations and values provided in the `.env` file, allowing for a customized and automated setup of your GitLab CI pipeline.
By executing `make start`, the StarterKit will be initialized with the provided configuration, and you can proceed with deploying resources on AWS using Terraform.
It's important to note that if your AWS credentials expire or change, you need to update the credentials in the environment variables or the AWS CLI configuration and then restart the StarterKit by running `make start` again. This ensures that the StarterKit uses the updated credentials for all AWS operations.
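A typical local session therefore looks like the following, using targets defined in the generated `Makefile`:

```bash
make start     # convert configure.yaml, generate the Makefile and .gitlab-ci.yml, start the containers
make console   # open a shell inside the Terraform container if needed
make stop      # stop the containers when you are done
```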
#### Help
To get help and list all the available commands in the StarterKit, you can use the `make help` command. This command will display the available targets and their descriptions from the `Makefile`. Here's how you can use it:
1. Open a terminal or command prompt.
2. Navigate to the StarterKit directory.
3. Run the following command:
```bash
make help
```
This will display the list of available targets and their descriptions, providing you with information about the available commands and their purposes.
Additionally, you can open the `Makefile` in a text editor to explore and understand the various targets and commands defined in it. The `Makefile` contains rules that define how the StarterKit is built, executed, and managed using the `make` command.
By using `make help` and referring to the `Makefile`, you can gain a better understanding of the available commands and utilize them effectively in your StarterKit workflow.
### Usage
All available commands are described in the Makefile. Use the `make help` command to list them.
```bash
help This help.
debug Print debug logs
generate Generate from template gitlab-ci.yml and Makefile
generate_makefile Generate Makefile
generate_gitlab_ci Generate GitlabCI
start Start the project
stop Stop the project
down stop containers
kill Destroy all containers
console Connect Terraform Docker
logout Remove assumed role
drift Detect Drift
precommit Launch precommit hooks
dotenv_linter Lint dotenv files
markdown_lint Lint Markdown files
shell_lint Lint shell files
yaml_lint Lint yaml files
trivy Terraform Trivy
assume-role Assume the given role
dashboard Launch Terradash on localhost:8080
quality-checks run quality checks
format Format all Terraform files using "terraform fmt"
validate Validate all Terraform files using "terraform validate"
lint Check that good naming practices are respected in Terraform files (using tflint)
plan_compute Plan AWS compute layer
install_compute Install AWS compute layer
delete_compute Uninstall AWS compute layer
plan_all Plan all AWS layers
install_all Install all AWS layers
delete_all Uninstall all layers
```
### Add a new Plan
To add a new Terraform plan to the project, you can follow these steps:
1. Open the `configure.yaml` file in a text editor. This file should be located in the project directory.
2. Locate the `plans` key in the `configure.yaml` file. This key contains a list of plan names.
3. Add the name of the new plan to the `plans` list. Each plan name should be a string.
For example, if you want to add a plan named "new-plan", the entry would look like:
```yaml
plans:
- plan1
- plan2
- new-plan
```
4. Save the `configure.yaml` file after adding the new plan name.
After adding the new Terraform plan to the `configure.yaml` file, you can relaunch the starter kit by executing the `make start` command.
This command will generate the necessary templates based on the newly added plan. Here are the steps:
1. Open a terminal or command prompt in the project directory.
2. Run the following command to start the starter kit:
```bash
make start
```
This command will trigger the execution of the StarterKit's Makefile, which includes the logic to generate the required templates.
3. The starter kit will read the `configure.yaml` file, identify the added plan, and generate the corresponding templates or configuration files based on the plan's specifications.
4. Once the process completes, you can proceed with using the generated templates for your Terraform deployment.
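Each plan listed in `configure.yaml` gets its own set of generated Make targets. For the bundled `terraform/demo` layer they look like the following; a newly added plan gets equivalent targets named after its folder:

```bash
make init_terraform_demo      # terraform init for the layer
make plan_terraform_demo      # terraform plan, written to tfplan.binary / tfplan.json
make install_terraform_demo   # terraform apply
make destroy_terraform_demo   # terraform destroy
```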
### Add a new tool
#### New Service in Docker
1. Open the `docker-compose-tools.yml` file located in the project's directory.
2. Inside the file, you will find a list of services defined under the `services` section. Each service represents a specific tool or component used in the starter kit.
3. To add a new tool, you can either use an existing Docker community image or create your own Dockerfile.
- Using an existing Docker community image: Find the appropriate image for the tool you want to add on the [Docker Hub](https://hub.docker.com/). Copy the image name and version tag.
- Creating your own Dockerfile: If you prefer to create your own Dockerfile, you can place it in the `automation` folder of the project. Make sure to include the necessary instructions to build the Docker image.
4. Add a new service definition in the `docker-compose-tools.yml` file for your tool. Follow the existing service definitions as a reference.
- If using an existing Docker community image, you can use the `image` property to specify the image name and version.
- If using a custom Dockerfile, you can use the `build` property to specify the path to the Dockerfile.
Customize other properties such as the container name, volumes, environment variables, and any additional configurations specific to the tool you are adding.
5. Save the `docker-compose-tools.yml` file after adding the new service definition. You can then run the new service as shown below.
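Once the service is declared, you can run it the same way the Makefile runs the other tools, through the tools compose file (`my_new_tool` is a placeholder for the service name you chose):

```bash
# One-off run of the new tool, mirroring the DOCKER_COMPOSE_DEV_TOOLS pattern used in the Makefile
docker compose -f docker-compose-tools.yml run --rm my_new_tool --help
```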
#### New command in Makefile
If the command you want to add to the starter kit does not depend on the configure.yaml file, you can directly add it to
the Makefile. However, if the command requires some dynamic configuration based on the configure.yaml file, you should add it to
the Jinja template in the automation folder.
## Tips
[Rebase from a fork repository](https://levelup.gitconnected.com/how-to-update-fork-repo-from-original-repo-b853387dd471)
Launch makefile without stopping on errors `make -k cmd` useful for the `quality-checks` target.
After adding a new Terraform Plan, launch the `make start` to update the `Makefile` and `.gitlab-ci.yml` file.
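For example, to run every quality gate in one pass without stopping at the first failure:

```bash
# Runs the linters, pre-commit hooks, Trivy and Terrascan; -k keeps going when a check fails
make -k quality-checks
```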
## Tools
- [TFEnv](https://github.com/tfutils/tfenv)
tfenv is a version manager specifically designed for Terraform. It provides a simple and convenient way to manage multiple
versions of Terraform on a single machine. With tfenv, you can easily switch between different versions of Terraform based on your project's requirements.
Using tfenv, you can install and manage multiple versions of Terraform side by side, ensuring compatibility with different projects or environments.
It allows you to easily switch between versions with a single command, making it effortless to work on different projects that may require different
versions of Terraform.
1. Create a file named `.terraform-version` in the root directory of your Terraform project.
2. Inside the `.terraform-version` file, specify the desired version of Terraform you want to use for your project. For example, you can write `0.15.4` to indicate that you want to use version 0.15.4.
3. Install tfenv on your machine if you haven't done so already. You can refer to the tfenv documentation for installation instructions specific to your operating system.
4. Once tfenv is installed, navigate to the root directory of your Terraform project using the command line.
5. Run the following command to let tfenv detect and switch to the version specified in the `.terraform-version` file
`tfenv install`
This command will check the `.terraform-version` file and automatically install the specified version of Terraform if it's not already installed.
If the desired version is already installed, tfenv will switch to that version.
6. Verify that the correct version of Terraform is now in use by running:
`terraform version`
You should see the version specified in the `.terraform-version` file displayed in the output.
By using the `.terraform-version` file, tfenv makes it easy to ensure that the correct version of Terraform is used for each specific project or directory. It simplifies the management of Terraform versions and helps maintain consistency across different projects.
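A minimal sketch of that workflow, using Terraform 1.5.7 purely as an example version:

```bash
# Pin the Terraform version for this project, let tfenv install it, then verify
echo "1.5.7" > .terraform-version
tfenv install
terraform version
```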
- [Trivy](https://github.com/aquasecurity/trivy)
Trivy is a security vulnerability scanner and open-source tool designed for container and application security. It helps
in identifying vulnerabilities in container images, as well as in software dependencies used by applications.
Trivy scans container images and provides detailed reports on any known vulnerabilities
found in the operating system packages, libraries, and other components.
It supports various platforms and package managers, making it a valuable tool for developers, DevOps teams, and security
professionals to proactively identify and address security risks in their containerized environments.
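In this StarterKit, Trivy is wired into the Makefile and reads its settings from `.config/.trivy.yaml`:

```bash
# Scan the demo layer's Terraform configuration with the bundled Trivy settings
make trivy
```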
- [Pre-commit](https://pre-commit.com/)
Pre-commit is a lightweight and highly customizable framework used for setting up and enforcing code quality checks and
pre-commit hooks in software development projects. It allows developers to define a set of hooks that automatically run
before committing their code changes, ensuring that certain checks and validations are performed. These checks can
include formatting code, linting, running tests, and more.
Pre-commit helps maintain code consistency, improves code quality, and catches potential issues early in the development
process, enhancing collaboration and reducing the likelihood of introducing bugs into the codebase.
- [pre-commit-terraform](https://github.com/antonbabenko/pre-commit-terraform).
- terraform_validate
- terraform_fmt
- terraform_docs
- terrascan
- terraform_tflint
- terraform_tfsec
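All of these checks are driven by `.config/.pre-commit-config.yaml` and can be run across the whole repository through the dedicated Make target:

```bash
# Equivalent to: pre-commit run -a --config=./.config/.pre-commit-config.yaml
make precommit
```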
- [terraform-docs](https://github.com/terraform-docs/terraform-docs)
terraform-docs is a documentation generation tool specifically designed for Terraform configurations. It analyzes Terraform
code and generates comprehensive documentation that helps users understand and visualize the infrastructure and
resources defined in their Terraform projects. terraform-docs extracts information from the Terraform configuration files,
such as resource definitions, variable descriptions, input and output values, and outputs them in a user-friendly format.
terraform-docs provides an automated way to keep documentation in sync with the Terraform codebase, making it easier to
maintain and share up-to-date documentation. It enhances collaboration among team members by providing clear and
structured documentation that can be easily understood by developers, operators, and stakeholders.
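Here it is driven by `.config/.terraform-docs.yml` and injects the generated tables between the markers defined in that file:

```bash
# Regenerate the documentation of the demo layer with terraform-docs
make generate_documentation
```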
- [terraform_tflint](https://github.com/terraform-linters/tflint)
Terraform linter is a static analysis tool used to enforce coding best practices, maintain consistency, and identify
potential issues in Terraform code. It analyzes the Terraform configuration files and provides feedback on code quality,
style violations, and potential errors or misconfigurations.
By using a set of predefined rules or custom configurations, the Terraform linter can catch common mistakes, deprecated
syntax, unused variables, and other code smells. It helps developers write cleaner and more reliable Terraform code by
highlighting problematic areas and suggesting improvements.
- [terrascan](https://runterrascan.io/)
Terrascan is a static code analysis tool designed specifically for Terraform configurations. It helps identify potential
security vulnerabilities, compliance violations, and misconfigurations in Terraform code. Terrascan scans the Terraform
files and compares them against a set of predefined security policies and best practices.
By leveraging the power of static analysis, Terrascan can detect security risks such as overly permissive IAM policies,
insecure storage configurations, and unencrypted sensitive data. It provides detailed reports that highlight the specific
vulnerabilities and non-compliant configurations found in the code.
- [tfsec](https://github.com/aquasecurity/tfsec)
tfsec is a security scanner and static analysis tool specifically built for Terraform code. It helps identify potential
security risks, best practice violations, and misconfigurations in Terraform configurations.
tfsec analyzes the Terraform code and provides feedback on security-related issues, allowing developers to proactively
address them.
By scanning the Terraform files, tfsec checks for common security vulnerabilities such as open security group rules,
missing encryption settings, and insecure access control configurations. It provides detailed reports and recommendations
to help developers remediate security issues and ensure a more robust and secure infrastructure.
- [dotenv linter](https://github.com/dotenv-linter/dotenv-linter)
dotenv-linter is a tool used for linting and validating .env files. It helps ensure that environment variable files are
well-formatted and adhere to best practices. dotenv-linter scans .env files and provides feedback on potential issues
and inconsistencies in the file content.
By using dotenv-linter, developers can catch common mistakes such as missing or duplicated keys, invalid or incomplete
values, and improper formatting. It enforces guidelines for maintaining clean and error-free .env files, making
it easier to manage environment configurations across different environments and deployments.
- [markdown_lint](https://github.com/markdownlint/markdownlint)
markdownlint is a linter and style checker specifically designed for Markdown documents. It helps ensure consistency,
readability, and adherence to best practices in Markdown files. markdownlint analyzes Markdown content and provides
feedback on formatting, style violations, and potential errors.
By using markdownlint, developers and writers can catch common mistakes such as inconsistent heading levels,
trailing spaces, excessive line lengths, and incorrect link formatting. It enforces guidelines for maintaining
well-structured and visually appealing Markdown documents, improving the overall quality of documentation and written content.
- [shell_lint](https://www.shellcheck.net/)
shellcheck is a static analysis tool used to lint and validate shell scripts. It helps identify potential issues, errors,
and best practice violations in shell scripts. shellcheck analyzes shell script files and provides feedback on code quality,
potential bugs, and security vulnerabilities.
By using shellcheck, developers can catch common mistakes such as syntax errors, undefined variables, incorrect command usage,
and unsafe code patterns. It enforces guidelines for writing robust and portable shell scripts, ensuring better code reliability and maintainability.
- [yaml_lint](https://www.yamllint.com/)
yamllint is a linter and validator specifically designed for YAML files. It helps ensure the correctness, consistency,
and adherence to best practices in YAML files. yamllint analyzes YAML content and provides feedback on syntax errors, formatting issues,
and potential problems.
By using yamllint, developers and configuration authors can catch common mistakes such as indentation errors, incorrect syntax,
duplicate keys, and inconsistent formatting. It enforces guidelines for maintaining clean and error-free YAML files, improving the overall
quality and reliability of configuration files.
- [powershell_lint](https://github.com/cypher0n3/psscriptanalyzer-docker)
PowerShell Script Analyzer (PSScriptAnalyzer) is a static analysis tool specifically built for PowerShell scripts.
It helps identify potential issues, coding style violations, and best practice violations in PowerShell scripts.
PSScriptAnalyzer analyzes PowerShell script files and provides feedback on code quality, potential bugs, and maintainability.
By using PSScriptAnalyzer, developers can catch common mistakes such as syntax errors, undefined variables,
unused variables, incorrect parameter usage, and overall script quality.
- [Terraform docs](https://terraform-docs.io/)
Terraform docs is a tool used for generating and validating Terraform module documentation. It helps ensure that module
documentation is accurate, well-formatted, and follows best practices. Terraform Docs analyzes the documentation
files associated with Terraform modules and provides feedback on formatting, content, and potential issues.
By using Terraform Docs, developers can catch common mistakes such as missing or incomplete descriptions, inconsistent
formatting, and incorrect usage examples. It enforces guidelines for maintaining clear, concise, and informative module
documentation, improving the overall quality of module documentation and enhancing collaboration among team members.

@@ -0,0 +1,10 @@
#ts:skip=AC_DOCKER_0041 Skip
ARG DRIFTCTL_IMAGE_TAG
FROM snyk/driftctl:${DRIFTCTL_IMAGE_TAG}
RUN apk add --no-cache aws-cli
RUN apk add --no-cache jq
USER app

25
automation/Driftctl/drifctl.sh Executable file
@@ -0,0 +1,25 @@
#!/usr/bin/env sh
#
# // Software Name : AWSTerraformStarterKit
# // SPDX-FileCopyrightText: Copyright (c) 2023 Orange Business
# // SPDX-License-Identifier: BSD-3-Clause
# //
# // This software is distributed under the BSD License;
# // see the LICENSE file for more details.
# //
# // Author: AWS Practice Team <awspractice.core@orange.com>
#
set -e
apk add --no-cache aws-cli
apk add --no-cache jq
printenv
aws sts get-caller-identity
STS_CREDS=$(aws sts assume-role --role-arn "${ROLE_TO_ASSUME}" --role-session-name "${AWS_ROLE_SESSION_NAME}")
unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
AWS_ACCESS_KEY_ID=$(echo "$STS_CREDS" | jq -r '.Credentials.AccessKeyId')
AWS_SECRET_ACCESS_KEY=$(echo "$STS_CREDS" | jq -r '.Credentials.SecretAccessKey')
AWS_SESSION_TOKEN=$(echo "$STS_CREDS" | jq -r '.Credentials.SessionToken')
export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
aws sts get-caller-identity
driftctl scan --only-managed --from tfstate+s3://"${BACKEND_BUCKET_NAME}"/*.tfstate

@@ -0,0 +1,15 @@
#!/usr/bin/env sh
#
# // Software Name : AWSTerraformStarterKit
# // SPDX-FileCopyrightText: Copyright (c) 2023 Orange Business
# // SPDX-License-Identifier: BSD-3-Clause
# //
# // This software is distributed under the BSD License;
# // see the LICENSE file for more details.
# //
# // Author: AWS Practice Team <awspractice.core@orange.com>
#
set -e
sed -i '/# Automatic Content Generated/Q' Makefile
sed -i '${/^[[:space:]]*$/d;}' Makefile

@@ -0,0 +1,32 @@
ARG BASH_VERSION
FROM "docker.io/bash:${BASH_VERSION}"
# Runtime dependencies
RUN apk add --no-cache --purge \
curl \
python3 py3-pip \
git \
wget \
;
ARG TFENV_VERSION
RUN wget -O /tmp/tfenv.tar.gz "https://github.com/tfutils/tfenv/archive/refs/tags/v${TFENV_VERSION}.tar.gz" \
&& tar -C /tmp -xf /tmp/tfenv.tar.gz \
&& mv "/tmp/tfenv-${TFENV_VERSION}/bin"/* /usr/local/bin/ \
&& mkdir -p /usr/local/lib/tfenv \
&& mv "/tmp/tfenv-${TFENV_VERSION}/lib" /usr/local/lib/tfenv/ \
&& mv "/tmp/tfenv-${TFENV_VERSION}/libexec" /usr/local/lib/tfenv/ \
&& mkdir -p /usr/local/share/licenses \
&& mv "/tmp/tfenv-${TFENV_VERSION}/LICENSE" /usr/local/share/licenses/tfenv \
&& rm -rf /tmp/tfenv* \
&& mkdir /var/tfenv \
&& chmod 777 /var/tfenv \
;
ENV TFENV_ROOT /usr/local/lib/tfenv
ENV TFENV_CONFIG_DIR /var/tfenv
VOLUME /var/tfenv
ENTRYPOINT ["/usr/local/bin/terraform"]

@@ -0,0 +1,16 @@
FROM alpine:3.18.0
ARG TFLINT_VERSION TFLINT_OS TFLINT_ARCH
RUN apk add --no-cache --purge wget
RUN wget -O /tmp/tflint.zip https://github.com/terraform-linters/tflint/releases/download/v${TFLINT_VERSION}/tflint_${TFLINT_OS}_${TFLINT_ARCH}.zip \
&& unzip /tmp/tflint.zip \
&& mv tflint /usr/local/bin \
&& chmod +x /usr/local/bin/tflint \
&& rm /tmp/tflint* \
&& mkdir /workdir \
&& chmod 777 /workdir
ENTRYPOINT ["tflint"]
WORKDIR /workdir

@@ -0,0 +1,16 @@
FROM alpine:3.18.0
ARG TERRAFORM_DOCS_VERSION TERRAFORM_DOCS_OS TERRAFORM_DOCS_ARCH
RUN apk add --no-cache --purge wget
RUN wget -O /tmp/terraform-docs.tar.gz https://github.com/terraform-docs/terraform-docs/releases/download/v${TERRAFORM_DOCS_VERSION}/terraform-docs-v${TERRAFORM_DOCS_VERSION}-${TERRAFORM_DOCS_OS}-${TERRAFORM_DOCS_ARCH}.tar.gz \
&& tar -xvzf /tmp/terraform-docs.tar.gz \
&& mv terraform-docs /usr/local/bin \
&& chmod +x /usr/local/bin/terraform-docs \
&& rm /tmp/terraform-docs* \
&& mkdir /workdir \
&& chmod 777 /workdir
ENTRYPOINT ["terraform-docs"]
WORKDIR /workdir

@@ -0,0 +1,26 @@
#ts:skip=AC_DOCKER_0041 Skip
ARG IMAGE_VERSION=3.11
FROM alpine:${IMAGE_VERSION}
# Install python3 and other deps
RUN apk add --no-cache python3 py3-pip
RUN pip3 --no-cache-dir install pip==22.2.2
# Set needed env vars
ENV SCRIPTS_DIR /scripts
ENV TEMPLATES_DIR /templates
ENV VARIABLES_DIR /variables
# Create folders
RUN mkdir /templates/
RUN mkdir /variables/
RUN mkdir /scripts/
RUN pip3 --no-cache-dir install jinja2-cli[yaml,toml,xml]==0.8.2
RUN pip3 --no-cache-dir install Jinja2==3.1.2
# Copy extra scripts: embedded render
COPY ./render.py $SCRIPTS_DIR/entities/render.py
RUN chmod +x $SCRIPTS_DIR/entities/render.py
ENTRYPOINT ["jinja2"]

68
automation/jinja2/render.py Executable file
@@ -0,0 +1,68 @@
#!/usr/bin/env python3
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
"""
Software Name : AWSTerraformStarterKit
SPDX-FileCopyrightText: Copyright (c) 2023 Orange Business
SPDX-License-Identifier: BSD-3-Clause
This software is distributed under the BSD License;
see the LICENSE file for more details.
Author: AWS Practice Team <awspractice.core@orange.com>
"""
import os
import sys
import jinja2
import yaml
class Render:
    TEMPLATES_DIR = os.environ.get('TEMPLATES_DIR') if os.environ.get('TEMPLATES_DIR') is not None else "/templates"

    def __init__(self, template_name=None, variables_path=None):
        self.template_name = template_name
        self.variables_path = variables_path
        self.env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(self.TEMPLATES_DIR),
            extensions=['jinja2.ext.loopcontrols'],
            autoescape=True,
            lstrip_blocks=True,
            newline_sequence="\n",
            trim_blocks=True,
            keep_trailing_newline=True
        )

    @staticmethod
    def yaml_filter(value):
        # Plain PyYAML dump; PyYAML has no RoundTripDumper (that class belongs to ruamel.yaml)
        return yaml.dump(value, indent=4)

    def rend_template(self):
        with open(self.variables_path, closefd=True) as f:
            data = yaml.full_load(f)
        self.env.filters['yaml'] = self.yaml_filter
        self.env.globals["environ"] = lambda key: os.environ.get(key)
        try:
            rendered = self.env.get_template(self.template_name).render(data)
        except Exception as e:
            raise e
        return rendered


def main():
    min_args = 3
    if len(sys.argv) < min_args:
        raise Exception(
            "Error: Expecting at least {} args. Got {}, args={}".format(min_args, len(sys.argv), sys.argv))
    sys.stdout.write(Render(sys.argv[1], sys.argv[2]).rend_template())


if __name__ == '__main__':
    main()

@@ -0,0 +1,94 @@
########################################################################################################################
# ENV Variables used for local development
# Use the command $make dotenv_linter to validate this file
########################################################################################################################
COMPOSE_PROJECT_NAME={{ COMPOSE_PROJECT_NAME }}
PROJECT_NAME={{ PROJECT_NAME }}
REGION={{ REGION }}
AWS_DEFAULT_REGION={{ REGION }}
# Name of the output of the terraform plan
PLAN_BINARY_FILE={{ PLAN_BINARY_FILE }}
PLAN_JSON_FILE={{ PLAN_JSON_FILE }}
########################################################################################################################
# GITLAB CI
# Use to validate the .gitlab-ci.yml file with the command $make gitlab_linter
########################################################################################################################
GENERATE_GITLAB_CI={{ GENERATE_GITLAB_CI | default(False) }}
{% if GITLAB_API_URL %}
GITLAB_API_URL={{ GITLAB_API_URL }}
{% endif %}
{% if GITLAB_TOKEN %}
# https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html
# SCOPE api and read_api
# Create a token for each project
GITLAB_TOKEN={{ GITLAB_TOKEN }}
{% endif %}
{% if CICD_RUNNER_TAGS %}
# Gitlab-ci runner tags, separate tags by comma (,)
CICD_RUNNER_TAGS={{ CICD_RUNNER_TAGS | join(',') }}
{% endif %}
{% if CICD_ROLE_NAME %}
CICD_ROLE_NAME={{ CICD_ROLE_NAME }}
{% endif %}
########################################################################################################################
# Docker Compose image tags to use
########################################################################################################################
TFENV_IMAGE_TAG={{ TFENV_IMAGE_TAG }}
PRECOMMIT_IMAGE_TAG={{ PRECOMMIT_IMAGE_TAG }}
TFLINT_IMAGE_TAG={{ TFLINT_IMAGE_TAG }}
DRIFTCTL_IMAGE_TAG={{ DRIFTCTL_IMAGE_TAG }}
DOTENV_LINTER_IMAGE_TAG={{ DOTENV_LINTER_IMAGE_TAG }}
MARKDOWN_LINTER_IMAGE_TAG={{ MARKDOWN_LINTER_IMAGE_TAG }}
SHELL_LINTER_IMAGE_TAG={{ SHELL_LINTER_IMAGE_TAG }}
YAML_LINTER_IMAGE_TAG={{ YAML_LINTER_IMAGE_TAG }}
POWERSHELL_LINTER_IMAGE_TAG={{ POWERSHELL_LINTER_IMAGE_TAG }}
JSON_LINTER_IMAGE_TAG={{ JSON_LINTER_IMAGE_TAG }}
TRIVY_IMAGE_TAG={{ TRIVY_IMAGE_TAG }}
TERRASCAN_IMAGE_TAG={{ TERRASCAN_IMAGE_TAG }}
########################################################################################################################
# Terraform Logs
########################################################################################################################
TF_LOG={{ TF_LOG }}
TF_LOG_PATH={{ TF_LOG_PATH }}
########################################################################################################################
# Terraform S3 Backend Configuration
########################################################################################################################
TF_VAR_backend_bucket_name={{ TF_VAR_backend_bucket_name }}
TF_VAR_backend_bucket_region={{ TF_VAR_backend_bucket_region }}
TF_VAR_backend_dynamodb_table={{ TF_VAR_backend_dynamodb_table }}
TF_VAR_backend_bucket_access_role={{ TF_VAR_backend_bucket_access_role }}
########################################################################################################################
# LOCAL DEVELOPMENT
########################################################################################################################
LOCAL_ROLE_NAME={{ LOCAL_ROLE_NAME }}
TF_VAR_assume_role={{ TF_VAR_assume_role }}
########################################################################################################################
# PLANS
########################################################################################################################
# List the folder of each Terraform plan to install, respecting the installation order
PLAN_INSTALL_LIST={{ plans | join(',') }}
# List the folder of each Terraform plan to uninstall, respecting the uninstallation order
PLAN_DELETE_LIST={{ plans | reverse | join(',') }}
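# Illustrative example (not from the original): with plans "terraform/network" and "terraform/demo",
# PLAN_INSTALL_LIST=terraform/network,terraform/demo and PLAN_DELETE_LIST lists the same folders in reverse order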
# AWS Account ID in which your plans are deployed and which contains the backend bucket
ACCOUNT_ID={{ ACCOUNT_ID }}
########################################################################################################################
# PROXY
########################################################################################################################
HTTP_PROXY={{ HTTP_PROXY }}
HTTPS_PROXY={{ HTTPS_PROXY }}
MY_UID={{ MY_UID }}
MY_GID={{ MY_GID }}

View file

@ -0,0 +1,468 @@
---
variables:
PHASE: BUILD
CICD_MODE: "true"
GIT_SUBMODULE_STRATEGY: "recursive"
PRINT_DEBUG: "false"
{% if TF_TOKEN_MODULE_ACCESS %}
{{ TF_TOKEN_MODULE_ACCESS_KEY }}: {{ TF_TOKEN_MODULE_ACCESS_VALUE }}
{% endif %}
# Docker Image Tag
TFLINT_IMAGE_TAG: {{ environ('TFLINT_IMAGE_TAG') }}
PRECOMMIT_IMAGE_TAG: {{ environ('PRECOMMIT_IMAGE_TAG') }}
TFENV_IMAGE_TAG: {{ environ('TFENV_IMAGE_TAG') }}
DRIFTCTL_IMAGE_TAG: {{ environ('DRIFTCTL_IMAGE_TAG') }}
# NEED TO BE CHANGED FOR EACH PROJECT
PROJECT_NAME: {{ environ('PROJECT_NAME') }}
REGION: {{ environ('REGION') }}
TF_VAR_assume_role: {{ environ('CICD_ROLE_NAME') }}
TF_VAR_backend_bucket_name: {{ environ('TF_VAR_backend_bucket_name') }}
TF_VAR_backend_bucket_region: {{ environ('REGION') }}
TF_VAR_backend_dynamodb_table: {{ environ('TF_VAR_backend_dynamodb_table')}}
TF_VAR_backend_bucket_access_role: "arn:aws:iam::{{ environ('ACCOUNT_ID') }}:role/{{ environ('CICD_ROLE_NAME') }}"
PLAN_BINARY_FILE: {{ environ('PLAN_BINARY_FILE') }}
PLAN_JSON_FILE: {{ environ('PLAN_JSON_FILE') }}
ACCOUNT_ID: {{ environ('ACCOUNT_ID') }}
CICD_ROLE_NAME: {{ environ('CICD_ROLE_NAME') }}
{%- set plans_install = environ('PLAN_INSTALL_LIST').split(',') -%}
{%- set plans_delete = environ('PLAN_DELETE_LIST').split(',') %}
{%- set runners = environ('CICD_RUNNER_TAGS').split(',') %}
cache:
key: $CI_COMMIT_REF_SLUG
paths:
- creds.env
stages:
- aws-creds
- quality-checks
- drift
- plan
- tests
- apply
- delete
- clean-cache
########################################################################################################################
# FUNCTIONS
########################################################################################################################
.aws-cli:
before_script:
- yum install -y jq
image:
name: amazon/aws-cli:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
.aws_get_creds: &aws_get_creds
script: |
#!/usr/bin/env bash
    echo "Getting temporary credentials for the assumed role"
    STS_CREDS=$(aws sts assume-role --role-arn "arn:aws:iam::${ACCOUNT_ID}:role/${CICD_ROLE_NAME}" --role-session-name "${CI_COMMIT_SHA}")
AWS_ACCESS_KEY_ID=$(echo $STS_CREDS | jq -r '.Credentials.AccessKeyId')
AWS_SECRET_ACCESS_KEY=$(echo $STS_CREDS | jq -r '.Credentials.SecretAccessKey')
AWS_SESSION_TOKEN=$(echo $STS_CREDS | jq -r '.Credentials.SessionToken')
echo "export AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID" > creds.env
echo "export AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY" >> creds.env
echo "export AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN" >> creds.env
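# Note: the credentials written to creds.env above are shared with later jobs through the
# $CI_COMMIT_REF_SLUG cache declared at the top of this file; each job loads them with ". ./creds.env".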
.terraform-base:
before_script:
- rm -rf .terraform
image:
name: marmarama/tfenv:$TFENV_IMAGE_TAG
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/tfenv/bin/'
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% if GITLAB_JOBS["terraform-trivy"] %}
.terraform-trivy:
before_script:
- rm -rf .terraform
- . ./creds.env
- apk --no-cache --update add make
image:
name: aquasec/trivy:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/tfenv/bin/'
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["terraform-lint"] %}
.terraform-lint:
image:
name: ghcr.io/terraform-linters/tflint:${TFLINT_IMAGE_TAG}
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
before_script:
- apk --no-cache --update add make
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["shell-lint"] %}
.shelllint:
image:
name: pipelinecomponents/shellcheck:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["precommit"] %}
.precommit:
image:
name: ghcr.io/antonbabenko/pre-commit-terraform:${PRECOMMIT_IMAGE_TAG}
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["yaml-lint"] %}
.yamllint:
image:
name: pipelinecomponents/yamllint:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
before_script:
- apk --no-cache --update add make
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["terraform-compliance"] %}
.terraform-compliance:
image:
name: eerkunt/terraform-compliance:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
before_script:
- apt-get update && apt-get install -y make
- . ./creds.env
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["terraform-terrascan"] %}
.terraform-terrascan:
image:
name: tenable/terrascan:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
before_script:
- . ./creds.env
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
{% if GITLAB_JOBS["md-lint"] %}
.md_lint:
image:
name: pipelinecomponents/markdownlint:latest
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/app/bin'
before_script:
- apk --no-cache --update add make
- . ./creds.env
tags:
{% for runner in runners %}
- {{ runner }}
{% endfor %}
{% endif %}
########################################################################################################################
# COMMONS
########################################################################################################################
{% if GITLAB_JOBS["aws-creds"] %}
aws-creds:
extends: .aws-cli
stage: aws-creds
<<: *aws_get_creds
{% endif %}
{% if GITLAB_JOBS["terraform-compliance"] %}
########################################################################################################################
# TESTS
########################################################################################################################
terraform-compliance:
needs: [plan_all]
extends: .terraform-compliance
allow_failure: true
stage: tests
script:
- make terraform_compliance
{% endif %}
########################################################################################################################
# QUALITY CHECKS
########################################################################################################################
{% if GITLAB_JOBS["terraform-lint"] %}
terraform-lint:
needs: []
extends: .terraform-lint
allow_failure: true
stage: quality-checks
script:
- make lint
{% endif %}
{% if GITLAB_JOBS["precommit"] %}
precommit:
needs: []
extends: .precommit
allow_failure: true
stage: quality-checks
before_script:
- apk --no-cache --update add make
script:
- make precommit
{% endif %}
{% if GITLAB_JOBS["terraform-format"] %}
terraform-format:
needs: []
extends: .terraform-base
allow_failure: true
stage: quality-checks
script:
- make format
{% endif %}
{% if GITLAB_JOBS["terraform-validate"] %}
terraform-validate:
needs: []
extends: .terraform-base
allow_failure: true
before_script:
- . ./creds.env
stage: quality-checks
script:
- make validate
{% endif %}
{% if GITLAB_JOBS["terraform-terrascan"] %}
terraform-terrascan:
needs: []
extends: .terraform-terrascan
allow_failure: true
stage: quality-checks
script:
- terrascan scan -i terraform --verbose --config-path=./.terrascan_config.toml {% for plan_name in plans_install %} --iac-dir={{ plan_name }}{% endfor %}
{% endif %}
{% if GITLAB_JOBS["md-lint"] %}
md-lint:
needs: []
extends: .md_lint
allow_failure: true
stage: quality-checks
script:
- make markdown_lint
{% endif %}
{% if GITLAB_JOBS["shell-lint"] %}
shell-lint:
needs: []
extends: .shelllint
allow_failure: true
before_script:
- . ./creds.env
- apk --no-cache --update add make
stage: quality-checks
script:
- make shell_lint
{% endif %}
{% if GITLAB_JOBS["yaml-lint"] %}
yaml-lint:
needs: []
extends: .yamllint
allow_failure: true
stage: quality-checks
script:
- make yaml_lint
{% endif %}
{% if GITLAB_JOBS["terraform-trivy"] %}
terraform-trivy:
needs: []
extends: .terraform-trivy
allow_failure: true
stage: quality-checks
script:
- make trivy
{% endif %}
{% if GITLAB_JOBS["driftctl"] %}
########################################################################################################################
# DRIFT Detection
########################################################################################################################
driftctl:
stage: drift
needs: [aws-creds]
allow_failure: true
image:
name: snyk/driftctl:$DRIFTCTL_IMAGE_TAG
entrypoint: [""]
variables:
AWS_DEFAULT_REGION: $REGION
ROLE_TO_ASSUME: ${TF_VAR_backend_bucket_access_role}
    AWS_ROLE_SESSION_NAME: "sessiondriftctl"
before_script:
- . ./creds.env
- apk add --no-cache aws-cli
- apk add --no-cache jq
script: |
#!/usr/bin/env bash
    echo "Getting temporary credentials for the assumed role"
aws sts get-caller-identity
STS_CREDS=$(aws sts assume-role --role-arn ${ROLE_TO_ASSUME} --role-session-name ${AWS_ROLE_SESSION_NAME})
AWS_ACCESS_KEY_ID=$(echo $STS_CREDS | jq -r '.Credentials.AccessKeyId')
AWS_SECRET_ACCESS_KEY=$(echo $STS_CREDS | jq -r '.Credentials.SecretAccessKey')
AWS_SESSION_TOKEN=$(echo $STS_CREDS | jq -r '.Credentials.SessionToken')
aws sts get-caller-identity
driftctl scan --only-managed --from tfstate+s3://"${TF_VAR_backend_bucket_name}"/*.tfstate
{% endif %}
########################################################################################################################
# PLAN
########################################################################################################################
.plan_job: &plan_job
extends: .terraform-base
stage: plan
needs: [aws-creds]
allow_failure: false
before_script:
- . ./creds.env
{% for plan_name in plans_install +%}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
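{# Illustrative: a plan folder such as "terraform/demo" yields the slug "terraform_demo" and the job name plan_terraform_demo #}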
plan_{{ slug }}:
<<: *plan_job
script:
- make plan_{{ slug }}
artifacts:
paths:
- {{ plan_name }}/{{ environ('PLAN_BINARY_FILE') }}
- {{ plan_name }}/{{ environ('PLAN_JSON_FILE') }}
only:
changes:
- {{ plan_name }}/**/*
{% endfor %}
{% if GITLAB_JOBS["plan_all"] %}
plan_all:
<<: *plan_job
script:
- make plan_all
artifacts:
paths:
- ./**/{{ environ('PLAN_BINARY_FILE') }}
- ./**/{{ environ('PLAN_JSON_FILE') }}
{% endif %}
########################################################################################################################
# APPLY
########################################################################################################################
.apply_job: &apply_job
extends: .terraform-base
stage: apply
allow_failure: false
when: manual
before_script:
- . ./creds.env
{% for plan_name in plans_install +%}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
apply_{{ slug }}:
<<: *apply_job
needs: [plan_{{ slug }}]
script:
- make install_{{ slug }}
only:
{% if TF_APPLY_ONLY_MAIN %}
refs:
- main
{% endif %}
changes:
- {{ plan_name }}/**/*
{% endfor %}
{% if GITLAB_JOBS["apply_all"] %}
apply_all:
<<: *apply_job
needs: [plan_all]
script:
- make install_all
{% if TF_APPLY_ONLY_MAIN %}
only:
refs:
- main
{% endif %}
{% endif %}
########################################################################################################################
# DELETE
########################################################################################################################
.delete_job: &delete_job
extends: .terraform-base
allow_failure: false
stage: delete
when: manual
before_script:
- . ./creds.env
only:
variables:
- $PHASE == "DESTROY"
{% for plan_name in plans_delete +%}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
delete_{{ slug }}:
<<: *delete_job
script:
- make delete_{{ slug }}
{% endfor %}
{% if GITLAB_JOBS["delete_all"] %}
delete_all:
<<: *delete_job
script:
- make delete_all
{% endif %}

View file

@ -0,0 +1,96 @@
{%- set plans_install = environ('PLAN_INSTALL_LIST').split(',') -%}
{%- set plans_delete = environ('PLAN_DELETE_LIST').split(',') %}
# Automatic Content Generated
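# Note: each plan folder (e.g. terraform/demo) is turned into a slug (terraform_demo) by the loops
# below, which generate the per-plan init_/plan_/install_/destroy_ targets.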
generate_documentation: ## Generate Terraform Documentation
generate_documentation:
{% for plan_name in plans_install %}
$(DOCKER_COMPOSE_DEV_TOOLS) run --rm --remove-orphans terraform_docs {{ plan_name }} --config=./.config/.terraform-docs.yml
{% endfor %}
terraform_terrascan: ## Terrascan Terraform
terraform_terrascan:
$(TERRASCAN_RUN) scan -i terraform --verbose --config-path=./.terrascan_config.toml {% for plan_name in plans_install %} --iac-dir={{ plan_name }} {% endfor %}
format: ## Format all Terraform files using "terraform fmt"
format:
{% for plan_name in plans_install %}
@$(MAKE) --no-print-directory terraform_format CURRENT_DIR="{{ plan_name }}"
{% endfor %}
trivy: ## Terraform Trivy
trivy:
{% for plan_name in plans_install %}
$(TRIVY_RUN) config {{ plan_name }} --config=./.config/.trivy.yaml --skip-dirs .terraform
{% endfor %}
validate: ## Validate all Terraform files using "terraform validate"
validate:
{% for plan_name in plans_install %}
@$(MAKE) --no-print-directory terraform_validate CURRENT_DIR="{{ plan_name }}"
{% endfor %}
lint: ## Check that good naming practices are respected in Terraform files (using tflint)
lint:
$(TFLINT_RUN) --init
{% for plan_name in plans_install %}
@$(MAKE) --no-print-directory terraform_lint CURRENT_DIR="{{ plan_name }}"
{% endfor %}
{% for plan_name in plans_install %}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
init_{{ slug }}: ## Init AWS {{ plan_name }} layer
init_{{ slug }}:
@$(MAKE) --no-print-directory CURRENT_DIR={{ plan_name }} terraform_init_commands
{% endfor %}
{% for plan_name in plans_install %}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
plan_{{ slug }}: ## Plan AWS {{ plan_name }} layer
plan_{{ slug }}:
@$(MAKE) --no-print-directory CURRENT_DIR={{ plan_name }} terraform_plan_commands
{% endfor %}
{% for plan_name in plans_install %}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
install_{{ slug }}: ## Install AWS {{ plan_name }} layer
install_{{ slug }}:
@$(MAKE) --no-print-directory CURRENT_DIR={{ plan_name }} terraform_install_commands
{% endfor %}
{% for plan_name in plans_install %}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
destroy_{{ slug }}: ## Uninstall AWS {{ plan_name }} layer
destroy_{{ slug }}:
@$(MAKE) --no-print-directory CURRENT_DIR={{ plan_name }} terraform_destroy_commands
{% endfor %}
init_all: ## Init all AWS layers
init_all:
{% for plan_name in plans_install %}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
@$(MAKE) --no-print-directory init_{{ slug }}
{% endfor %}
plan_all: ## Plan all AWS layers
plan_all:
{% for plan_name in plans_install %}
{% set path = plan_name.split('/') %}
{% set slug = plan_name.replace('/',"_") %}
@$(MAKE) --no-print-directory plan_{{ slug }}
{% endfor %}
install_all: ## Install all AWS layers
install_all: {% for plan_name in plans_install %}{% set slug = plan_name.replace('/',"_") %}{% set path = plan_name.split('/') %}install_{{ slug }} {% endfor %}
destroy_all: ## Uninstall all layers
destroy_all: {% for plan_name in plans_delete %}{% set slug = plan_name.replace('/',"_") %}{% set path = plan_name.split('/') %}destroy_{{ slug }} {% endfor %}

View file

@ -0,0 +1,2 @@
---
comment: "This is a fake variable"

111
configure.yaml.dist Normal file
View file

@ -0,0 +1,111 @@
########################################################################################################################
# ENV Variables used for local development
# Use the command $make dotenv_linter to validate this file
########################################################################################################################
PROJECT_NAME: starterkit
COMPOSE_PROJECT_NAME: starterkit
REGION: eu-west-3
# Name of the output of the terraform plan
PLAN_BINARY_FILE: tfplan.binary
PLAN_JSON_FILE: tfplan.json
########################################################################################################################
# Docker Compose image tags to use
########################################################################################################################
TFENV_IMAGE_TAG: latest
TFLINT_IMAGE_TAG: v0.39.3
PRECOMMIT_IMAGE_TAG: v1.74.1
DRIFTCTL_IMAGE_TAG: latest
DOTENV_LINTER_IMAGE_TAG: latest
MARKDOWN_LINTER_IMAGE_TAG: latest
SHELL_LINTER_IMAGE_TAG: latest
YAML_LINTER_IMAGE_TAG: latest
POWERSHELL_LINTER_IMAGE_TAG: latest
JSON_LINTER_IMAGE_TAG: latest
TRIVY_IMAGE_TAG: latest
TERRASCAN_IMAGE_TAG: latest
########################################################################################################################
# GITLAB CI
# Used to validate the .gitlab-ci.yml file with the command $make gitlab_linter
########################################################################################################################
GENERATE_GITLAB_CI: False
GITLAB_API_URL:
# https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html
# SCOPE api and read_api
# Create a token for each project
GITLAB_TOKEN:
CICD_RUNNER_TAGS:
- aws
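# Illustrative example (not part of the shipped defaults): several runner tags can be listed, e.g.
# CICD_RUNNER_TAGS:
#   - aws
#   - docker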
GITLAB_JOBS:
aws-creds: True
terraform-lint: True
precommit: True
terraform-format: True
terraform-validate: True
terraform-terrascan: True
md-lint: True
shell-lint: True
yaml-lint: True
terraform-trivy: True
driftctl: True
plan_all: True
apply_all: True
delete_all: True
CICD_ROLE_NAME: Vocalcom-CiCd-CrossAccountRole
# Run Terraform apply only on main branch
TF_APPLY_ONLY_MAIN: True
# Set the Terraform token key and value to access Terraform modules stored on GitLab
TF_TOKEN_MODULE_ACCESS: False
TF_TOKEN_MODULE_ACCESS_KEY:
TF_TOKEN_MODULE_ACCESS_VALUE:
########################################################################################################################
# Terraform Logs
########################################################################################################################
TF_LOG: TRACE
TF_LOG_PATH: /tmp/terraform_logs
########################################################################################################################
# Terraform S3 Backend Configuration
########################################################################################################################
TF_VAR_backend_bucket_name: tfstate-sso-900894545262-primary
TF_VAR_backend_bucket_region: eu-west-3
TF_VAR_backend_dynamodb_table: tfstate-terraformstarterkit-lock
TF_VAR_backend_bucket_access_role: arn:aws:iam::538728450418:role/vcl-platformadmin-role
########################################################################################################################
# LOCAL DEVELOPMENT
########################################################################################################################
LOCAL_ROLE_NAME: vcl-platformadmin-role
TF_VAR_assume_role: vcl-platformadmin-role
########################################################################################################################
# PLANS
########################################################################################################################
# List the folder of each Terraform plan to install, respecting the installation order
plans:
- terraform/demo
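# Illustrative example (not part of the shipped defaults): several plans can be listed; they are
# installed in this order and deleted in the reverse order, e.g.
# plans:
#   - terraform/network
#   - terraform/demo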
# AWS Account ID in which your plans are deployed and which contains the backend bucket
# SSO Account
ACCOUNT_ID: 900894545262
########################################################################################################################
# PROXY
########################################################################################################################
HTTP_PROXY: ""
HTTPS_PROXY: ""
########################################################################################################################
# Unix User ID and Group ID the Dockers should run with
########################################################################################################################
MY_UID: 1000
MY_GID: 1000

140
docker-compose-tools.yml Normal file
View file

@ -0,0 +1,140 @@
---
services:
precommit:
container_name: ${COMPOSE_PROJECT_NAME}_precommit
image: ghcr.io/antonbabenko/pre-commit-terraform:${PRECOMMIT_IMAGE_TAG}
user: root
environment:
PRE_COMMIT_HOME: /tmp
volumes:
- ./:/lint
- /tmp:/tmp
working_dir: /lint
lint:
container_name: ${COMPOSE_PROJECT_NAME}_lint
build:
dockerfile: automation/TFlint/Dockerfile
context: .
args:
- TFLINT_VERSION=0.46.1
- TFLINT_OS=linux
- TFLINT_ARCH=amd64
image: tflint:0.46.1
user: ${MY_UID}:${MY_GID}
environment:
- HOME=/workdir
env_file:
- .env
volumes:
- /etc/passwd:/etc/passwd:ro
- /etc/group:/etc/group:ro
- ./:/workdir
working_dir: /workdir
dotenv-linter:
container_name: ${COMPOSE_PROJECT_NAME}_dotenv_linter
image: dotenvlinter/dotenv-linter:${DOTENV_LINTER_IMAGE_TAG}
volumes:
- .env:/app/.env
working_dir: /app
markdown_lint:
container_name: ${COMPOSE_PROJECT_NAME}_markdown_lint
image: pipelinecomponents/markdownlint:${MARKDOWN_LINTER_IMAGE_TAG}
volumes:
- ./:/md
working_dir: /md
shell_lint:
container_name: ${COMPOSE_PROJECT_NAME}_shell_lint
image: pipelinecomponents/shellcheck:${SHELL_LINTER_IMAGE_TAG}
user: ${MY_UID}:${MY_GID}
environment:
- HOME=/app
env_file:
- .env
volumes:
- /etc/passwd:/etc/passwd:ro
- /etc/group:/etc/group:ro
- ./:/app
working_dir: /app
yaml_lint:
container_name: ${COMPOSE_PROJECT_NAME}_yaml_lint
image: pipelinecomponents/yamllint:${YAML_LINTER_IMAGE_TAG}
user: ${MY_UID}:${MY_GID}
environment:
- HOME=/app
env_file:
- .env
volumes:
- /etc/passwd:/etc/passwd:ro
- /etc/group:/etc/group:ro
- ./:/code
working_dir: /code
powershell_lint:
container_name: ${COMPOSE_PROJECT_NAME}_powershell_lint
image: cypher0n3/psscriptanalyzer-docker:${POWERSHELL_LINTER_IMAGE_TAG}
volumes:
- ./:/app
working_dir: /app
json_lint:
container_name: ${COMPOSE_PROJECT_NAME}_json_lint
image: pipelinecomponents/jsonlint:${JSON_LINTER_IMAGE_TAG}
volumes:
- ./:/code
working_dir: /code
trivy:
container_name: ${COMPOSE_PROJECT_NAME}_trivy
image: aquasec/trivy:${TRIVY_IMAGE_TAG}
volumes:
- ./:/myapp
working_dir: /myapp
terrascan:
container_name: ${COMPOSE_PROJECT_NAME}_terrascan
image: tenable/terrascan:${TERRASCAN_IMAGE_TAG}
volumes:
- ./:/myapp
working_dir: /myapp
jinja2docker:
container_name: jinja2docker
build:
dockerfile: Dockerfile
context: ./automation/jinja2/
hostname: jinja2docker
entrypoint: python3 /scripts/entities/render.py
#entrypoint: tail -f /etc/alpine-release
env_file:
- .env
volumes:
- ./automation/jinja2/templates:/templates
- ./automation/jinja2/variables:/variables
      - ./automation/jinja2/render.py:/scripts/entities/render.py
terraform_docs:
container_name: ${COMPOSE_PROJECT_NAME}_terraform_docs
build:
dockerfile: automation/TerraformDocs/Dockerfile
context: .
args:
- TERRAFORM_DOCS_VERSION=0.16.0
- TERRAFORM_DOCS_OS=linux
- TERRAFORM_DOCS_ARCH=amd64
image: terraforms-docs:0.16.0
user: ${MY_UID}:${MY_GID}
environment:
HOME: /workdir
env_file:
- .env
volumes:
- /etc/passwd:/etc/passwd:ro
- /etc/group:/etc/group:ro
- ./:/workdir
working_dir: /workdir

26
docker-compose.yml Normal file
View file

@ -0,0 +1,26 @@
---
services:
terraform:
container_name: ${COMPOSE_PROJECT_NAME}_terraform
build:
dockerfile: automation/TFEnv/Dockerfile
args:
- TFENV_VERSION=3.0.0
- BASH_VERSION=5
image: tfenv:3.0.0
user: ${MY_UID}:${MY_GID}
environment:
AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
AWS_SESSION_TOKEN: ${AWS_SESSION_TOKEN}
env_file:
- .env
volumes:
- /etc/passwd:/etc/passwd:ro
- /etc/group:/etc/group:ro
- ./:/workdir
- $HOME/.terraformrc:$HOME/.terraformrc
- $HOME/.terraform.d:$HOME/.terraform.d
working_dir: /workdir
entrypoint: [ "tail", "-f", "/dev/null" ]
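# Illustrative usage (the Makefile presumably wraps commands along these lines):
#   docker compose up -d terraform
#   docker compose exec terraform terraform -chdir=terraform/demo plan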

33
get-starter-kit.sh Executable file
View file

@ -0,0 +1,33 @@
#!/usr/bin/env bash
#
# // Software Name : AWSTerraformStarterKit
# // SPDX-FileCopyrightText: Copyright (c) 2023 Orange Business
# // SPDX-License-Identifier: BSD-3-Clause
# //
# // This software is distributed under the BSD License;
# // see the LICENSE file for more details.
# //
# // Author: AWS Practice Team <awspractice.core@orange.com>
#
set -o errexit -o nounset -o pipefail
# Set Starterkit version
STARTER_KIT_VERSION="v0.0.8"
# Set GitLab URL and GitLab project ID (AWS Terraform StarterKit project ID)
GITLAB_URL="git.mydomain.com"
GITLAB_PROJECT_ID="27"
# Set GitLab HTTP token
GITLAB_TOKEN="YOUR_GITLAB_HTTP_TOKEN"
curl --header "PRIVATE-TOKEN: ${GITLAB_TOKEN}" \
"https://${GITLAB_URL}/api/v4/projects/${GITLAB_PROJECT_ID}/repository/archive.zip?sha=${STARTER_KIT_VERSION}" \
-o /tmp/archive.zip
unzip /tmp/archive.zip -d .
cp -r awsterraformstarterkit-*/. .
rm -rf awsterraformstarterkit-*
rm /tmp/archive.zip

16
remove-starter-kit.sh Executable file
View file

@ -0,0 +1,16 @@
#!/usr/bin/env bash
#
# // Software Name : AWSTerraformStarterKit
# // SPDX-FileCopyrightText: Copyright (c) 2023 Orange Business
# // SPDX-License-Identifier: BSD-3-Clause
# //
# // This software is distributed under the BSD License;
# // see the LICENSE file for more details.
# //
# // Author: AWS Practice Team <awspractice.core@orange.com>
#
set -o errexit -o nounset -o pipefail
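# Remove everything the StarterKit shipped while keeping project-owned paths
# (.git, .gitignore, .idea, terraform/, README.md, configure.yaml and configure.yaml.dist)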
ls -A | grep -xvi ".gitignore\|.git\|.idea\|terraform\|README.md\|configure.yaml\|configure.yaml.dist" | xargs rm -rfv

View file

@ -0,0 +1 @@
1.4.2

38
terraform/demo/README.md Normal file
View file

@ -0,0 +1,38 @@
# Demo project plan
<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
## Requirements
| Name | Version |
|------|---------|
| <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | ~> 1.4.0 |
| <a name="requirement_aws"></a> [aws](#requirement\_aws) | >= 4.20.0 |
| <a name="requirement_random"></a> [random](#requirement\_random) | 3.4.2 |
## Providers
| Name | Version |
|------|---------|
| <a name="provider_aws"></a> [aws](#provider\_aws) | 5.1.0 |
## Modules
No modules.
## Resources
| Name | Type |
|------|------|
| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
| [aws_ssoadmin_instances.ssoadmin](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ssoadmin_instances) | data source |
## Inputs
| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| <a name="input_region"></a> [region](#input\_region) | The name of the region. | `string` | n/a | yes |
## Outputs
No outputs.
<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->

3
terraform/demo/main.tf Normal file
View file

@ -0,0 +1,3 @@
data "aws_ssoadmin_instances" "ssoadmin" {}
data "aws_caller_identity" "current" {}

View file

@ -0,0 +1,7 @@
provider "aws" {
region = var.region
default_tags {
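    # Illustrative example (not in the original): project-wide tags could be declared here, e.g.
    # tags = {
    #   Project = "starterkit"
    # }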
}
}

View file

@ -0,0 +1,10 @@
/*terraform {
backend "s3" {
bucket = ""
region = ""
key = ""
encrypt = true
dynamodb_table = ""
}
}
*/
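# Illustrative example only (placeholder values): once enabled, the block would mirror the
# TF_VAR_backend_* settings from configure.yaml, e.g.
# terraform {
#   backend "s3" {
#     bucket         = "tfstate-example-bucket"
#     region         = "eu-west-3"
#     key            = "terraform/demo/terraform.tfstate"
#     encrypt        = true
#     dynamodb_table = "tfstate-example-lock"
#   }
# }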

View file

@ -0,0 +1,4 @@
variable "region" {
type = string
description = "The name of the region."
}
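# Illustrative example (assumption, not from the original): the value can be supplied via
# TF_VAR_region or a terraform.tfvars file, e.g.
# region = "eu-west-3"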

View file

@ -0,0 +1,10 @@
terraform {
required_version = "~> 1.4.0"
required_providers {
aws = ">= 4.20.0"
random = {
source = "hashicorp/random"
version = "3.4.2"
}
}
}