Merge branch 'MON-494_mutualize_scripts' into 'master'

Resolve MON-494 "Mutualize scripts"

Closes MON-494

See merge request claranet/pt-monitoring/projects/datadog/terraform/monitors!101
This commit is contained in:
Quentin Manfroi 2019-08-26 19:57:14 +02:00
commit 29a5cdb4fb
14 changed files with 7 additions and 558 deletions

View File

@ -1,5 +1,8 @@
image: alpine:latest
variables:
GIT_SUBMODULE_STRATEGY: recursive
stages:
- test
- build

3
.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
[submodule "scripts"]
path = scripts
url = ../scripts

1
scripts Submodule

@ -0,0 +1 @@
Subproject commit 90ac42971e081a8142592a471eec5254a9e7dc85

View File

@ -1,51 +0,0 @@
#!/bin/bash
# Check that every tool required by the other scripts is installed and
# recent enough (terraform, terraform-docs, terraform-config-inspect, jq).
source "$(dirname "$0")/utils.sh"
init

echo "Check requirements"

# Abort with exit code 1 if the given command is not available in PATH.
function check_command() {
  local cmd="$1"
  if ! command -v "${cmd}" > /dev/null 2>&1; then
    echo "This requires ${cmd} command, please install it first."
    exit 1
  fi
}

# Return success if version $1 <= $2 (version-aware comparison via sort -V).
function verlte() {
  [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]
}

# Return success if version $1 < $2 (strictly lower).
function verlt() {
  [ "$1" = "$2" ] && return 1 || verlte "$1" "$2"
}

# Compare the installed version of tool $1 against the minimum required one
# and abort with exit code 2 when it is too old. Only terraform and
# terraform-docs are version-checked; any other command is accepted as-is.
function check_version() {
  if [[ "$1" == "terraform" ]]; then
    # run from an empty temp dir so terraform does not pick up local state
    tmp_dir=$(mktemp -d)
    cd "${tmp_dir}"
    set +o pipefail # terraform fails on command piping when not last version
    cur_ver=$(terraform version | head -n 1 | cut -d' ' -f2)
    set -o pipefail
    cur_ver=${cur_ver#"v"}
    cd - > /dev/null
    rm -fr "${tmp_dir}"
    # the required terraform version is documented in the repository README
    req_ver=$(grep required_version README.md | awk '{print $4}')
    req_ver=${req_ver%'"'}
  elif [[ "$1" == "terraform-docs" ]]; then
    req_ver="0.6.0"
    cur_ver=$(terraform-docs --version)
  else
    return 0
  fi
  if ! verlte "${req_ver}" "${cur_ver}"; then
    echo "This requires at least version ${req_ver} of $1, please upgrade (current version is ${cur_ver})"
    exit 2
  fi
}

for cmd in terraform terraform-docs terraform-config-inspect jq; do
  echo -e "\t- Check command \"$cmd\" exists and in right version"
  check_command "$cmd"
  check_version "$cmd"
done

View File

@ -1,27 +0,0 @@
#!/bin/bash
# Regenerate outputs.tf for every monitors module: one "<monitor>_id" output
# block per datadog_monitor resource reported by terraform-config-inspect.
source "$(dirname "$0")/utils.sh"
init

echo "Generate terraform outputs.tf files for every monitors modules"

# loop over every modules
for module in $(browse_modules "$(get_scope ${1:-})" 'monitors-*.tf'); do
  echo -e "\t- Generate outputs.tf for module: ${module}"
  cd "${module}"
  # empty outputs
  > outputs.tf
  # gather a information line splitted with "|" for every monitor
  for row in $(terraform-config-inspect --json | jq -c -r '.managed_resources | map([.name] | join("|")) | join("\n")'); do
    # split line for each info one variable (-r: do not mangle backslashes)
    IFS='|' read -r monitor type < <(echo "$row")
    # create output block for current monitor
    cat >> outputs.tf <<EOF
output "${monitor}_id" {
  description = "id for monitor ${monitor}"
  value = datadog_monitor.${monitor}.*.id
}
EOF
  done
  cd - > /dev/null
done

View File

@ -1,28 +0,0 @@
#!/bin/bash
# Rebuild the monitors sets list in the global README.md by browsing the
# module directory tree and appending one markdown link per directory.
source "$(dirname "$0")/utils.sh"
init

echo "Update global README.md"

# only keep current README from begining to "Monitors summary" section (delete monitors list)
sed -i '/### Monitors summary ###/q' README.md
# add a newline after listing section
echo >> README.md

# loop over path of modules tree
for path in $(find -mindepth 1 -type d ! -path '*/.*' ! -path './scripts*' -print | sort -fdbi); do
  # split path in directories (word-splitting into the array is intentional)
  directories=($(list_dirs "$path"))
  # loop over directories in path
  for i in $(seq 1 $((${#directories[@]}-1))); do
    ## add tabulation for every subdirectory
    echo -en "\t" >> README.md
  done
  # add link to list of monitors sets
  echo -en "- [$(basename "${path}")](https://git.fr.clara.net/claranet/pt-monitoring/projects/datadog/terraform/monitors/tree/master/" >> README.md
  # add path to link
  for directory in "${directories[@]}"; do
    echo -en "${directory}/" >> README.md
  done
  # end of markdown link
  echo ")" >> README.md
done

View File

@ -1,109 +0,0 @@
#!/bin/bash
# (Re)generate README.md for every monitors module: usage snippet, sorted
# monitors list, terraform-docs inputs/outputs table, and restoration of the
# manually maintained "Related documentation" section from the previous file.
source "$(dirname "$0")/utils.sh"
init

echo "Update README.md for every monitors modules"

# download awk script to hack terraform-docs
TERRAFORM_AWK="/tmp/terraform-docs.awk"
curl -Lso "${TERRAFORM_AWK}" "https://raw.githubusercontent.com/cloudposse/build-harness/master/bin/terraform-docs.awk"

# this is the pattern from where custom information is saved to be restored
PATTERN_DOC="Related documentation"

# loop over every modules
for module in $(browse_modules "$(get_scope ${1:-})" 'monitors-*.tf'); do
  echo -e "\t- Generate README.md for module: ${module}"
  cd "${module}"
  # keep a backup of the current README so custom sections can be restored
  EXIST=0
  if [ -f README.md ]; then
    mv README.md README.md.bak
    EXIST=1
  fi
  # module name from path
  module_space=$(list_dirs "${module}")
  # module name with space as separator
  module_upper=${module_space^^}
  # module name with dash as separator
  module_dash=${module_space//[ ]/-}
  # module name with slash as separator
  module_slash=${module_space//[ ]/\/}
  # (re)generate README from scratch
  cat <<EOF > README.md
# ${module_upper} DataDog monitors
## How to use this module
\`\`\`
module "datadog-monitors-${module_dash}" {
  source = "git::ssh://git@git.fr.clara.net/claranet/pt-monitoring/projects/datadog/terraform/monitors.git//${module_slash}?ref={revision}"
  environment = var.environment
  message = module.datadog-message-alerting.alerting-message
EOF
  # if README already exist
  if [[ $EXIST -eq 1 ]]; then
    # take all custom config in declaration module example after "message" and until the end of block to restore it
    sed -n '/^[[:space:]]*message[[:space:]]*=.*/,/^\}/p' README.md.bak | tail -n +2 | head -n -1 >> README.md
  fi
  # close block and generate the next until list of monitors
  cat <<EOF >> README.md
}
\`\`\`
## Purpose
Creates DataDog monitors with the following checks:
EOF
  list=""
  # gather a information line splitted with "|" for every monitor
  for row in $(terraform-config-inspect --json | jq -c -r '.managed_resources | map([.pos.filename, .pos.line] | join("|")) | join("\n")' | sort -fdbi); do
    # split line for each info one variable
    IFS='|' read -r filename line < <(echo "$row")
    # gather all config HCL code for current monitor: from its declaration
    # line in the .tf file down to the closing brace
    # (fix: was "$(unknown)" — the file to read is "${filename}")
    set +o pipefail
    config=$(tail -n +"${line}" "${filename}" | sed '/^}/q')
    set -o pipefail
    # parse monitor's name
    name=$(get_name "$(echo "${config}" | grep 'name[[:space:]]*=')")
    # search if monitor is enabled
    [[ "$(echo "${config}" | grep 'count[[:space:]]*=')" =~ ^[[:space:]]*count[[:space:]]*=[[:space:]]*var\.([a-z0-9_]*_enabled) ]] &&
      # add "disabled by default" mention if not enabled
      if ! grep -A4 "${BASH_REMATCH[1]}" inputs.tf | grep -q default.*true; then
        name="${name} (disabled by default)"
      fi
    # append new line to list if not empty
    if ! [ -z "${list}" ]; then
      list="${list}\n"
    fi
    # append name to list and improve forecast naming
    list="${list}- ${name/could reach/forecast}"
  done
  # write sorted list to readme appending newline to end
  echo -e "$(echo -e "${list}" | sort -fdbi)\n" >> README.md
  # hack for terraform-docs with terraform 0.12 / HCL2 support
  tmp_tf=$(mktemp -d)
  awk -f "${TERRAFORM_AWK}" ./*.tf > "${tmp_tf}/main.tf"
  # auto generate terraform docs (inputs and outputs)
  terraform-docs --with-aggregate-type-defaults md table "${tmp_tf}/" >> README.md
  rm -fr "${tmp_tf}"
  # if README does not exist
  if [[ $EXIST -eq 0 ]]; then
    # Simply add empty documentation section
    cat <<EOF >> README.md
## ${PATTERN_DOC}
EOF
  else
    # else restore the custom information saved before
    grep -Pzo --color=never ".*${PATTERN_DOC}(.*\n)*" README.md.bak | head -n -1 >> README.md
    rm -f README.md.bak
  fi
  # force unix format (I don't know why for now but you never know)
  dos2unix README.md 2> /dev/null
  cd - > /dev/null
done

View File

@ -1,40 +0,0 @@
#!/bin/bash
# Create a modules.tf (filter-tags module declaration) for every monitors
# module which does not have one yet and follows the tagging convention.
source "$(dirname "$0")/utils.sh"
init

# fix: this script generates modules.tf, not outputs.tf
echo "Generate modules.tf files when does not exist for every monitors modules"
root=$(basename "${PWD}")

# loop over every modules
for module in $(browse_modules "$(get_scope ${1:-})" 'monitors-*.tf'); do
  cd "${module}"
  # get name of the monitors set directory
  resource="$(basename "${module}")"
  # if modules.tf does not exist AND if this set respect our tagging convention
  if ! [ -f modules.tf ] && grep -q filter_tags_use_defaults inputs.tf; then
    echo -e "\t- Generate modules.tf for module: ${module}"
    relative=""
    current="${PWD}"
    # iterate on path until we go back to root
    while [[ "$(basename "$current")" != "$root" ]]; do
      # for each iteration add "../" to generate relative path
      relative="${relative}../"
      # remove last directory from current path
      current="$(dirname "$current")"
    done
    # add the filter tags module
    cat > modules.tf <<EOF
module "filter-tags" {
  source = "${relative}common/filter-tags"
  environment = var.environment
  resource = "$resource"
  filter_tags_use_defaults = var.filter_tags_use_defaults
  filter_tags_custom = var.filter_tags_custom
  filter_tags_custom_excluded = var.filter_tags_custom_excluded
}
EOF
  fi
  cd - > /dev/null
done

View File

@ -1,15 +0,0 @@
#!/bin/bash
# Compliance check: enforce the project's best practices on every module.
source "$(dirname "$0")/utils.sh"
init

echo "Check best practices respect"
echo -e "\t- Check only one notify_no_data set to true per module"

# loop over every modules
for module in $(browse_modules "$(get_scope ${1:-})" 'monitors-*.tf'); do
  # check if there is more than 1 notify_no_data parameter set to true per set of monitors
  # (cat aggregates the count across all monitors-*.tf files; grep -c alone
  # would print one count per input file)
  if [[ $(cat "${module}"/monitors-*.tf | grep -c 'notify_no_data.*true') -gt 1 ]]; then
    echo "More than one notify_no_data set to true on $module"
    exit 1
  fi
done

View File

@ -1,47 +0,0 @@
#!/bin/bash
# Terraform CI: inject a dummy datadog provider into every module, run
# terraform init/validate on it, then check recursive formatting.
source "$(dirname "$0")/utils.sh"
init

echo "Check terraform CI"

# Clean when exit
err() {
  rm -f "${module}/tmp.tf"
}
trap 'err $LINENO' ERR TERM EXIT INT

# pick the pinned datadog provider version from the repository README
provider_version=$(grep '^[[:space:]]*version[[:space:]]=' README.md | awk '{print $3}')

# loop over every modules
for module in $(browse_modules "$(get_scope ${1:-})" 'inputs.tf'); do
  echo -e "\t- Terraform validate on module: ${module}"
  # temporary provider block + dummy credentials so validate can run standalone
  cat <<EOF > "${module}/tmp.tf"
provider "datadog" {
  version = $provider_version
  api_key = var.datadog_api_key
  app_key = var.datadog_app_key
}
variable "datadog_api_key" {
  type = string
  default = "xxx"
}
variable "datadog_app_key" {
  type = string
  default = "yyy"
}
EOF
  # some modules ship extra fixtures needed for validation
  if [ -f "${module}/test.tf.ci" ]; then
    cat "${module}/test.tf.ci" >> "${module}/tmp.tf"
  fi
  # fix: discard init output to /dev/null (was "/tmp/null", a regular file)
  terraform init "${module}" > /dev/null
  terraform validate "${module}"
  rm -f "${module}/tmp.tf"
done

echo -e "\t- Terraform fmt recursive"
terraform fmt -recursive

View File

@ -1,54 +0,0 @@
# Datadog scripts
## Summary
This repository contains a `scripts` directory where there are multiple scripts helping to different things:
- help and automate for some boring and repetitive tasks.
- keep everything up to date and warn if you forget.
- compliant checks and ensure best practices are respected.
- code validation for continuous integration.
## Structure
There are two kinds of scripts naming:
- `[0-9][0-9]_script_name.sh`: will be automatically run by `auto_update.sh` wrapper.
- `script_name.sh`: should be run manually.
Here is a list of scripts and their purpose:
- `auto_update.sh`: is the most important one and the one that must be used. It is a simple wrapper which calls every other `[0-9][0-9]*` script.
- It should be run by contributor after every change.
- The CI will also run it and it will fail if it detects any change compared to commit.
- "Children" scripts could be run individually if you know exactly what you need to update after a change.
- This script and all "children" scripts take one optional parameter to limit execution to a specific sub path. Otherwise they will run on all directories.
- `00_requirements.sh`: check some requirements like `terraform` command exists before run other scripts.
- `10_update_output.sh`: will generate and update all `outputs.tf`.
- `20_update_global_readme.sh`: will update the main `README.md` file and generate the list of all modules browsing the repository.
- `20_update_modules_readmes.sh`: will create and update `README.md` for each module. It will save all manual changes below `## Related documentation` section.
- `30_update_module.sh`: will create a `modules.tf` file per module when it does not exist.
- `90_best_practices.sh`: will check compliance and best practices respect.
- `99_terraform.sh`: terraform CI (init & validate only while auto apply is done in another pipeline).
- `utils.sh`: contains useful functions common to multiple scripts. It is not intended to be run directly.
- `changelog.sh`: helper script to release a new version.
- generate and update `CHANGELOG.md` file from git history.
- filter to list only "done" issues from JIRA.
- close all issues on JIRA.
- fix version for all issues on JIRA.
- create release for current version on JIRA.
## Usage
After any change on this repo, you need to run the `./scripts/auto_update.sh [PATH_TO_MODULE]` command to make sure all is up to date otherwise the CI pipeline will fail:
The parameter is optional and it will limit the scripts execution on a specific path on the repository
On linux system it is possible to run the script directly while `terraform`, `terraform-docs`, `terraform-config-inspect`, `jq` commands are available in your PATH.
Else you can use [the same docker image as the CI](https://hub.docker.com/r/claranet/datadog-terraform) on every other platforms
With this command run from the root of the repository you will get exactly the same execution as the pipeline (and so the same result also):
```
$ docker run --rm -v "$PWD:/work" claranet/datadog-terraform /work/scripts/auto_update.sh
```

View File

@ -1,8 +0,0 @@
#!/bin/bash
# Wrapper which runs every auto-update script ([0-9][0-9]_*.sh) in numeric
# order, forwarding the optional scope path ($1) to each of them.
source "$(dirname "$0")/utils.sh"
init scripts

for script in [0-9][0-9]_*.sh; do
  "./${script}" "$(get_scope ${1:-})"
done

View File

@ -1,122 +0,0 @@
#!/bin/bash
# Release helper: build CHANGELOG.md from git history + JIRA issues and
# optionally close and fix-version all related issues on JIRA.
# (fix: shebang was "#/bin/bash" — a plain comment — so the script could run
# under a non-bash shell despite using bashisms like [[ ]] and < <(...))
set -euo pipefail
source "$(dirname $0)/utils.sh"
goto_root

## Check parameters and environment variables
# PARAMETER: The next version to release (i.e. v3.0.0)
# JIRA_API_TOKEN: the user api token to login jira
# JIRA_LOGIN: the user email to login on jira
if [ $# -eq 0 ]; then
  echo "Target tag is required as parameter"
  exit 1
fi
if [ -z "${JIRA_API_TOKEN:-}" ]; then
  echo "Environment variable JIRA_API_TOKEN needs to be defined: https://confluence.atlassian.com/cloud/api-tokens-938839638.html"
  exit 2
fi
if [ -z "${JIRA_LOGIN:-}" ]; then
  echo "Environment variable JIRA_LOGIN needs to be defined: jira user email"
  exit 3
fi
if ! command -v jira >/dev/null; then
  echo "go-jira command is required: https://github.com/go-jira/jira#install"
  # fix: abort when the required command is missing (message alone let the
  # script continue and fail later with a confusing error)
  exit 4
fi

TAG_TARGET=$1
# NOTE(review): TAG_SOURCE is defined but never used below (the git log range
# recomputes "git describe" directly) — confirm whether it can be removed
TAG_SOURCE=${TAG_SOURCE:-$(git describe --tags --abbrev=0)}
JIRA_ENDPOINT=${JIRA_ENDPOINT:-https://claranet-morea.atlassian.net}
JIRA_STATUS=${JIRA_STATUS:-Done Closed}

SAVEIFS=$IFS
IFS=$(echo -en "\n\b")
TMP=$(mktemp -d)

# Clean when exit
err() {
  rm -fr "${TMP}"
  IFS=$SAVEIFS
}
trap 'err $LINENO' ERR TERM EXIT INT

# Create the go-jira template with issue, status and summary as CSV
mkdir -p ${HOME}/.jira.d/templates
cat > ${HOME}/.jira.d/templates/changelog <<EOF
{{ .fields.issuetype.name }};{{ .key }};{{if .fields.status -}}{{ .fields.status.name }}{{end -}};{{ .fields.summary }}
EOF

# Create the go-jira template to list allowed versions
cat > ${HOME}/.jira.d/templates/versions <<EOF
{{ range .fields.fixVersions.allowedValues }}{{.name}}
{{end}}
EOF

# Create the go-jira template to edit issue fixing version
cp -f ${HOME}/.jira.d/templates/edit ${HOME}/.jira.d/templates/fixversion
cat >> ${HOME}/.jira.d/templates/fixversion <<EOF
{{if .meta.fields.fixVersions -}}
{{if .meta.fields.fixVersions.allowedValues}}
fixVersions: # Values: {{ range .meta.fields.fixVersions.allowedValues }}{{.name}}, {{end}}{{if .overrides.fixVersions}}{{ range (split "," .overrides.fixVersions)}}
  - name: {{.}}{{end}}{{else}}{{range .fields.fixVersions}}
  - name: {{.name}}{{end}}{{end}}
{{- end -}}
{{- end -}}
EOF

# List all issues in commits from previous tag
TMP_ISSUES=$TMP/issues.txt
for commit in $(git log $(git describe --tags --abbrev=0)..HEAD --oneline); do
  [[ $commit =~ ^.*(MON-[0-9]+).*$ ]] && echo "${BASH_REMATCH[1]}"
done | sort -u > $TMP_ISSUES

TMP_CHANGELOG=${TMP}/changelog.md
TMP_ALLOWED_ISSUES=${TMP}/allowed-issues.txt
# init changelog for next version
echo -e "\n# $TAG_TARGET ($(LANG=eng date +"%B %d, %Y"))" >> $TMP_CHANGELOG
for issue in $(cat $TMP_ISSUES); do
  # retrieve jira information from go-jira template
  IFS=';' read -r type issue status summary < <(jira --endpoint=${JIRA_ENDPOINT} --login=${JIRA_LOGIN} view $issue --template=changelog)
  # Ignore if issue is not in required status
  if [[ "$JIRA_STATUS" == *"$status"* ]]; then
    # add line for type only once
    if ! grep -q "^## $type" $TMP_CHANGELOG; then
      echo -e "\n## $type\n" >> $TMP_CHANGELOG
    fi
    # add jira issue line to changelog
    echo "* [[$issue](${JIRA_ENDPOINT}/browse/${issue})] - $summary" >> $TMP_CHANGELOG
    echo $issue >> $TMP_ALLOWED_ISSUES
  else
    echo "Ignore $issue with status \"$status\" not in [$JIRA_STATUS] ($summary)"
  fi
done
cat $TMP_CHANGELOG

# Ask for confirmation to update changelog
read -p 'Update CHANGELOG.md with this ? (y/n): ' -r answer
if [[ "$answer" == "y" ]]; then
  separator='\n'
  # Remove target tag changelog if already exist
  if grep -q $TAG_TARGET CHANGELOG.md; then
    prev_tag=$(grep '^# v' CHANGELOG.md | sed -n 2p)
    sed -i "/${prev_tag}/,\$!d" CHANGELOG.md
    separator="${separator}\n"
  fi
  # Add target tag changelog to final changelog
  echo -e "$(cat $TMP_CHANGELOG)${separator}$(cat CHANGELOG.md)" > CHANGELOG.md
fi

read -p "Close all issues and fix version $TAG_TARGET ? (y/n): " -r answer
if [[ "$answer" == "y" ]]; then
  # Create version if does not exists
  one_issue=$(head -n 1 $TMP_ALLOWED_ISSUES)
  auth_header=$(printf "${JIRA_LOGIN}:${JIRA_API_TOKEN}" | base64)
  if ! jira --endpoint=${JIRA_ENDPOINT} --login=${JIRA_LOGIN} editmeta $one_issue --template=versions | grep -q $TAG_TARGET; then
    curl -H "Authorization: Basic $auth_header" -H "Content-Type: application/json" -X POST -d "{\"name\": \"${TAG_TARGET}\",\"userReleaseDate\": \"$(echo -n $(LANG=eng date +'%-d/%b/%Y'))\",\"project\": \"$(echo -n $one_issue | cut -d'-' -f1)\",\"archived\": false,\"released\": true}" ${JIRA_ENDPOINT}/rest/api/latest/version
  fi
  for issue in $(cat $TMP_ALLOWED_ISSUES); do
    jira --endpoint=${JIRA_ENDPOINT} --login=${JIRA_LOGIN} transition Close $issue
    jira --endpoint=${JIRA_ENDPOINT} --login=${JIRA_LOGIN} edit $issue --template=fixversion --override fixVersions=${TAG_TARGET} --noedit
  done
fi

View File

@ -1,57 +0,0 @@
#!/bin/bash
# Change directory to the repository root. The calling script is expected to
# live either in the root itself ("." case: go one level up) or in a direct
# subdirectory such as ./scripts (go to that subdirectory's parent).
function goto_root() {
  script_dir=$(dirname $0)
  if [[ "$script_dir" != "." ]]; then
    cd "$(dirname $script_dir)"
  else
    cd ..
  fi
}
# Common entry point for every script: enable bash strict mode, turn on
# command tracing in CI, normalize sort ordering, and move to the repository
# root (or to the optional subdirectory given as $1).
function init() {
  set -euo pipefail
  if [[ ${GITLAB_CI:-} == "true" ]]; then
    # verbose command tracing when running inside the GitLab CI pipeline
    set -x
  fi
  # MON-478 fix sort order behavior on case
  export LC_COLLATE=C
  goto_root
  # fix: quote the expansion (was an unquoted `! [ -z ${1:-} ]`)
  if [[ -n "${1:-}" ]]; then
    cd "$1"
  fi
}
# Normalize the optional scope parameter into a relative path: defaults to
# "./" when absent, empty or ".", and guarantees a leading "./" otherwise.
function get_scope() {
  TO_PARSE="./"
  # fix: quote "$1" — the unquoted test errored on an empty argument
  if [ ! -z "${1+x}" ] && [ "$1" != "." ]; then
    TO_PARSE="$1"
  fi
  if [[ $TO_PARSE != ./* ]]; then
    TO_PARSE="./${TO_PARSE}"
  fi
  echo "$TO_PARSE"
}
# Turn a relative module path into a space-separated list of its directory
# components, dropping the leading "." (e.g. "./aws/ec2" -> "aws ec2").
function list_dirs() {
  # fix: quote "${1}" so glob characters in the path are not expanded
  echo "${1}" | awk -F '/' '{$1=""; print $0}' | cut -c 2-
}
# Extract the human-readable monitor name from a terraform `name = "..."`
# line passed as $1. Returns 42 when the line does not match the expected
# shape. NOTE(review): the regex expects something like
#   name = "$...[...] Actual name"
# i.e. an interpolated prefix followed by a bracketed tag, then the name —
# confirm against the monitors-*.tf naming convention.
function get_name() {
regex='^[[:space:]]+name[[:space:]]+=[[:space:]]+"\$.*\[.*\][[:space:]]+(.*)"$'
if [[ "${1}" =~ ${regex} ]]; then
name="${BASH_REMATCH[1]}"
else
echo "Error: impossible to parse monitor name"
return 42
fi
# strip any trailing "{{#is_alert}}..." templating suffix from the name
if [[ "${name}" =~ ^(.*)[[:space:]]\{\{#is_alert\}\}.*$ ]]; then
name="${BASH_REMATCH[1]}"
fi
# unquoted on purpose? this collapses consecutive whitespace in the name —
# TODO confirm before quoting
echo $name
return 0
}
# List (sorted, unique, dictionary order) every directory under root $1 that
# contains at least one file matching the glob pattern $2.
function browse_modules() {
  local root="$1" pattern="$2"
  find "${root}" -name "${pattern}" -exec dirname "{}" \; | sort -ufdbi
}