diff --git a/.bp-config/httpd/httpd.conf b/.bp-config/httpd/httpd.conf new file mode 100644 index 00000000..59c46fd4 --- /dev/null +++ b/.bp-config/httpd/httpd.conf @@ -0,0 +1,27 @@ +# Overrides https://github.com/cloudfoundry/php-buildpack/blob/master/defaults/config/httpd/httpd.conf +# to include our customization. +# See https://docs.cloudfoundry.org/buildpacks/php/gsg-php-config.html#engine-configurations for background + +ServerRoot "${HOME}/httpd" +Listen ${PORT} +ServerAdmin "${HTTPD_SERVER_ADMIN}" +ServerName "0.0.0.0" +DocumentRoot "${HOME}/#{WEBDIR}" +Include conf/extra/httpd-modules.conf +Include conf/extra/httpd-directories.conf +Include conf/extra/httpd-mime.conf +Include conf/extra/httpd-deflate.conf +Include conf/extra/httpd-logging.conf +Include conf/extra/httpd-mpm.conf +Include conf/extra/httpd-default.conf +Include conf/extra/httpd-remoteip.conf +Include conf/extra/httpd-php.conf + +# If they exist, include any user-provided customizations +IncludeOptional conf/user-provided/*.conf + + + LoadModule headers_module modules/mod_headers.so + + +RequestHeader unset Proxy early diff --git a/.bp-config/httpd/user-provided/httpd-drupalsupport.conf b/.bp-config/httpd/user-provided/httpd-drupalsupport.conf new file mode 100644 index 00000000..e8638579 --- /dev/null +++ b/.bp-config/httpd/user-provided/httpd-drupalsupport.conf @@ -0,0 +1,2 @@ +# Enable proxy_http for our s3fs module +LoadModule proxy_http_module modules/mod_proxy_http.so diff --git a/.bp-config/options.json b/.bp-config/options.json new file mode 100644 index 00000000..3d62bb1b --- /dev/null +++ b/.bp-config/options.json @@ -0,0 +1,11 @@ +{ + "COMPOSER_INSTALL_OPTIONS": [ + "--no-dev --optimize-autoloader --no-progress --no-interaction" + ], + "COMPOSER_VENDOR_DIR": "vendor", + "WEBDIR": "web", + "PHP_VERSION": "{PHP_83_LATEST}", + "ADDITIONAL_PREPROCESS_CMDS": [ + "source $HOME/scripts/bootstrap.sh" + ] +} diff --git a/.bp-config/php/php.ini.d/extensions.ini 
b/.bp-config/php/php.ini.d/extensions.ini new file mode 100644 index 00000000..dfa6195d --- /dev/null +++ b/.bp-config/php/php.ini.d/extensions.ini @@ -0,0 +1,9 @@ +extension=apcu +extension=igbinary +extension=imagick +extension=pdo_mysql +extension=redis +extension=sodium +extension=zip + +zend_extension=opcache.so diff --git a/.bp-config/php/php.ini.d/memory_limit.ini b/.bp-config/php/php.ini.d/memory_limit.ini new file mode 100644 index 00000000..8f744af1 --- /dev/null +++ b/.bp-config/php/php.ini.d/memory_limit.ini @@ -0,0 +1,3 @@ +; Maximum amount of memory a script may consume (512MB) +; http://php.net/memory-limit +memory_limit = 512M diff --git a/.github/workflows/build-and-deploy.yml b/.github/workflows/build-and-deploy.yml new file mode 100644 index 00000000..a6dc922e --- /dev/null +++ b/.github/workflows/build-and-deploy.yml @@ -0,0 +1,21 @@ +name: Build and deploy + +on: + push: + branches: + - develop + - main + - feature/dg-98-build-and-configure-pipeline + +permissions: + contents: read + packages: write + +jobs: + deploy-infra: + uses: ./.github/workflows/terraform-deploy-infra.yml + secrets: inherit + deploy-app: + uses: ./.github/workflows/cloudgov-deploy-app.yml + needs: deploy-infra + secrets: inherit diff --git a/.github/workflows/cloudgov-deploy-app.yml b/.github/workflows/cloudgov-deploy-app.yml new file mode 100644 index 00000000..45209f1d --- /dev/null +++ b/.github/workflows/cloudgov-deploy-app.yml @@ -0,0 +1,64 @@ +name: Deploy App + +on: + workflow_call: + +jobs: + deployApp: + name: Deploy App + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set environment + run: | + BRANCH=develop + #BRANCH=$(echo $GITHUB_REF | cut -d'/' -f 3) + COMPOSER_NO_DEV=0 + case ${BRANCH} in + develop) + CF_SPACE="dev" + DRUPAL_MEMORY=${{ vars.DEVELOP_CMS_MEMORY }} + DRUPAL_INSTANCES=${{ vars.DEVELOP_INSTANCES }} + ;; + main) + CF_SPACE="prod" + COMPOSER_NO_DEV=1 + DRUPAL_MEMORY=${{ 
vars.MAIN_CMS_MEMORY }} + DRUPAL_INSTANCES=${{ vars.MAIN_INSTANCES }} + ;; + esac + + echo "APP_NAME=drupal" | tee -a $GITHUB_ENV + echo "BRANCH=${BRANCH}" | tee -a $GITHUB_ENV + echo "BUILDPACK_PORT=${{ vars.BUILDPACK_PORT }}" | tee -a $GITHUB_ENV + echo "CF_SPACE=${CF_SPACE}" | tee -a $GITHUB_ENV + echo "COMPOSER_NO_DEV=${COMPOSER_NO_DEV}" | tee -a $GITHUB_ENV + echo "DRUPAL_INSTANCES=${DRUPAL_INSTANCES}" | tee -a $GITHUB_ENV + echo "DRUPAL_MEMORY=${DRUPAL_MEMORY}" | tee -a $GITHUB_ENV + echo "WAF_NAME=waf"| tee -a $GITHUB_ENV + - name: Set repo name + run: echo "REPO_NAME=${{ github.event.repository.name }}" >> $GITHUB_ENV + - name: Install basic dependancies + run: ./scripts/pipeline/deb-basic-deps.sh + - name: Install Cloudfoundry CLI + run: ./scripts/pipeline/deb-cf-install.sh + - name: Cloud.gov login + env: + CF_USER: "${{ secrets.CF_USER }}" + CF_PASSWORD: "${{ secrets.CF_PASSWORD }}" + CF_ORG: "${{ secrets.CF_ORG }}" + PROJECT: "${{ secrets.PROJECT }}" + run: | + source ./scripts/pipeline/cloud-gov-login.sh + cf target -s "${CF_SPACE}" >/dev/null 2>&1 + - name: Build theme + run: ./orch/build_node.sh + - name: Deploy App + env: + PROJECT: "${{ secrets.PROJECT }}" + run: source ./scripts/pipeline/cloud-gov-deploy.sh + - name: Post Deploy + env: + PROJECT: "${{ secrets.PROJECT }}" + run: source ./scripts/pipeline/cloud-gov-post-deploy.sh \ No newline at end of file diff --git a/.github/workflows/terraform-deploy-infra.yml b/.github/workflows/terraform-deploy-infra.yml new file mode 100644 index 00000000..c1c3790a --- /dev/null +++ b/.github/workflows/terraform-deploy-infra.yml @@ -0,0 +1,133 @@ +name: Update Infrastructure + +on: + workflow_call: + +jobs: + updateInfrastructure: + name: Update Infrastructure + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set env.BRANCH + run: | + BRANCH=develop + #BRANCH=$(echo $GITHUB_REF | cut -d'/' -f 3) + case ${BRANCH} in + develop) + CF_SPACE="dev" + ;; + main) + 
CF_SPACE="prod" + ;; + esac + echo "BRANCH=${BRANCH}" >> $GITHUB_ENV + echo "CF_SPACE=${CF_SPACE}" >> $GITHUB_ENV + - name: Set repo name + run: echo "REPO_NAME=${{ github.event.repository.name }}" >> $GITHUB_ENV + - name: Install basic dependancies + run: ./scripts/pipeline/deb-basic-deps.sh + - name: Install Cloudfoundry CLI + run: ./scripts/pipeline/deb-cf-install.sh + - name: Cloud.gov login + env: + CF_USER: "${{ secrets.CF_USER }}" + CF_PASSWORD: "${{ secrets.CF_PASSWORD }}" + CF_ORG: "${{ secrets.CF_ORG }}" + PROJECT: "${{ secrets.PROJECT }}" + TF_BASTION: "${{ secrets.TF_BASTION }}" + TF_BACKEND_SPACE: "${{ secrets.TF_BACKEND_SPACE }}" + run: | + source ./scripts/pipeline/cloud-gov-login.sh + cf target -s "${TF_BACKEND_SPACE}" >/dev/null 2>&1 + - name: Start Bastion + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + run: | + cf start "${TF_BASTION}" >/dev/null 2>&1 + ./scripts/pipeline/cloud-gov-wait-for-app-start.sh "${TF_BASTION}" + - name: Cloud.gov bastion git checkout + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + run: | + #declare -a commands=("rm -rf ${REPO_NAME}" "git clone https://github.com/${GITHUB_REPOSITORY_OWNER}/${REPO_NAME}.git" "cd ${REPO_NAME}" "git checkout ${BRANCH}") + declare -a commands=("rm -rf ${REPO_NAME}" "git clone -b "feature/dg-98-build-and-configure-pipeline" https://github.com/${GITHUB_REPOSITORY_OWNER}/${REPO_NAME}.git") + for command in "${commands[@]}"; do + ./scripts/pipeline/cloud-gov-remote-command.sh "${TF_BASTION}" "${command}" 1 + done + - name: Build nginx WAF Plugin + env: + ubuntu_version: "${{ vars.UBUNTU_VERSION }}" + modsecurity_nginx_version: "${{ vars.MODSECURITY_NGINX_VERSION }}" + TF_BASTION: "${{ secrets.TF_BASTION }}" + run: | + source ./scripts/pipeline/cloud-gov-waf-version.sh + source ./scripts/pipeline/terraform-build-waf-plugin.sh + - name: Configure Terraform + env: + CF_USER: "${{ secrets.CF_USER }}" + CF_PASSWORD: "${{ secrets.CF_PASSWORD }}" + CF_ORG: "${{ secrets.CF_ORG }}" + PROJECT: "${{ 
secrets.PROJECT }}" + TF_BASTION: "${{ secrets.TF_BASTION }}" + run: | + CWD=$(pwd) + cd terraform/infra + envsubst < terraform.tfvars.tmpl > terraform.tfvars + ${CWD}/scripts/pipeline/cloud-gov-scp-file.sh "${TF_BASTION}" "terraform.tfvars" "${REPO_NAME}/terraform/infra/" + cd "${CWD}" + - name: Terraform Init + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + id: init + run: | + ./scripts/pipeline/cloud-gov-remote-command.sh "${TF_BASTION}" "tofu -chdir=${REPO_NAME}/terraform/infra init" 1 + ./scripts/pipeline/cloud-gov-remote-command.sh "${TF_BASTION}" "tofu -chdir=${REPO_NAME}/terraform/infra workspace new ${CF_SPACE} || exit 0" 1 + - name: Terraform Validate + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + id: validate + run: | + ./scripts/pipeline/cloud-gov-remote-command.sh ${TF_BASTION} "TF_WORKSPACE=${CF_SPACE} tofu -chdir=${REPO_NAME}/terraform/infra validate -no-color" 1 + - name: Terraform Plan + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + id: plan + run: | + ./scripts/pipeline/cloud-gov-remote-command.sh "${TF_BASTION}" "TF_WORKSPACE=${CF_SPACE} tofu -chdir=${REPO_NAME}/terraform/infra plan -no-color" 1 + - name: Terraform Apply + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + id: apply + run: | + ./scripts/pipeline/cloud-gov-remote-command.sh "${TF_BASTION}" "TF_WORKSPACE=${CF_SPACE} tofu -chdir=${REPO_NAME}/terraform/infra apply -auto-approve" 1 + stopBastion: + name: Stop Bastion + runs-on: ubuntu-latest + needs: updateInfrastructure + if: ${{ always() }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set env.BRANCH + run: echo "BRANCH=dev" >> $GITHUB_ENV + - name: Install basic dependancies + run: ./scripts/pipeline/deb-basic-deps.sh + - name: Install Cloudfoundry CLI + run: ./scripts/pipeline/deb-cf-install.sh + - name: Cloud.gov login + env: + CF_USER: "${{ secrets.CF_USER }}" + CF_PASSWORD: "${{ secrets.CF_PASSWORD }}" + CF_ORG: "${{ secrets.CF_ORG }}" + PROJECT: "${{ secrets.PROJECT }}" + TF_BASTION: "${{ 
secrets.TF_BASTION }}" + TF_BACKEND_SPACE: "${{ secrets.TF_BACKEND_SPACE }}" + run: | + source ./scripts/pipeline/cloud-gov-login.sh + cf target -s "${TF_BACKEND_SPACE}" >/dev/null 2>&1 + - name: Stop Bastion + env: + TF_BASTION: "${{ secrets.TF_BASTION }}" + run: cf stop "${TF_BASTION}" >/dev/null 2>&1 diff --git a/.phpcs.xml.dist b/.phpcs.xml.dist new file mode 100644 index 00000000..a33f7b85 --- /dev/null +++ b/.phpcs.xml.dist @@ -0,0 +1,62 @@ + + + + PHP_CodeSniffer standards overrides. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + web/modules/custom + web/themes/custom + + */vendor/* + */node_modules/* + .github/ + web/modules/custom/tome + web/modules/custom/samlauth + + diff --git a/apt.yml b/apt.yml new file mode 100644 index 00000000..51a1af4b --- /dev/null +++ b/apt.yml @@ -0,0 +1,10 @@ +--- +#keys: +# - https://download.newrelic.com/548C16BF.gpg +#repos: + #- deb http://apt.newrelic.com/debian/ newrelic non-free +packages: + - awscli + - bsdmainutils + #- newrelic-php5 + - nano \ No newline at end of file diff --git a/manifest.yml b/manifest.yml new file mode 100644 index 00000000..ce518ceb --- /dev/null +++ b/manifest.yml @@ -0,0 +1,28 @@ +--- +default_config: &defaults + buildpacks: + - https://github.com/cloudfoundry/apt-buildpack + - php_buildpack + disk_quota: 2G + env: + COMPOSER_NO_DEV: ${COMPOSER_NO_DEV} + environment: ${CF_SPACE} + LD_LIBRARY_PATH: /home/vcap/app/php/lib/ + PHP_INI_SCAN_DIR: /home/vcap/app/php/etc/:/home/vcap/app/php/etc/php.ini.d/ + waf_external_endpoint: ${PROJECT}-${WAF_NAME}-${CF_SPACE}.app.cloud.gov + timeout: 180 + routes: + - route: ${PROJECT}-drupal-${CF_SPACE}.apps.internal + protocol: http2 + services: + - ${PROJECT}-mysql-${CF_SPACE} + - ${PROJECT}-static-${CF_SPACE} + - ${PROJECT}-storage-${CF_SPACE} + +applications: +- name: ${PROJECT}-drupal-${CF_SPACE} + <<: *defaults + memory: ${DRUPAL_MEMORY} + stack: cflinuxfs4 + instances: ${DRUPAL_INSTANCES} + random-route: false diff --git 
a/scripts/bash_exports.sh b/scripts/bash_exports.sh new file mode 100755 index 00000000..d56d71f8 --- /dev/null +++ b/scripts/bash_exports.sh @@ -0,0 +1,39 @@ +#! /bin/bash + +export SECRETS=$(echo $VCAP_SERVICES | jq -r '.["user-provided"][] | select(.name == "secrets") | .credentials') +export SECAUTHSECRETS=$(echo $VCAP_SERVICES | jq -r '.["user-provided"][] | select(.name == "secauthsecrets") | .credentials') + +export APP_NAME=$(echo $VCAP_APPLICATION | jq -r '.name') +export APP_ROOT=$(dirname "$0") +export APP_ID=$(echo "$VCAP_APPLICATION" | jq -r '.application_id') + +export DB_NAME=$(echo $VCAP_SERVICES | jq -r '.["aws-rds"][] | .credentials.db_name') +export DB_USER=$(echo $VCAP_SERVICES | jq -r '.["aws-rds"][] | .credentials.username') +export DB_PW=$(echo $VCAP_SERVICES | jq -r '.["aws-rds"][] | .credentials.password') +export DB_HOST=$(echo $VCAP_SERVICES | jq -r '.["aws-rds"][] | .credentials.host') +export DB_PORT=$(echo $VCAP_SERVICES | jq -r '.["aws-rds"][] | .credentials.port') + +export ADMIN_EMAIL=$(echo $SECRETS | jq -r '.ADMIN_EMAIL') + +export ENV=$(echo "$VCAP_APPLICATION" | jq -r '.space_name' | rev | cut -d- -f1 | rev) + +export S3_BUCKET=$(echo "$VCAP_SERVICES" | jq -r '.["s3"][]? | select(.name == "storage") | .credentials.bucket') +export S3_ENDPOINT=$(echo "$VCAP_SERVICES" | jq -r '.["s3"][]? 
| select(.name == "storage") | .credentials.fips_endpoint') + +export SPACE=$(echo $VCAP_APPLICATION | jq -r '.["space_name"]') +export WWW_HOST=${WWW_HOST:-$(echo $VCAP_APPLICATION | jq -r '.["application_uris"][]' | grep 'beta\|www' | tr '\n' ' ')} +export CMS_HOST=${CMS_HOST:-$(echo $VCAP_APPLICATION | jq -r '.["application_uris"][]' | grep cms | tr '\n' ' ')} +if [ -z "$WWW_HOST" ]; then + export WWW_HOST="*.app.cloud.gov" +elif [ -z "$CMS_HOST" ]; then + export CMS_HOST=$(echo $VCAP_APPLICATION | jq -r '.["application_uris"][]' | head -n 1) +fi + +export S3_ROOT_WEB=${S3_ROOT_WEB:-/web} +export S3_ROOT_CMS=${S3_ROOT_CMS:-/cms/public} +export S3_HOST=${S3_HOST:-$S3_BUCKET.$S3_ENDPOINT} +export S3_PROXY_WEB=${S3_PROXY_WEB:-$S3_HOST$S3_ROOT_WEB} +export S3_PROXY_CMS=${S3_PROXY_CMS:-$S3_HOST$S3_ROOT_CMS} +export S3_PROXY_PATH_CMS=${S3_PROXY_PATH_CMS:-/s3/files} + +export DNS_SERVER=${DNS_SERVER:-$(grep -i '^nameserver' /etc/resolv.conf|head -n1|cut -d ' ' -f2)} diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh new file mode 100755 index 00000000..e0a86d2d --- /dev/null +++ b/scripts/bootstrap.sh @@ -0,0 +1,76 @@ +#!/bin/bash +set -uo pipefail + +## Export proxy servers. 
+export http_proxy=$(echo ${VCAP_SERVICES} | jq -r '."user-provided"[].credentials.proxy_uri') +export https_proxy=$(echo ${VCAP_SERVICES} | jq -r '."user-provided"[].credentials.proxy_uri') + +export home="/home/vcap" +export app_path="${home}/app" +export apt_path="${home}/deps/0/apt" + +echo $VCAP_SERVICES | jq -r '."user-provided"[].credentials.ca_certificate' | base64 -d > ${app_path}/ca_certificate.pem +echo $VCAP_SERVICES | jq -r '."user-provided"[].credentials.ca_key' | base64 -d > ${app_path}/ca_key.pem + +chmod 600 ${app_path}/ca_certificate.pem +chmod 600 ${app_path}/ca_key.pem + +if [ -z "${VCAP_SERVICES:-}" ]; then + echo "VCAP_SERVICES must be set in the environment: aborting bootstrap"; + exit 1; +fi + +## NewRelic configuration +export newrelic_apt="${apt_path}/usr/lib/newrelic-php5" +export newrelic_app="${app_path}/newrelic/" + +rm -rf ${newrelic_app}/agent +ln -s ${newrelic_apt}/agent ${newrelic_app}/agent + +rm -f ${newrelic_app}/daemon/newrelic-daemon.x64 +ln -s ${apt_path}/usr/bin/newrelic-daemon ${newrelic_app}/daemon/newrelic-daemon.x64 + +rm -f ${app_path}/newrelic/scripts/newrelic-iutil.x64 +ln -s ${newrelic_apt}/scripts/newrelic-iutil.x64 ${newrelic_app}/scripts/newrelic-iutil.x64 + +echo 'newrelic.daemon.collector_host=gov-collector.newrelic.com' >> ${app_path}/php/etc/php.ini + +source ${app_path}/scripts/bash_exports.sh + +if [ ! -f ./container_start_timestamp ]; then + touch ./container_start_timestamp + chmod a+r ./container_start_timestamp + echo "$(date +'%s')" > ./container_start_timestamp +fi + +dirs=( "${HOME}/private" "${HOME}/web/sites/default/files" ) + +for dir in "${dirs[@]}"; do + if [ ! -d $dir ]; then + echo "Creating ${dir} directory ... " + mkdir $dir + chown vcap. $dir + fi +done + +## Updated ~/.bashrc to update $PATH when someone logs in. 
+[ -z $(cat ${home}/.bashrc | grep PATH) ] && \ + touch ${home}/.bashrc && \ + echo "export http_proxy=${http_proxy}" >> ${home}/.bashrc && \ + echo "export https_proxy=${https_proxy}" >> ${home}/.bashrc && \ + echo "alias nano=\"${home}/deps/0/apt/bin/nano\"" >> ${home}/.bashrc && \ + echo "PATH=$PATH:/home/vcap/app/php/bin:/home/vcap/app/vendor/drush/drush" >> /home/vcap/.bashrc + +source ${home}/.bashrc + +echo "Installing awscli..." +{ + curl -S "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "/tmp/awscliv2.zip" + unzip -qq /tmp/awscliv2.zip -d /tmp/ + /tmp/aws/install --bin-dir ${home}/deps/0/bin --install-dir ${home}/deps/0/usr/local/aws-cli + rm -rf /tmp/awscliv2.zip /tmp/aws +} >/dev/null 2>&1 + +# if [ "${CF_INSTANCE_INDEX:-''}" == "0" ]; then +# ${app_path}/scripts/post-deploy +# fi \ No newline at end of file diff --git a/scripts/download_backup.sh b/scripts/download_backup.sh new file mode 100755 index 00000000..e37b7132 --- /dev/null +++ b/scripts/download_backup.sh @@ -0,0 +1,113 @@ +#!/bin/bash + +set -e + +if [ "$(uname -s)" = "Darwin" ]; then + if ! hash brew 2>/dev/null ; then + echo "Please install Homebrew: + /bin/bash -c \"\$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)\"" + echo + echo "NOTE: You will need sudoer permission." + echo "Linux: https://linuxize.com/post/how-to-add-user-to-sudoers-in-ubuntu/" + echo "MacOS: https://osxdaily.com/2014/02/06/add-user-sudoers-file-mac/" + exit 1 + fi + + if ! hash gdate 2>/dev/null ; then + echo "Please install GNU coreutils: + Homebrew: + brew install coreutils" + exit 1 + fi +fi + +if ! 
hash cf 2>/dev/null ; then + echo "Please install cf version 8: + Linux: https://docs.cloudfoundry.org/cf-cli/install-go-cli.html + Homebrew: + brew tap cloudfoundry/tap + brew install cf-cli@8" + exit 1 +elif [[ "$(cf --version)" != *"cf version 8."* ]]; then + echo "Please install cf version 8: + Linux: https://docs.cloudfoundry.org/cf-cli/install-go-cli.html + Homebrew: + brew uninstall cf-cli + brew tap cloudfoundry/tap + brew install cf-cli@8" + exit 1 +fi + +if ! hash jq 2>/dev/null ; then + echo "Please install jq: + Linux: https://jqlang.github.io/jq/download/ + Homebrew: + brew install jq" + exit 1 +fi + +# change which date command is used based on host OS +date_command='' + +if [ "$(uname -s)" == "Darwin" ]; then + date_command=gdate +else + date_command=date +fi + +help(){ + echo "Usage: $0 [options]" >&2 + echo + echo " -b The name of the S3 bucket with the backup." + echo " -e Environment of backup to download." + echo " -s Name of the space the backup bucket is in." + echo " -d Date to retrieve backup from. Acceptable values + are 'latest' or in 'YYYY-MM-DD' format and no + more than 15 days ago." +} + +RED='\033[0;31m' +NC='\033[0m' + +while getopts 'b:e:s:d:' flag; do + case ${flag} in + b) backup_bucket=${OPTARG} ;; + e) env=${OPTARG} ;; + s) space=${OPTARG} ;; + d) retrieve_date=${OPTARG} ;; + *) help && exit 1 ;; + esac +done + +[[ -z "${backup_bucket}" ]] && help && echo -e "\n${RED}Error: Missing -b flag.${NC}" && exit 1 +[[ -z "${env}" ]] && help && echo -e "\n${RED}Error: Missing -e flag.${NC}" && exit 1 +[[ -z "${space}" ]] && help && echo -e "\n${RED}Error: Missing -s flag.${NC}" && exit 1 +[[ -z "${retrieve_date}" ]] && help && echo -e "\n${RED}Error: Missing -d flag.${NC}" && exit 1 + +echo "Getting backup bucket credentials..." 
+{ + cf target -s "${space}" + + export service="${backup_bucket}" + export service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f + cf create-service-key "${service}" "${service_key}" + sleep 2 + export s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + +} >/dev/null 2>&1 + +echo "Downloading backup..." +{ + + aws s3 cp s3://${bucket}/${env}/${retrieve_date}.tar.gz . --no-verify-ssl 2>/dev/null + cf delete-service-key "${service}" "${service_key}" -f + +} >/dev/null 2>&1 + +echo "File saved: ${retrieve_date}.tar.gz" diff --git a/scripts/entrypoint b/scripts/entrypoint new file mode 100755 index 00000000..64ce35fb --- /dev/null +++ b/scripts/entrypoint @@ -0,0 +1,4 @@ +#!/bin/bash + +## Simple script to hold the container open. +while : ; do sleep 60m ; done \ No newline at end of file diff --git a/scripts/pipeline/app-backup.sh b/scripts/pipeline/app-backup.sh new file mode 100755 index 00000000..79ecdb3e --- /dev/null +++ b/scripts/pipeline/app-backup.sh @@ -0,0 +1,146 @@ +#!/bin/bash + +kill_pids() { + app=$1 + ids=$(ps aux | grep ${app} | grep -v grep | awk '{print $2}') + for id in ${ids}; do + kill -9 ${id} >/dev/null 2>&1 + done +} + +## Wait for the tunnel to finish connecting. +wait_for_tunnel() { + while : ; do + [ -n "$(grep 'Press Control-C to stop.' backup.txt)" ] && break + echo "Waiting for tunnel..." + sleep 1 + done +} + +date + +## Create a tunnel through the application to pull the database. +echo "Creating tunnel to database..." 
+if [[ ${BACKUP_ENV} = "prod" ]]; then + cf enable-ssh ${project}-drupal-${BACKUP_ENV} + cf restart --strategy rolling ${project}-drupal-${BACKUP_ENV} +fi +cf connect-to-service --no-client ${project}-drupal-${BACKUP_ENV} ${project}-mysql-${BACKUP_ENV} > backup.txt & + +wait_for_tunnel + +date + +## Create variables and credential file for MySQL login. +echo "Backing up '${BACKUP_ENV}' database..." +{ + host=$(cat backup.txt | grep -i host | awk '{print $2}') + port=$(cat backup.txt | grep -i port | awk '{print $2}') + username=$(cat backup.txt | grep -i username | awk '{print $2}') + password=$(cat backup.txt | grep -i password | awk '{print $2}') + dbname=$(cat backup.txt | grep -i '^name' | awk '{print $2}') + + mkdir ~/.mysql && chmod 0700 ~/.mysql + + echo "[mysqldump]" > ~/.mysql/mysqldump.cnf + echo "user=${username}" >> ~/.mysql/mysqldump.cnf + echo "password=${password}" >> ~/.mysql/mysqldump.cnf + chmod 400 ~/.mysql/mysqldump.cnf + + ## Exclude tables without data + declare -a excluded_tables=( + "cache_advagg_minify" + "cache_bootstrap" + "cache_config" + "cache_container" + "cache_data" + "cache_default" + "cache_discovery" + "cache_discovery_migration" + "cache_dynamic_page_cache" + "cache_entity" + "cache_menu" + "cache_migrate" + "cache_page" + "cache_render" + "cache_rest" + "cache_toolbar" + "sessions" + "watchdog" + "webprofiler" + ) + + ignored_tables_string='' + for table in "${excluded_tables[@]}" + do + ignored_tables_string+=" --ignore-table=${dbname}.${table}" + done + + ## Dump structure + mysqldump \ + --defaults-extra-file=~/.mysql/mysqldump.cnf \ + --host=${host} \ + --port=${port} \ + --protocol=TCP \ + --no-data \ + ${dbname} > backup_${BACKUP_ENV}.sql + + ## Dump content + mysqldump \ + --defaults-extra-file=~/.mysql/mysqldump.cnf \ + --host=${host} \ + --port=${port} \ + --protocol=TCP \ + --no-create-info \ + --skip-triggers \ + ${ignored_tables_string} \ + ${dbname} >> backup_${BACKUP_ENV}.sql + + ## Patch out any MySQL 'SET' 
commands that require admin. + sed -i 's/^SET /-- &/' backup_${BACKUP_ENV}.sql + +} >/dev/null 2>&1 + +date + +## Kill the backgrounded SSH tunnel. +echo "Cleaning up old connections..." +{ + kill_pids "connect-to-service" +} >/dev/null 2>&1 + +## Clean up. +if [ ${BACKUP_ENV} = "prod" ]; then + cf disable-ssh ${project}-drupal-${BACKUP_ENV} +fi +rm -rf backup.txt ~/.mysql + +date + +# Download media files. +backup_media="cms/public/media" + +echo "Downloading media files..." +{ + cf target -s "${cf_space}" + + service="${project}-storage-${BACKUP_ENV}" + service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f + cf create-service-key "${service}" "${service_key}" + sleep 2 + s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + rm -rf ${backup_media} + mkdir -p ${backup_media} + aws s3 sync --no-verify-ssl s3://${bucket}/${backup_media} ${backup_media}/ 2>/dev/null + + cf delete-service-key "${service}" "${service_key}" -f +} >/dev/null 2>&1 + +date diff --git a/scripts/pipeline/build-theme.sh b/scripts/pipeline/build-theme.sh new file mode 100755 index 00000000..8858a1ff --- /dev/null +++ b/scripts/pipeline/build-theme.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +npm install -g gulp +cd web/themes/custom/digitalgov +npm install +npm run build diff --git a/scripts/pipeline/cloud-gov-deploy.sh b/scripts/pipeline/cloud-gov-deploy.sh new file mode 100755 index 00000000..3cd6410f --- /dev/null +++ b/scripts/pipeline/cloud-gov-deploy.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +mv manifest.yml manifest.tmp +envsubst < manifest.tmp > manifest.yml + +cf push --strategy rolling + +cf 
add-network-policy "${PROJECT}-${APP_NAME}-${BRANCH}" "${PROJECT}-${WAF_NAME}-${BRANCH}" -s "${CF_SPACE}" --protocol tcp --port ${BUILDPACK_PORT} +cf add-network-policy "${PROJECT}-${WAF_NAME}-${BRANCH}" "${PROJECT}-${APP_NAME}-${BRANCH}" -s "${CF_SPACE}" --protocol tcp --port ${BUILDPACK_PORT} diff --git a/scripts/pipeline/cloud-gov-login.sh b/scripts/pipeline/cloud-gov-login.sh new file mode 100755 index 00000000..4ee5a5a1 --- /dev/null +++ b/scripts/pipeline/cloud-gov-login.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +echo "Logging into Cloud.gov..." +{ + cf login \ + -a https://api.fr.cloud.gov \ + -u "${CF_USER}" \ + -p "${CF_PASSWORD}" \ + -o "${CF_ORG}" \ + -s "${CF_SPACE}" > login.log || login_error=1 +} >/dev/null 2>&1 + +[ -n "${login_error}" ] && echo "Error logging into Cloud.gov!" && exit 1 + +echo "Login successful!" \ No newline at end of file diff --git a/scripts/pipeline/cloud-gov-post-deploy-upkeep.sh b/scripts/pipeline/cloud-gov-post-deploy-upkeep.sh new file mode 100755 index 00000000..35409a3a --- /dev/null +++ b/scripts/pipeline/cloud-gov-post-deploy-upkeep.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +set -e + +# Enable SSH if in prod +if [[ ${CIRCLE_BRANCH} = "prod" ]]; then + cf enable-ssh ${project}-drupal-${CIRCLE_BRANCH} + cf restart --strategy rolling ${project}-drupal-${CIRCLE_BRANCH} + + # Wait until drupal app is running + until cf app ${project}-drupal-${CIRCLE_BRANCH} | grep running + do + sleep 1 + done + +fi + +echo "Running upkeep..." +cf ssh ${project}-drupal-${CIRCLE_BRANCH} --command "ENV=${CIRCLE_BRANCH} PATH=/home/vcap/deps/1/bin:/home/vcap/deps/0/bin:/usr/local/bin:/usr/bin:/bin:/home/vcap/app/php/bin:/home/vcap/app/php/sbin:/home/vcap/app/php/bin:/home/vcap/app/vendor/drush/drush app/scripts/upkeep >/dev/null 2>&1 && echo 'Successfully completed upkeep!' || echo 'Failed to complete upkeep!'" + +## Clean up. 
+if [[ ${CIRCLE_BRANCH} = "prod" ]]; then + cf disable-ssh ${project}-drupal-${CIRCLE_BRANCH} +fi diff --git a/scripts/pipeline/cloud-gov-post-deploy.sh b/scripts/pipeline/cloud-gov-post-deploy.sh new file mode 100755 index 00000000..19bb7eb4 --- /dev/null +++ b/scripts/pipeline/cloud-gov-post-deploy.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +set -e + +# Wait until drupal app is running +until cf app "${PROJECT}-drupal-${CF_SPACE}" | grep running +do + sleep 1 +done + +# Enable SSH if in prod +if [[ ${CF_SPACE} = "prod" ]]; then + cf enable-ssh "${PROJECT}-drupal-${CF_SPACE}" + cf restart --strategy rolling "${PROJECT}-drupal-${CF_SPACE}" + + # Wait until drupal app is running + until cf app "${PROJECT}-drupal-${CF_SPACE}" | grep running + do + sleep 1 + done + +fi + +echo "Running post deploy steps..." +cf ssh "${PROJECT}-drupal-${CF_SPACE}" --command "PATH=/home/vcap/deps/1/bin:/home/vcap/deps/0/bin:/usr/local/bin:/usr/bin:/bin:/home/vcap/app/php/bin:/home/vcap/app/php/sbin:/home/vcap/app/php/bin:/home/vcap/app/vendor/drush/drush app/scripts/post-deploy >/dev/null 2>&1 && echo 'Successfully completed post deploy!' || echo 'Failed to complete post deploy!'" + +## Clean up. +if [[ ${CF_SPACE} = "prod" ]]; then + cf disable-ssh "${PROJECT}-drupal-${CF_SPACE}" +fi diff --git a/scripts/pipeline/cloud-gov-remote-command.sh b/scripts/pipeline/cloud-gov-remote-command.sh new file mode 100755 index 00000000..dffe3082 --- /dev/null +++ b/scripts/pipeline/cloud-gov-remote-command.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +APP_NAME=$1 +command=$2 +show_output=$3 + +APP_GUID=$(cf app "${APP_NAME}" --guid) +bin_path="/var/www/vendor/bin/:/home/vcap/deps/0/bin/" + +[ -z "${APP_NAME}" ] || [ -z "${command}" ] && echo "Command error! 
Valid format: ${0} " && exit 1 + +ssh_config=/tmp/ssh_config +ssh_passwd="/tmp/ssh_password" + +precommand="touch ~/exports.sh && source ~/exports.sh && PATH=\$PATH:${bin_path}" + +cat >${ssh_config} < ${ssh_passwd} + +if [ -z "${show_output}" ]; then + echo "Running command: '$(echo "${command}" | cut -d' ' -f1,2)'..." + { + sshpass -f "${ssh_passwd}" ssh -F "${ssh_config}" "ssh.fr.cloud.gov" "${precommand} ${command}" + } >/dev/null 2>&1 +else + sshpass -f "${ssh_passwd}" ssh -F "${ssh_config}" "ssh.fr.cloud.gov" "${precommand} ${command}" +fi \ No newline at end of file diff --git a/scripts/pipeline/cloud-gov-scp-file.sh b/scripts/pipeline/cloud-gov-scp-file.sh new file mode 100755 index 00000000..9aa62f1c --- /dev/null +++ b/scripts/pipeline/cloud-gov-scp-file.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +APP_NAME=$1 +LOCAL=$2 +REMOTE=$3 + +APP_GUID=$(cf app "${APP_NAME}" --guid) + +ssh_config=/tmp/ssh_config +ssh_passwd="/tmp/ssh_password" + +cat >${ssh_config} < ${ssh_passwd} + +if [ -d "${LOCAL}" ]; then + echo "Uploading folder '${LOCAL}'..." + sshpass -f "${ssh_passwd}" scp -F ${ssh_config} -r "${LOCAL}" "ssh.fr.cloud.gov:${REMOTE}" +else + echo "Uploading file '${LOCAL}'..." + sshpass -f "${ssh_passwd}" scp -F ${ssh_config} "${LOCAL}" "ssh.fr.cloud.gov:${REMOTE}" +fi + +echo "Upload complete." \ No newline at end of file diff --git a/scripts/pipeline/cloud-gov-waf-version.sh b/scripts/pipeline/cloud-gov-waf-version.sh new file mode 100755 index 00000000..68d4bb2e --- /dev/null +++ b/scripts/pipeline/cloud-gov-waf-version.sh @@ -0,0 +1,59 @@ +#!/bin/bash + +function version { + echo "$@" | awk -F. 
'{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; +} + +if [ -z "$(which pup)" ] ; then + if [ "$(whoami)" != "root" ] ; then + sudo wget -q --show-progress https://github.com/ericchiang/pup/releases/download/v0.4.0/pup_v0.4.0_linux_amd64.zip + sudo unzip pup_v0.4.0_linux_amd64.zip -d /usr/local/bin + else + wget -q --show-progress https://github.com/ericchiang/pup/releases/download/v0.4.0/pup_v0.4.0_linux_amd64.zip + unzip pup_v0.4.0_linux_amd64.zip -d /usr/local/bin + fi +fi + +declare CURRENT_BP_VERSION +CURRENT_BP_VERSION=$(cf app "${PROJECT}-waf-${BRANCH}" | grep nginx_buildpack | xargs | awk '{print $2}') + +declare NEW_BP_VERSION +NEW_BP_VERSION=$(cf buildpacks | grep nginx | grep cflinuxfs4 | awk '{print $NF}' | grep -Eo '[0-9]\.[0-9]?(.[0-9]+)') + +new_bp_version=$(version "${NEW_BP_VERSION}") +current_bp_version=$(version "${CURRENT_BP_VERSION}") + +curl -Ls "https://github.com/cloudfoundry/nginx-buildpack/releases/tag/v${CURRENT_BP_VERSION}" > /tmp/current_bp_version +declare current_nginx_version +current_nginx_version=$(cat /tmp/current_bp_version | pup 'table json{}' | jq -r '.[].children[].children[] | select(.children[].text == "nginx") | select(.children[].text == "cflinuxfs4") | .children[].text' | tr '\n' ' ' | sed -E 's/cflinuxfs4 /cflinuxfs4\n/g' | sort -r | head -n 1 | awk '{print $2}') + +curl -Ls "https://github.com/cloudfoundry/nginx-buildpack/releases/tag/v${NEW_BP_VERSION}" > /tmp/new_nginx_version +declare default_nginx_binary_version +default_nginx_binary_version=$(cat /tmp/new_nginx_version | pup 'table json{}' | jq -r '.[].children[].children[] | select(.children[].text == "nginx") | select(.children[].text | contains(".x")) | .children[].text' | grep -v nginx | sed 's/.\{1\}$//') + +declare new_nginx_version +new_nginx_version=$(cat /tmp/new_nginx_version | pup 'table json{}' | jq -r ".[].children[].children[] | select(.children[].text == \"nginx\") | select(.children[].text == \"cflinuxfs4\") | select(.children[].text | 
contains(\"${default_nginx_binary_version}\")) | .children[].text" | tr '\n' ' ' | sed -E 's/cflinuxfs4 /cflinuxfs4\n/g' | sort -r | head -n 1 | awk '{print $2}') + + +echo "new_nginx_version=${new_nginx_version}" | tee -a "${GITHUB_OUTPUT}" +echo "current_nginx_version=${current_nginx_version}" | tee -a "${GITHUB_OUTPUT}" +echo "current_bp_version=${CURRENT_BP_VERSION}" | tee -a "${GITHUB_OUTPUT}" +echo "new_bp_version=${NEW_BP_VERSION}" | tee -a "${GITHUB_OUTPUT}" + +export new_nginx_version=${new_nginx_version} +export current_nginx_version=${current_nginx_version} +export current_bp_version=${CURRENT_BP_VERSION} +export new_bp_version=${NEW_BP_VERSION} + +if [ "${new_bp_version}" -ne "${current_bp_version}" ]; then + echo "New version of buildpack found!" + echo "update=true" | tee -a "${GITHUB_OUTPUT}" + export update=true +else + echo "Running latest version of the buildpack!" +fi + +echo "new_nginx_version=${new_nginx_version}" +echo "current_nginx_version=${current_nginx_version}" +echo "current_bp_version=${CURRENT_BP_VERSION}" +echo "new_bp_version=${NEW_BP_VERSION}" diff --git a/scripts/pipeline/cloud-gov-wait-for-app-start.sh b/scripts/pipeline/cloud-gov-wait-for-app-start.sh new file mode 100755 index 00000000..176a7f81 --- /dev/null +++ b/scripts/pipeline/cloud-gov-wait-for-app-start.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +app_name=$1 + +while : ; do + app_status=$( cf app "${app_name}" | grep "#0" | awk '{print $2}' ) + if [ "${app_status}" == "running" ]; then + break + else + echo "waiting for application to start..." + sleep 2 + fi +done \ No newline at end of file diff --git a/scripts/pipeline/composer-install.sh b/scripts/pipeline/composer-install.sh new file mode 100755 index 00000000..a333b607 --- /dev/null +++ b/scripts/pipeline/composer-install.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +## To work for rootless and root images. +echo "Installing composer..." 
+{ + EXPECTED_CHECKSUM="$(php -r 'copy("https://composer.github.io/installer.sig", "php://stdout");')" + php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" + ACTUAL_CHECKSUM="$(php -r "echo hash_file('sha384', 'composer-setup.php');")" + if [ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]; then + >&2 echo 'ERROR: Invalid installer checksum' + rm composer-setup.php + exit 1 + fi + + php composer-setup.php --quiet + RESULT=$? + rm composer-setup.php + + chmod +x composer.phar + if [ "$(whoami)" != "root" ]; then + sudo mv composer.phar /usr/local/bin/composer + else + mv composer.phar /usr/local/bin/composer + fi + exit $RESULT +} >/dev/null 2>&1 \ No newline at end of file diff --git a/scripts/pipeline/deb-awscli.sh b/scripts/pipeline/deb-awscli.sh new file mode 100755 index 00000000..43a9cf3c --- /dev/null +++ b/scripts/pipeline/deb-awscli.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +## To work for rootless and root images. +echo "Installing AWS CLI..." +{ + if [ "$(whoami)" != "root" ]; then + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" + unzip awscliv2.zip + sudo ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/aws-cli --update + else + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" + unzip awscliv2.zip + ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/aws-cli --update + fi + +} >/dev/null 2>&1 +echo "Done!" diff --git a/scripts/pipeline/deb-basic-deps.sh b/scripts/pipeline/deb-basic-deps.sh new file mode 100755 index 00000000..bd01db0b --- /dev/null +++ b/scripts/pipeline/deb-basic-deps.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +## To work for rootless and root images. +echo "Installing basic dependencies..." 
+{ + if [ "$(whoami)" != "root" ]; then + sudo apt-get update + sudo apt-get install -y curl gettext + else + apt-get update + apt-get install -y curl gettext + fi +} >/dev/null 2>&1 diff --git a/scripts/pipeline/deb-cf-install.sh b/scripts/pipeline/deb-cf-install.sh new file mode 100755 index 00000000..e8802512 --- /dev/null +++ b/scripts/pipeline/deb-cf-install.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +echo "Installing CloudFoundry repository..." +{ + curl -L "https://packages.cloudfoundry.org/stable?release=linux64-binary&version=v8&source=github" | tar -zx + if [ "$(whoami)" != "root" ]; then + sudo mv cf cf8 /usr/local/bin + else + mv cf cf8 /usr/local/bin + fi +} >/dev/null 2>&1 diff --git a/scripts/pipeline/deb-mysql-client-install.sh b/scripts/pipeline/deb-mysql-client-install.sh new file mode 100755 index 00000000..57e68efa --- /dev/null +++ b/scripts/pipeline/deb-mysql-client-install.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +echo "Installing MySQL client..." +{ + ## To work for rootless and root images. + if [ "$(whoami)" != "root" ]; then + sudo apt-get update + sudo apt-get install -y mysql-client-8.0 + else + apt-get update + apt-get install -y mysql-client-8.0 + fi +} >/dev/null 2>&1 diff --git a/scripts/pipeline/deb-php-install.sh b/scripts/pipeline/deb-php-install.sh new file mode 100755 index 00000000..a20319ab --- /dev/null +++ b/scripts/pipeline/deb-php-install.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +## To work for rootless and root images. +echo "Installing PHP CLI..." 
+{ + if [ "$(whoami)" != "root" ]; then + sudo apt-get update + sudo apt-get install -y php-cli + else + apt-get update + apt-get install -y php-cli + fi +} >/dev/null 2>&1 \ No newline at end of file diff --git a/scripts/pipeline/downsync-preprod.sh b/scripts/pipeline/downsync-preprod.sh new file mode 100755 index 00000000..eb4d6451 --- /dev/null +++ b/scripts/pipeline/downsync-preprod.sh @@ -0,0 +1,112 @@ +#!/bin/bash +if [ ${RESTORE_ENV} = 'prod' ]; then + echo "Restoring to prod is not allowed." + exit 1 +fi + +kill_pids() { + app=$1 + ids=$(ps aux | grep ${app} | grep -v grep | awk '{print $2}') + for id in ${ids}; do + kill -9 ${id} >/dev/null 2>&1 + done +} + +## Wait for the tunnel to finish connecting. +wait_for_tunnel() { + while : ; do + [ -n "$(grep 'Press Control-C to stop.' restore.txt)" ] && break + echo "Waiting for tunnel..." + sleep 1 + done +} + +date + +## Download latest prod backup. +echo "Downloading latest prod database backup..." +{ + cf target -s "${project}-prod" >/dev/null 2>&1 + + export service="${project}-backup" + export service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f >/dev/null 2>&1 + cf create-service-key "${service}" "${service_key}" >/dev/null 2>&1 + sleep 2 + + export s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + # copy latest database from top level + aws s3 cp s3://${bucket}/prod/latest.sql.gz ./latest.sql.gz --no-verify-ssl >/dev/null 2>&1 && echo "Successfully copied latest.sql.gz from S3!" || echo "Failed to copy latest.sql.gz from S3!" 
+ gunzip latest.sql.gz + + cf delete-service-key "${service}" "${service_key}" -f >/dev/null 2>&1 +} + +date + +## Create a tunnel through the application to restore the database. +echo "Creating tunnel to database..." +if [ ${RESTORE_ENV} = 'test' ]; then + cf target -s "${project}-dev" >/dev/null 2>&1 +else + cf target -s "${project}-${RESTORE_ENV}" >/dev/null 2>&1 +fi +cf connect-to-service --no-client ${project}-drupal-${RESTORE_ENV} ${project}-mysql-${RESTORE_ENV} > restore.txt & + +wait_for_tunnel + +date + +## Create variables and credential file for MySQL login. +echo "Restoring 'prod' database to '${RESTORE_ENV}'..." +{ + host=$(cat restore.txt | grep -i host | awk '{print $2}') + port=$(cat restore.txt | grep -i port | awk '{print $2}') + username=$(cat restore.txt | grep -i username | awk '{print $2}') + password=$(cat restore.txt | grep -i password | awk '{print $2}') + dbname=$(cat restore.txt | grep -i '^name' | awk '{print $2}') + + mkdir ~/.mysql && chmod 0700 ~/.mysql + + echo "[client]" > ~/.mysql/mysql.cnf + echo "user=${username}" >> ~/.mysql/mysql.cnf + echo "password=${password}" >> ~/.mysql/mysql.cnf + chmod 400 ~/.mysql/mysql.cnf + + mysql \ + --defaults-extra-file=~/.mysql/mysql.cnf \ + --host=${host} \ + --port=${port} \ + --protocol=TCP \ + --database=${dbname} < latest.sql + +} >/dev/null 2>&1 + +date + +## Kill the backgrounded SSH tunnel. +echo "Cleaning up old connections..." +{ + kill_pids "connect-to-service" +} >/dev/null 2>&1 + +## Clean up. +rm -rf restore.txt ~/.mysql latest.sql + +date + +echo "Running 'drush cr' on '${RESTORE_ENV}' database..." +source $(pwd $(dirname $0))/scripts/pipeline/cloud-gov-remote-command.sh "${project}-drupal-${RESTORE_ENV}" "drush cr" + +date + +echo "Running 'drush image-flush --all' on '${RESTORE_ENV}'..." 
+source $(pwd $(dirname $0))/scripts/pipeline/cloud-gov-remote-command.sh "${project}-drupal-${RESTORE_ENV}" "drush image-flush --all" + +date diff --git a/scripts/pipeline/phpcs-install.sh b/scripts/pipeline/phpcs-install.sh new file mode 100755 index 00000000..5dcfe85f --- /dev/null +++ b/scripts/pipeline/phpcs-install.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +echo "Installing PHP CodeSniffer..." +{ + git clone --branch 8.3.x http://git.drupal.org/project/coder.git + cd coder + composer install + cd .. + export PATH="$PATH:$HOME/project/coder/vendor/bin" +} >/dev/null 2>&1 \ No newline at end of file diff --git a/scripts/pipeline/prod-db-backup.sh b/scripts/pipeline/prod-db-backup.sh new file mode 100755 index 00000000..f1e432ea --- /dev/null +++ b/scripts/pipeline/prod-db-backup.sh @@ -0,0 +1,144 @@ +#!/bin/bash +if [ ${CIRCLE_BRANCH} != 'prod' ]; then + echo "This script is for backing up the prod database only." + exit 1 +fi + +kill_pids() { + app=$1 + ids=$(ps aux | grep ${app} | grep -v grep | awk '{print $2}') + for id in ${ids}; do + kill -9 ${id} >/dev/null 2>&1 + done +} + +## Wait for the tunnel to finish connecting. +wait_for_tunnel() { + while : ; do + [ -n "$(grep 'Press Control-C to stop.' backup.txt)" ] && break + echo "Waiting for tunnel..." + sleep 1 + done +} + +date + +## Create a tunnel through the application to pull the database. +echo "Creating tunnel to database..." +cf enable-ssh ${project}-drupal-prod +cf restart --strategy rolling ${project}-drupal-prod +cf connect-to-service --no-client ${project}-drupal-prod ${project}-mysql-prod > backup.txt & + +wait_for_tunnel + +date + +## Create variables and credential file for MySQL login. +echo "Backing up 'prod' database..." 
+{ + host=$(cat backup.txt | grep -i host | awk '{print $2}') + port=$(cat backup.txt | grep -i port | awk '{print $2}') + username=$(cat backup.txt | grep -i username | awk '{print $2}') + password=$(cat backup.txt | grep -i password | awk '{print $2}') + dbname=$(cat backup.txt | grep -i '^name' | awk '{print $2}') + + mkdir ~/.mysql && chmod 0700 ~/.mysql + + echo "[mysqldump]" > ~/.mysql/mysqldump.cnf + echo "user=${username}" >> ~/.mysql/mysqldump.cnf + echo "password=${password}" >> ~/.mysql/mysqldump.cnf + chmod 400 ~/.mysql/mysqldump.cnf + + ## Exclude tables without data + declare -a excluded_tables=( + "cache_advagg_minify" + "cache_bootstrap" + "cache_config" + "cache_container" + "cache_data" + "cache_default" + "cache_discovery" + "cache_discovery_migration" + "cache_dynamic_page_cache" + "cache_entity" + "cache_menu" + "cache_migrate" + "cache_page" + "cache_render" + "cache_rest" + "cache_toolbar" + "sessions" + "watchdog" + "webprofiler" + ) + + ignored_tables_string='' + for table in "${excluded_tables[@]}" + do + ignored_tables_string+=" --ignore-table=${dbname}.${table}" + done + + ## Dump structure + mysqldump \ + --defaults-extra-file=~/.mysql/mysqldump.cnf \ + --host=${host} \ + --port=${port} \ + --protocol=TCP \ + --no-data \ + ${dbname} > backup_prod.sql + + ## Dump content + mysqldump \ + --defaults-extra-file=~/.mysql/mysqldump.cnf \ + --host=${host} \ + --port=${port} \ + --protocol=TCP \ + --no-create-info \ + --skip-triggers \ + ${ignored_tables_string} \ + ${dbname} >> backup_prod.sql + + ## Patch out any MySQL 'SET' commands that require admin. + sed -i 's/^SET /-- &/' backup_prod.sql + +} >/dev/null 2>&1 + +date + +## Kill the backgrounded SSH tunnel. +echo "Cleaning up old connections..." +{ + kill_pids "connect-to-service" +} >/dev/null 2>&1 + +## Disable ssh. +echo "Disabling ssh..." +cf disable-ssh ${project}-drupal-prod + +rm -rf backup.txt ~/.mysql + +echo "Saving to backup bucket..." 
+{ + cf target -s "${project}-prod" >/dev/null 2>&1 + + export service="${project}-backup" + export service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f >/dev/null 2>&1 + cf create-service-key "${service}" "${service_key}" >/dev/null 2>&1 + sleep 2 + + export s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + # copy latest database to top level + gzip backup_prod.sql + aws s3 cp ./backup_prod.sql.gz s3://${bucket}/prod/latest.sql.gz --no-verify-ssl >/dev/null 2>&1 && echo "Successfully copied latest.sql.gz to S3!" || echo "Failed to copy latest.sql.gz to S3!" + + cf delete-service-key "${service}" "${service_key}" -f >/dev/null 2>&1 +} + +date diff --git a/scripts/pipeline/scheduled-backup.sh b/scripts/pipeline/scheduled-backup.sh new file mode 100755 index 00000000..93ef4ea5 --- /dev/null +++ b/scripts/pipeline/scheduled-backup.sh @@ -0,0 +1,149 @@ +#!/bin/bash + +backup_space=$1 + +export BACKUP_ENV=${CIRCLE_BRANCH} + +export backup_folder=$(date "+%Y/%m/%d") +export now=$(date +"%H.%M.%S") +export today=$(date +%F) + +backup_media="cms/public/media" + +rm -rf scheduled_backup/ + +mkdir -p scheduled_backup +cd scheduled_backup + +date + +echo "Downloading media files..." 
+{ + cf target -s "${cf_space}" + + service="${project}-storage-${BACKUP_ENV}" + service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f + cf create-service-key "${service}" "${service_key}" + sleep 2 + s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + rm -rf ${BACKUP_ENV} + + aws s3 sync --delete --no-verify-ssl s3://${bucket}/${backup_media} ${backup_media}/ 2>/dev/null + tar czvf media_${now}.tar.gz ${backup_media} + + + cf delete-service-key "${service}" "${service_key}" -f +} >/dev/null 2>&1 + +date + +echo "Downloading static files..." +{ + cf target -s "${cf_space}" + + service="${project}-static-${BACKUP_ENV}" + service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f + cf create-service-key "${service}" "${service_key}" + sleep 2 + s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + rm -rf ${BACKUP_ENV} + + aws s3 sync --no-verify-ssl s3://${bucket}/ static_files/ 2>/dev/null + tar czvf static_${now}.tar.gz static_files/ + + + cf delete-service-key "${service}" "${service_key}" -f +} >/dev/null 2>&1 + +date + +echo "Downloading terraform state..." 
+{ + cf target -s "${backup_space}" + + service="${project}-terraform-backend" + service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f + cf create-service-key "${service}" "${service_key}" + + sleep 2 + s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + rm -rf "env:" + aws s3 cp --recursive --no-verify-ssl s3://${bucket}/ . 2>/dev/null + + tar czf terraform_state_${now}.tar.gz "env:" + + cf delete-service-key "${service}" "${service_key}" -f +} >/dev/null 2>&1 + +date + +echo "Saving to backup bucket..." +{ + cf target -s "${backup_space}" >/dev/null 2>&1 + + export service="${project}-backup" + export service_key="${service}-key" + cf delete-service-key "${service}" "${service_key}" -f >/dev/null 2>&1 + cf create-service-key "${service}" "${service_key}" >/dev/null 2>&1 + sleep 2 + + export s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export AWS_ACCESS_KEY_ID=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export bucket=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export AWS_DEFAULT_REGION=$(echo "${s3_credentials}" | jq -r '.credentials.region') + export AWS_SECRET_ACCESS_KEY=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + + rm -f backup_${now}.sql + cp ../backup_${BACKUP_ENV}.sql backup_${now}.sql + gzip backup_${now}.sql + + aws s3 cp ./ s3://${bucket}/${BACKUP_ENV}/${backup_folder} --exclude "*" --include "*.sql.gz" --include "*.tar.gz" --recursive --no-verify-ssl >/dev/null 2>&1 + + tar czf latest.tar.gz *.gz + + # delete latest and backups older than 15 days in 
the env's top level directory + aws s3 rm s3://${bucket}/${BACKUP_ENV}/latest.tar.gz --no-verify-ssl >/dev/null 2>&1 + aws s3 ls s3://${bucket}/${BACKUP_ENV}/ | while read -r line; do + create_date=$(echo $line | awk {'print $1" "$2'}) + create_date=$(date --date "$create_date" +%s 2>/dev/null) + older_than=$(date --date "15 days ago" +%s) + if [[ $create_date -le $older_than ]]; then + filename=$(echo $line | awk {'print $4'}) + if [[ $filename != "" ]]; then + aws s3 rm s3://${bucket}/${BACKUP_ENV}/$filename --no-verify-ssl >/dev/null 2>&1 && echo "Successfully deleted $filename from S3!" || echo "Failed to delete $filename from S3!" + fi + fi + done; + + aws s3 cp ./latest.tar.gz s3://${bucket}/${BACKUP_ENV}/ --no-verify-ssl >/dev/null 2>&1 && echo "Successfully copied latest.tar.gz to S3!" || echo "Failed to copy latest.tar.gz to S3!" + aws s3 cp ./latest.tar.gz s3://${bucket}/${BACKUP_ENV}/${today}.tar.gz --no-verify-ssl >/dev/null 2>&1 && echo "Successfully copied ${today}.tar.gz to S3!" || echo "Failed to copy ${today}.tar.gz to S3!" + + # copy latest database to top level + aws s3 cp ./backup_${now}.sql.gz s3://${bucket}/${BACKUP_ENV}/latest.sql.gz --no-verify-ssl >/dev/null 2>&1 && echo "Successfully copied latest.sql.gz to S3!" || echo "Failed to copy latest.sql.gz to S3!" + + cf delete-service-key "${service}" "${service_key}" -f >/dev/null 2>&1 +} + +date diff --git a/scripts/pipeline/terraform-build-waf-plugin.sh b/scripts/pipeline/terraform-build-waf-plugin.sh new file mode 100755 index 00000000..710f1aef --- /dev/null +++ b/scripts/pipeline/terraform-build-waf-plugin.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +[ -z "${new_nginx_version}" ] && echo "NGINX version not set!" && exit 1 +[ -z "${modsecurity_nginx_version}" ] && echo "Modsecurity version not set!" && exit 1 +[ -z "${ubuntu_version}" ] && echo "Ubuntu version not set!" && exit 1 + +## The current root path. +CWD=$(pwd) + +## Path to the WAF application. 
+APP_PATH=terraform/applications/nginx-waf + +## Change directory to the Dockerfile path. +cd "${APP_PATH}/.docker/" || exit 1 + +## Run make, which builds the module and moves it to '../modules/'. +make + +## Change directory back to the root path. +cd "${CWD}" || exit 1 + +## Make sure the module directory on the bastion exists. +./scripts/pipeline/cloud-gov-remote-command.sh "${TF_BASTION}" "mkdir -p ${REPO_NAME}/${APP_PATH}/modules/" + +## SCP the module to the bastion. +./scripts/pipeline/cloud-gov-scp-file.sh "${TF_BASTION}" "${APP_PATH}/modules/" "${REPO_NAME}/${APP_PATH}/" \ No newline at end of file diff --git a/scripts/post-deploy b/scripts/post-deploy new file mode 100755 index 00000000..d79e2648 --- /dev/null +++ b/scripts/post-deploy @@ -0,0 +1,17 @@ +#! /bin/bash + +echo "Updating drupal ... " +drush state:set system.maintenance_mode 1 -y +drush cr +# drush updatedb --no-cache-clear -y +# drush cim -y +# drush locale-check +# drush locale-update + +# echo "Uploading public files to S3 ..." +# drush s3fs-rc +# drush s3fs-cl -y --scheme=public --condition=newer + +#drush cr +drush state:set system.maintenance_mode 0 -y +echo "Post deploy finished!" diff --git a/scripts/start b/scripts/start new file mode 100755 index 00000000..ec348100 --- /dev/null +++ b/scripts/start @@ -0,0 +1,13 @@ +#! 
/bin/bash + +home="/home/vcap" +app_path="${home}/app" + +## Start PHP FPM +${app_path}/php/sbin/php-fpm -p "${app_path}/php/etc" -y "${app_path}/php/etc/php-fpm.conf" -c "${app_path}/php/etc" & + +## Start Apache +${app_path}/httpd/bin/apachectl -f "${app_path}/httpd/conf/httpd.conf" -k start -DFOREGROUND & + +## Run entry point +${app_path}/scripts/entrypoint & \ No newline at end of file diff --git a/scripts/upkeep b/scripts/upkeep new file mode 100755 index 00000000..bb76f31c --- /dev/null +++ b/scripts/upkeep @@ -0,0 +1,131 @@ +#!/bin/bash +set -e + +home="/home/vcap" +app_path="${home}/app" +html_path="${app_path}/html" + +source ${home}/.bashrc + +mkdir -p ${html_path} + +export PYTHONWARNINGS="ignore:Unverified HTTPS request" + +export application_uri=$(echo $VCAP_APPLICATION | jq -r '.application_uris[]') + +export AWS_ACCESS_KEY_ID=$(echo $VCAP_SERVICES | jq -r '.s3[] | select(.name | strings | test("static")).credentials.access_key_id') +export AWS_SECRET_ACCESS_KEY=$(echo $VCAP_SERVICES | jq -r '.s3[] | select(.name | strings | test("static")).credentials.secret_access_key') +export AWS_DEFAULT_REGION=$(echo $VCAP_SERVICES | jq -r '.s3[] | select(.name | strings | test("static")).credentials.region') + +export bucket_name=$(echo $VCAP_SERVICES | jq -r '.s3[] | select(.name | strings | test("static")).name') +export bucket=$(echo $VCAP_SERVICES | jq -r '.s3[] | select(.name | strings | test("static")).credentials.bucket') +export bucket_endpoint=$(echo $VCAP_SERVICES | jq -r '.s3[] | select(.name | strings | test("static")).credentials.endpoint') + +export ssg_endpoint="https://ssg-${environment}.vote.gov" +[ "${environment}" = "stage" ] && export ssg_endpoint="https://staging.vote.gov" +[ "${environment}" = "prod" ] && export ssg_endpoint="https://vote.gov" +export ssg_sitemap_endpoint=ssg_endpoint + +cd ${app_path} +echo "**************************************************" +echo "Running 'drush cron' in '${environment}'..." 
+echo "**************************************************" +drush --uri=${ssg_endpoint} cron +echo "'drush cron' task...completed!" +echo "" + +echo "**************************************************" +echo "Running 'drush tome:static' in '${environment}'..." +echo "**************************************************" +drush tome:static --uri=${ssg_endpoint} --path-count=1 --retry-count=3 -y +drush tome:static-export-path '/sitemap.xml,/sitemap_generator/default/sitemap.xsl' --uri=${ssg_sitemap_endpoint} --retry-count=3 -y +drush cr +echo "'drush tome:static' task...completed!" +echo "" + +echo "**************************************************" +echo "Performing find and replace tasks..." +echo "**************************************************" +echo "-- Replace s3-based file urls with relative urls" +echo "**************************************************" +find ${html_path} -type f -exec grep -l 'http[s]*://[^/]\+/s3/files' {} \; -exec sed -i 's#http[s]*://[^/]\+/s3/files#/s3/files#g' {} + +echo "**************************************************" +echo "-- Replace absolute urls with relative urls in generated files" +echo "**************************************************" +find ${html_path}/sites/default/files -type f -exec grep -l "${ssg_endpoint}/" {} \; -exec sed -i "s#${ssg_endpoint}/#/#g" {} + +echo "Performing find and replace tasks...completed!" +echo "" + +echo "**************************************************" +echo "Adding missing Core assets vendor directory..." +echo "**************************************************" +mkdir -p ${html_path}/core/assets +cp -rfp ${app_path}/web/core/assets/vendor ${html_path}/core/assets/ +echo "Missing Core assets files...completed!" +echo "" + +echo "**************************************************" +echo "Adding missing module files for Sitemap..." 
+echo "**************************************************" +mkdir -p ${html_path}/modules/contrib/simple_sitemap/xsl +cp -rfp ${app_path}/web/modules/contrib/simple_sitemap/xsl/* ${html_path}/modules/contrib/simple_sitemap/xsl/ +echo "Missing module files for Sitemap...completed!" +echo "" + +echo "**************************************************" +echo "Adding missing data files for NVRF..." +echo "**************************************************" +cp -rfp ${app_path}/web/data ${html_path}/data +echo "Missing data files for NVRF...completed!" +echo "" + +echo "**************************************************" +echo "Adding Vote.gov custom theme assets..." +echo "**************************************************" +mkdir -p ${html_path}/themes/custom/votegov +cp -rfp ${app_path}/web/themes/custom/votegov/dist ${html_path}/themes/custom/votegov/ +cp -rfp ${app_path}/web/themes/custom/votegov/fonts ${html_path}/themes/custom/votegov/ +cp -rfp ${app_path}/web/themes/custom/votegov/img ${html_path}/themes/custom/votegov/ +cp -rfp ${app_path}/web/themes/custom/votegov/json ${html_path}/themes/custom/votegov/ +echo "Adding Vote.gov custom theme assets...completed!" +echo "" + +echo "**************************************************" +echo "Deleting 'node' redirect files..." +echo "**************************************************" +find ${html_path} -type d -name "node" -print0 | while IFS= read -r -d '' node_dir; do + # Find index.html files within each 'node' directory that are a redirect. + find "$node_dir" -type f -path "*/index.html" -exec grep -q "http-equiv=\"refresh\"" {} \; -delete -exec dirname {} \; +done +echo "Deleting 'node' redirect files...completed!" +echo "" + +echo "**************************************************" +echo "Deleting taxonomy directories..." +echo "**************************************************" +rm -rf ${html_path}/taxonomy 2>/dev/null +echo "Deleting taxonomy directories...completed!" 
+echo "" + +echo "**************************************************" +echo "Removing miscellaneous files..." +echo "**************************************************" +rm -rf ${html_path}/disabled-state-mail-in-forms 2>/dev/null +echo "Removing miscellaneous files...completed!" +echo "" + +echo "**************************************************" +echo "Removing empty directories..." +echo "**************************************************" +find ${html_path} -type d -empty -delete +echo "Removing empty directories...completed!" +echo "" + +cd ${html_path} +echo "**************************************************" +echo "Copying static files to '${bucket_name}'..." +echo "**************************************************" +aws s3 sync . s3://${bucket} --delete --no-verify-ssl 2>/dev/null +aws s3 website s3://${bucket} --index-document index.html --error-document /404/index.html --no-verify-ssl 2>/dev/null +echo "Copy to '${bucket_name}'...completed!" +echo "" diff --git a/terraform/.gitignore b/terraform/.gitignore new file mode 100755 index 00000000..15b963fd --- /dev/null +++ b/terraform/.gitignore @@ -0,0 +1,53 @@ +## Terraform +**.terraform* +**terraform.tfstate.d* +**.tfvars* +**tfplan* +terraform.tfstate* + +## Other files/folders +*.bak +*.jq +*.log +*.txt +.DS_Store + +# Applications +*.app +*.exe +*.war + +# Large media files +*.mp4 +*.tiff +*.avi +*.flv +*.mov +*.wmv + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Ignore CLI configuration files +.terraformrc +terraform.rc + +!.terraform.lock.hcl +!.terraform-docs* +!TERRAFORM.MD +!terraform.tfvars.tmpl + +## Caddy application +applications/caddy-proxy/caddy +applications/caddy-proxy/Caddyfile +applications/caddy-proxy/*.acl + +## WAF application +applications/nginx-waf/modules/* +applications/nginx-waf/nginx/snippets/*.conf 
+!applications/nginx-waf/nginx/snippets/owasp*.conf +!applications/nginx-waf/packages/* \ No newline at end of file diff --git a/terraform/README.md b/terraform/README.md new file mode 100644 index 00000000..00b6356e --- /dev/null +++ b/terraform/README.md @@ -0,0 +1,16 @@ +# Cloud.gov Drupal Infrastructure + +- Applications + - [Caddy Proxy](applications/caddy-proxy/README.md) + - [Web Application Firewall](applications/nginx-waf/README.md) + - [Terraform Bastion](applications/tf-bastion/README.md) +- [Scripts](docs/scripts.MD) +- Terraform + - modules + - [application](modules/application/README.md) + - [certificate](modules/circleci/README.md) + - [circleci](modules/github/README.md) + - [random](modules/random/README.md) + - [service](modules/service/README.md) + - [locals.tf](docs/locals.tf.MD) + - [Terraform](TERRAFORM.md) diff --git a/terraform/applications/caddy-proxy/.docker/Dockerfile b/terraform/applications/caddy-proxy/.docker/Dockerfile new file mode 100644 index 00000000..c9b63b38 --- /dev/null +++ b/terraform/applications/caddy-proxy/.docker/Dockerfile @@ -0,0 +1,11 @@ +# https://hub.docker.com/_/caddy/tags?page=1&name=builder + +FROM caddy:2.7-builder AS builder + +RUN xcaddy build \ +--with github.com/caddyserver/forwardproxy@master + +RUN cp /usr/bin/caddy /caddy + +HEALTHCHECK NONE +USER caddy diff --git a/terraform/applications/caddy-proxy/.docker/Makefile b/terraform/applications/caddy-proxy/.docker/Makefile new file mode 100644 index 00000000..dda09d36 --- /dev/null +++ b/terraform/applications/caddy-proxy/.docker/Makefile @@ -0,0 +1,7 @@ +caddy-v2-with-forwardproxy: Dockerfile + docker build --platform linux/amd64 --tag caddy . 
+ docker create --name caddy-vol caddy + docker cp caddy-vol:/caddy ../caddy + docker rm -f caddy-vol + docker rmi -f caddy + \ No newline at end of file diff --git a/terraform/applications/caddy-proxy/Caddyfile.tmpl b/terraform/applications/caddy-proxy/Caddyfile.tmpl new file mode 100755 index 00000000..d466a391 --- /dev/null +++ b/terraform/applications/caddy-proxy/Caddyfile.tmpl @@ -0,0 +1,28 @@ +{ + debug + log { + format console + level INFO + } + auto_https off +} + +:8080 { + route { + forward_proxy { + basic_auth ${proxy_username} ${proxy_password} + acl { + deny_file deny.acl + allow_file allow.acl + deny all + } + ports 80 443 22 61443 + serve_pac + } + } + log { + format json + level INFO + output stdout + } +} diff --git a/terraform/applications/caddy-proxy/README.md b/terraform/applications/caddy-proxy/README.md new file mode 100644 index 00000000..63db67e7 --- /dev/null +++ b/terraform/applications/caddy-proxy/README.md @@ -0,0 +1,22 @@ +# Caddy Egress Proxy Server + +The Caddy egress proxy server is an application that facilitates communication to the internet from the Cloud.gov environment. + +## File structure + +- `Caddyfile.tmpl`: Caddy server configuration. Needs to be written to `Caddyfile`, after injecting the proxy username and password. Terraform does this when deploying the application. +- `.docker` + - `Dockerfile`: Builds the Caddy server binary, with the `forwardproxy` plugin. + - `Makefile`: Builds a new version of the Caddy binary, then copies the resulting binary to the directory above this one (`../`). +- `start`: Entrypoint script. Creates two files, `allow.acl` and `deny.acl` from two environmental variables `$PROXY_ALLOW` and `$PROXY_DENY`. The variables should be a space delimitated list of domain names. Caddy requires them to being with `*.`. + +## Docker + +The `.docker` directory, contains a `Makefile` and a `Dockerfile`. 
To build a new Caddy binary, run the following commands: + +``` +cd .docker +make +``` + +A file called `caddy` will be generated in the root directory. \ No newline at end of file diff --git a/terraform/applications/caddy-proxy/start b/terraform/applications/caddy-proxy/start new file mode 100755 index 00000000..8e62b629 --- /dev/null +++ b/terraform/applications/caddy-proxy/start @@ -0,0 +1,12 @@ +#!/bin/bash + +home="/home/vcap" +app_path="${home}/app" + +PATH=${app_path}:${PATH} + +echo ${PROXY_ALLOW} | tr " " "\n" > allow.acl +echo ${PROXY_DENY} | tr " " "\n" > deny.acl + +${app_path}/caddy fmt --overwrite +${app_path}/caddy run --config Caddyfile & diff --git a/terraform/applications/database-backup-bastion/apt.yml b/terraform/applications/database-backup-bastion/apt.yml new file mode 100755 index 00000000..98bf7a06 --- /dev/null +++ b/terraform/applications/database-backup-bastion/apt.yml @@ -0,0 +1,9 @@ +--- +packages: + - curl + - gettext + - nmap + - liblinear4 + - mysql-client + - postgresql-client + - wget diff --git a/terraform/applications/database-backup-bastion/start b/terraform/applications/database-backup-bastion/start new file mode 100755 index 00000000..e7c3b88d --- /dev/null +++ b/terraform/applications/database-backup-bastion/start @@ -0,0 +1,64 @@ +#!/bin/bash + +home="/home/vcap" +#app_path="${home}/app" + +echo "export PATH=${PATH}:${home}/deps/0/bin" > "${home}/exports.sh" + +AWS_ACCESS_KEY_ID=$(echo "${VCAP_SERVICES}" | jq '.s3[] | select(.name | contains("backup")) | .credentials.access_key_id') +AWS_SECRET_ACCESS_KEY=$(echo "${VCAP_SERVICES}" | jq '.s3[] | select(.name | contains("backup")) | .credentials.secret_access_key') +AWS_DEFAULT_REGION=$(echo "${VCAP_SERVICES}" | jq '.s3[] | select(.name | contains("backup")) | .credentials.region') +AWS_BUCKET=$(echo "${VCAP_SERVICES}" | jq '.s3[] | select(.name | contains("backup")) | .credentials.bucket') +AWS_ENDPOINT=$(echo "${VCAP_SERVICES}" | jq '.s3[] | select(.name | 
contains("backup")) | .credentials.endpoint') +AWS_FIPS_ENDPOINT=$(echo "${VCAP_SERVICES}" | jq '.s3[] | select(.name | contains("backup")) | .credentials.fips_endpoint') + +if [ -n "${AWS_ACCESS_KEY_ID}" ]; then + { + echo "export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}" + echo "export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}" + echo "export AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION}" + echo "export AWS_BUCKET=${AWS_BUCKET}" + echo "export AWS_ENDPOINT=${AWS_ENDPOINT}" + echo "export AWS_FIPS_ENDPOINT=${AWS_FIPS_ENDPOINT}" + } >> "${home}/exports.sh" +fi + +MYSQL_CONN_STR=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("mysql")) | .credentials.uri') +MYSQL_DATABASE=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("mysql")) | .credentials.db_name') +MYSQL_HOST=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("mysql")) | .credentials.host') +MYSQL_PASSWORD=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("mysql")) | .credentials.password') +MYSQL_PORT=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("mysql")) | .credentials.port') +MYSQL_USER=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("mysql")) | .credentials.username') + +if [ -n "${MYSQL_CONN_STR}" ]; then + { + echo "export MYSQL_CONN_STR=${MYSQL_CONN_STR}" + echo "export MYSQL_DATABASE=${MYSQL_DATABASE}" + echo "export MYSQL_HOST=${MYSQL_HOST}" + echo "export MYSQL_PASSWORD=${MYSQL_PASSWORD}" + echo "export MYSQL_PORT=${MYSQL_PORT}" + echo "export MYSQL_USER=${MYSQL_USER}" + } >> "${home}/exports.sh" +fi + +PG_CONN_STR=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("psql")) | .credentials.uri') +PGDATABASE=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("psql")) | .credentials.db_name') +PGHOST=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("psql")) | .credentials.host') +PGPASSWORD=$(echo 
"${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("psql")) | .credentials.password') +PGPORT=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("psql")) | .credentials.port') +PGUSER=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[] | select(.plan | contains("psql")) | .credentials.username') + +if [ -n "${PG_CONN_STR}" ]; then + { + echo "export PG_CONN_STR=${PG_CONN_STR}" + echo "export PGDATABASE=${PGDATABASE}" + echo "export PGHOST=${PGHOST}" + echo "export PGPASSWORD=${PGPASSWORD}" + echo "export PGPORT=${PGPORT}" + echo "export PGUSER=${PGUSER}" + } >> "${home}/exports.sh" +fi + +echo "source exports.sh" >> "${home}/.bashrc" + +while : ; do sleep 500 ; done diff --git a/terraform/applications/nginx-waf/.docker/Dockerfile b/terraform/applications/nginx-waf/.docker/Dockerfile new file mode 100644 index 00000000..1cf0d7d9 --- /dev/null +++ b/terraform/applications/nginx-waf/.docker/Dockerfile @@ -0,0 +1,64 @@ +ARG modsecurity_nginx_version="1.0.3" +ARG nginx_version="1.25.4" +ARG ubuntu_version="jammy" + +FROM docker.io/ubuntu:${ubuntu_version} + +ARG modsecurity_nginx_version +ARG nginx_version +ARG ubuntu_version + +RUN sed -i 's/^# deb-src./deb-src /' /etc/apt/sources.list + +RUN apt-get update && \ + apt-get install -y \ + autoconf \ + automake \ + build-essential \ + doxygen \ + dpkg-dev \ + gcc \ + gettext \ + libcurl4 \ + libcurl4-openssl-dev \ + libfuzzy-dev \ + libgeoip-dev \ + liblua5.3-dev \ + libmodsecurity3 \ + libmodsecurity-dev \ + libpcre2-dev \ + libpcre3 \ + libpcre3-dev \ + libtool \ + libxml2 \ + libxml2-dev \ + libyajl-dev \ + make \ + pkg-config \ + software-properties-common \ + ssdeep \ + uuid-dev \ + wget + +ARG modsecurity_path=/usr/local/src/modsecurity_nginx +RUN mkdir -p ${modsecurity_path} +WORKDIR ${modsecurity_path} +RUN wget https://github.com/SpiderLabs/ModSecurity-nginx/archive/refs/tags/v${modsecurity_nginx_version}.tar.gz +RUN tar -xf v${modsecurity_nginx_version}.tar.gz && \ + rm -f 
v${modsecurity_nginx_version}.tar.gz + +WORKDIR /usr/share/nginx/src/ +RUN apt-get build-dep -y nginx + +RUN wget https://nginx.org/download/nginx-${nginx_version}.tar.gz && \ + tar -xf nginx-${nginx_version}.tar.gz && \ + rm -f nginx-${nginx_version}.tar.gz + +WORKDIR /usr/share/nginx/src/nginx-${nginx_version} + +RUN ./configure --add-dynamic-module=${modsecurity_path}/ModSecurity-nginx-${modsecurity_nginx_version} --with-compat && \ + make modules && \ + cp ./objs/ngx_http_modsecurity_module.so /ngx_http_modsecurity_module.so + +HEALTHCHECK NONE +USER nginx diff --git a/terraform/applications/nginx-waf/.docker/Makefile b/terraform/applications/nginx-waf/.docker/Makefile new file mode 100644 index 00000000..0019a526 --- /dev/null +++ b/terraform/applications/nginx-waf/.docker/Makefile @@ -0,0 +1,7 @@ +nginx-waf-with-modsecurity: Dockerfile + docker build --platform linux/amd64 --tag nginx-modsecurity --build-arg=modsecurity_nginx_version=${modsecurity_nginx_version} --build-arg=nginx_version=${new_nginx_version} --build-arg=ubuntu_version=${ubuntu_version} . + docker create --name nginx-vol nginx-modsecurity + mkdir -p ../modules + docker cp nginx-vol:/ngx_http_modsecurity_module.so ../modules + docker rm -f nginx-vol + docker rmi -f nginx-modsecurity diff --git a/terraform/applications/nginx-waf/README.md b/terraform/applications/nginx-waf/README.md new file mode 100644 index 00000000..c946efef --- /dev/null +++ b/terraform/applications/nginx-waf/README.md @@ -0,0 +1,36 @@ +# WAF (Nginx) Server + +The WAF (Nginx) server is an ingress proxy, routing traffic to various internal applications based on the URI. + +## File structure + +- `.docker` + - `Dockerfile`: Builds the Nginx `modsecurity` plugin. + - `Makefile`: Builds a new version of the Caddy binary, then copies the resulting binary to the `modules` directory above this one (`../modules`). +- `modsecurity`: modsecurity configuration, utilizing OWASP CRS. 
+ - https://github.com/SpiderLabs/ModSecurity + - https://github.com/SpiderLabs/ModSecurity-nginx + - https://github.com/coreruleset/coreruleset/ +- `modules`: Contains the compiled Nginx modsecurity binary. +- `nginx`: Contains Nginx configuration files. + - `conf.d`: The main configuration file to load. + - `snippets`: Contains .tmpl files for Nginx, which have variable replacements, along with the main owasp modsecurity configuration. +- `packages`: Contains the `corereuleset` tar.gz archive and the `libmodsecurity3` Debian file. +- `public`: Basic index web page the default buildpack configuration uses. +- `apt.yml`: Installs additional packages into the buildpack during staging. +- `entrypoint`: Sleeps to hold a process open. +- `init`: Configures the buildpack to function as a WAF instead of a basic web server. +- `modsecurity.conf`: The main configuration file for modsecurity. +- `nginx.conf`: The main configuration file for Nginx. +- `start`: The buildpack entrypoint. Runs the `init` script, starts Nginx, then runs `entrypoint` to keep the container open. + +## Docker + +The `.docker` directory, contains a `Makefile` and a `Dockerfile`. To build a new Caddy binary, run the following commands: + +``` +cd .docker +make +``` + +A file called `ngx_http_modsecurity_module.so` will be generated in directory `modules`, in the root directory. 
\ No newline at end of file diff --git a/terraform/applications/nginx-waf/apt.yml b/terraform/applications/nginx-waf/apt.yml new file mode 100755 index 00000000..086cddff --- /dev/null +++ b/terraform/applications/nginx-waf/apt.yml @@ -0,0 +1,6 @@ +--- +packages: + - gettext + - libmodsecurity3 + - libpcre2-8-0 + - modsecurity-crs \ No newline at end of file diff --git a/terraform/applications/nginx-waf/entrypoint b/terraform/applications/nginx-waf/entrypoint new file mode 100755 index 00000000..64ce35fb --- /dev/null +++ b/terraform/applications/nginx-waf/entrypoint @@ -0,0 +1,4 @@ +#!/bin/bash + +## Simple script to hold the container open. +while : ; do sleep 60m ; done \ No newline at end of file diff --git a/terraform/applications/nginx-waf/init b/terraform/applications/nginx-waf/init new file mode 100755 index 00000000..cfc584dd --- /dev/null +++ b/terraform/applications/nginx-waf/init @@ -0,0 +1,89 @@ +#!/bin/bash + +declare http_proxy +export http_proxy=$(echo "${VCAP_SERVICES}" | jq -r '."user-provided"[].credentials.proxy_uri') + +declare https_proxy +export https_proxy=$(echo "${VCAP_SERVICES}" | jq -r '."user-provided"[].credentials.proxy_uri') + +## The home directory of the buildpack. +home="/home/vcap" + +## The path to the application. +app_path="${home}/app" + +## The path to modsecurity. +modsecurity_path="${app_path}/modsecurity/" + +## THe path to the nginx binary. +nginx_path="${home}/deps/1/nginx/sbin" + +## Modify the path to add nginx and other installed binarys from the apt buildpack. +PATH="${PATH}:$(dirname ${nginx_path}):${home}/deps/0/bin/" + +echo "Configuring .bashrc..." +## Updated ~/.bashrc to update $PATH when someone logs in. 
+[ -z "$(cat ${home}/.bashrc | grep PATH)" ] && \ + touch ${home}/.bashrc && \ + echo "PATH=${PATH}" >> ${home}/.bashrc && \ + echo "export http_proxy=${http_proxy}" >> ${home}/.bashrc && \ + echo "export https_proxy=${https_proxy}" >> ${home}/.bashrc && \ + echo "alias vi=\"VIMRUNTIME=${home}/deps/0/apt/usr/share/vim/vim82 ${home}/deps/0/bin/vim.basic\"" >> ${home}/.bashrc && \ + echo "alias vim=\"VIMRUNTIME=${home}/deps/0/apt/usr/share/vim/vim82 ${home}/deps/0/bin/vim.basic\"" >> ${home}/.bashrc + +## Load .bashrc into this script. +source ${home}/.bashrc + +echo "Configuring Nginx..." +## Remove the comment from the load module now that the buildpack has started. +[ -f "/home/vcap/app/modules/ngx_http_modsecurity_module.so" ] && sed -i 's/^#load_module./load_module /' ${app_path}/nginx.conf + +## Remove the comment for the reverse proxy configuration. +sed -i 's/^ #include./include /' ${app_path}/nginx.conf + +sed -i 's/^ server./# server /' ${app_path}/nginx.conf +sed -i 's/^ listen./# listen /' ${app_path}/nginx.conf +sed -i 's/^ root./# root /' ${app_path}/nginx.conf +sed -i 's/^ index./# index /' ${app_path}/nginx.conf +sed -i 's/^ }/# }/' ${app_path}/nginx.conf + +## Get rules for modsecurity. +echo "Installing OWSAP Core Rule Set rules..." +mkdir -p /tmp/owasp-crs ${modsecurity_path}/crs +tar -xf "${app_path}/packages/${CRS_RULES}" --strip-components=1 -C /tmp/owasp-crs +mv -f /tmp/owasp-crs/rules/* ${modsecurity_path}/crs +rm -rf /tmp/owasp-crs + +if [ -n "${MODSECURITY_UPDATE}" ]; then + modsecurity_version=$(echo "${MODSECURITY_UPDATE}" | cut -d '_' -f2 | cut -d '-' -f1) + echo "Updating libmodsecurity..." + #current_path=$(pwd) + + dpkg-deb -R "${app_path}/packages/${MODSECURITY_UPDATE}" ${home}/deps/0/apt/ + ln -s "${home}/deps/0/apt/usr/lib/x86_64-linux-gnu/libmodsecurity.so.${modsecurity_version}" "libmodsecurity.so.${modsecurity_version}" +fi + +## Removing the current modsecurity directory. 
+rm -rf ${home}/deps/0/apt/etc/modsecurity + +## Linking the ~/app/modsecurity path to the apt install path. +ln -s ${modsecurity_path} ${home}/deps/0/apt/etc/ + +echo "Configuring WAF ACLs..." +## Insert the approved IP's into the WAF's configuration. +declare IPS_ALLOWED_CMS +export IPS_ALLOWED_CMS=$(echo "${ALLOWED_IPS_CMS}" | base64 -d | jq -rc '.[] | @sh' | tr -d \' | sed 's/^/ /') +PATH=${PATH}; envsubst < ${app_path}/nginx/snippets/ip-restrict-cms.conf.tmpl > ${app_path}/nginx/snippets/ip-restrict-cms.conf + +## Insert the denied IP's into the WAF's configuration. +declare IPS_DENYED_STATIC +export IPS_DENYED_STATIC=$(echo "${DENYED_IPS_STATIC}" | base64 -d | jq -rc '.[] | @sh' | tr -d \' | sed 's/^/ /') +PATH=${PATH}; envsubst < ${app_path}/nginx/snippets/ip-restrict-static.conf.tmpl > ${app_path}/nginx/snippets/ip-restrict-static.conf + +## Move the default.conf to a temp file so it can be modified. +mv ${app_path}/nginx/conf.d/default.conf ${app_path}/nginx/conf.d/default.conf.tmpl + +## Replace the default listen port to the applications port. +PATH=${PATH}; envsubst '$VCAP_APP_PORT' < ${app_path}/nginx/conf.d/default.conf.tmpl > ${app_path}/nginx/conf.d/default.conf + +echo "Intialization complete!" 
\ No newline at end of file diff --git a/terraform/applications/nginx-waf/mime.types b/terraform/applications/nginx-waf/mime.types new file mode 100755 index 00000000..156ea629 --- /dev/null +++ b/terraform/applications/nginx-waf/mime.types @@ -0,0 +1,78 @@ +types { + text/html html htm shtml; + text/css css; + text/xml xml; + image/gif gif; + image/jpeg jpeg jpg; + application/x-javascript js; + application/atom+xml atom; + application/rss+xml rss; + font/ttf ttf; + font/woff woff; + font/woff2 woff2; + text/mathml mml; + text/plain txt; + text/vnd.sun.j2me.app-descriptor jad; + text/vnd.wap.wml wml; + text/x-component htc; + text/cache-manifest manifest; + image/png png; + image/tiff tif tiff; + image/vnd.wap.wbmp wbmp; + image/x-icon ico; + image/x-jng jng; + image/x-ms-bmp bmp; + image/svg+xml svg svgz; + image/webp webp; + application/java-archive jar war ear; + application/mac-binhex40 hqx; + application/msword doc; + application/pdf pdf; + application/postscript ps eps ai; + application/rtf rtf; + application/vnd.ms-excel xls; + application/vnd.ms-powerpoint ppt; + application/vnd.wap.wmlc wmlc; + application/vnd.google-earth.kml+xml kml; + application/vnd.google-earth.kmz kmz; + application/x-7z-compressed 7z; + application/x-cocoa cco; + application/x-java-archive-diff jardiff; + application/x-java-jnlp-file jnlp; + application/x-makeself run; + application/x-perl pl pm; + application/x-pilot prc pdb; + application/x-rar-compressed rar; + application/x-redhat-package-manager rpm; + application/x-sea sea; + application/x-shockwave-flash swf; + application/x-stuffit sit; + application/x-tcl tcl tk; + application/x-x509-ca-cert der pem crt; + application/x-xpinstall xpi; + application/xhtml+xml xhtml; + application/zip zip; + application/octet-stream bin exe dll; + application/octet-stream deb; + application/octet-stream dmg; + application/octet-stream eot; + application/octet-stream iso img; + application/octet-stream msi msp msm; + application/json json; + 
audio/midi mid midi kar; + audio/mpeg mp3; + audio/ogg ogg; + audio/x-m4a m4a; + audio/x-realaudio ra; + video/3gpp 3gpp 3gp; + video/mp4 mp4; + video/mpeg mpeg mpg; + video/quicktime mov; + video/webm webm; + video/x-flv flv; + video/x-m4v m4v; + video/x-mng mng; + video/x-ms-asf asx asf; + video/x-ms-wmv wmv; + video/x-msvideo avi; +} \ No newline at end of file diff --git a/terraform/applications/nginx-waf/modsecurity.conf b/terraform/applications/nginx-waf/modsecurity.conf new file mode 100755 index 00000000..827749e2 --- /dev/null +++ b/terraform/applications/nginx-waf/modsecurity.conf @@ -0,0 +1,285 @@ +# -- Rule engine initialization ---------------------------------------------- + +# Enable ModSecurity, attaching it to every transaction. Use detection +# only to start with, because that minimises the chances of post-installation +# disruption. +# +SecRuleEngine On + + +# -- Request body handling --------------------------------------------------- + +# Allow ModSecurity to access request bodies. If you don't, ModSecurity +# won't be able to see any POST parameters, which opens a large security +# hole for attackers to exploit. +# +SecRequestBodyAccess On + + +# Enable XML request body parser. +# Initiate XML Processor in case of xml content-type +# +SecRule REQUEST_HEADERS:Content-Type "^(?:application(?:/soap\+|/)|text/)xml" \ + "id:'200000',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=XML" + +# Enable JSON request body parser. +# Initiate JSON Processor in case of JSON content-type; change accordingly +# if your application does not use 'application/json' +# +SecRule REQUEST_HEADERS:Content-Type "^application/json" \ + "id:'200001',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Sample rule to enable JSON request body parser for more subtypes. 
+# Uncomment or adapt this rule if you want to engage the JSON +# Processor for "+json" subtypes +# +#SecRule REQUEST_HEADERS:Content-Type "^application/[a-z0-9.-]+[+]json" \ +# "id:'200006',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Maximum request body size we will accept for buffering. If you support +# file uploads then the value given on the first line has to be as large +# as the largest file you are willing to accept. The second value refers +# to the size of data, with files excluded. You want to keep that value as +# low as practical. +# +SecRequestBodyLimit 13107200 +SecRequestBodyNoFilesLimit 131072 + +# What to do if the request body size is above our configured limit. +# Keep in mind that this setting will automatically be set to ProcessPartial +# when SecRuleEngine is set to DetectionOnly mode in order to minimize +# disruptions when initially deploying ModSecurity. +# +SecRequestBodyLimitAction Reject + +# Maximum parsing depth allowed for JSON objects. You want to keep this +# value as low as practical. +# +SecRequestBodyJsonDepthLimit 512 + +# Maximum number of args allowed per request. You want to keep this +# value as low as practical. The value should match that in rule 200007. +SecArgumentsLimit 1000 + +# If SecArgumentsLimit has been set, you probably want to reject any +# request body that has only been partly parsed. The value used in this +# rule should match what was used with SecArgumentsLimit +SecRule &ARGS "@ge 1000" \ +"id:'200007', phase:2,t:none,log,deny,status:400,msg:'Failed to fully parse request body due to large argument count',severity:2" + +# Verify that we've correctly processed the request body. +# As a rule of thumb, when failing to process a request body +# you should reject the request (when deployed in blocking mode) +# or log a high-severity alert (when deployed in detection-only mode). 
+# +SecRule REQBODY_ERROR "!@eq 0" \ +"id:'200002', phase:2,t:none,log,deny,status:400,msg:'Failed to parse request body.',logdata:'%{reqbody_error_msg}',severity:2" + +# By default be strict with what we accept in the multipart/form-data +# request body. If the rule below proves to be too strict for your +# environment consider changing it to detection-only. You are encouraged +# _not_ to remove it altogether. +# +SecRule MULTIPART_STRICT_ERROR "!@eq 0" \ +"id:'200003',phase:2,t:none,log,deny,status:400, \ +msg:'Multipart request body failed strict validation: \ +PE %{REQBODY_PROCESSOR_ERROR}, \ +BQ %{MULTIPART_BOUNDARY_QUOTED}, \ +BW %{MULTIPART_BOUNDARY_WHITESPACE}, \ +DB %{MULTIPART_DATA_BEFORE}, \ +DA %{MULTIPART_DATA_AFTER}, \ +HF %{MULTIPART_HEADER_FOLDING}, \ +LF %{MULTIPART_LF_LINE}, \ +SM %{MULTIPART_MISSING_SEMICOLON}, \ +IQ %{MULTIPART_INVALID_QUOTING}, \ +IP %{MULTIPART_INVALID_PART}, \ +IH %{MULTIPART_INVALID_HEADER_FOLDING}, \ +FL %{MULTIPART_FILE_LIMIT_EXCEEDED}'" + +# Did we see anything that might be a boundary? +# +# Here is a short description about the ModSecurity Multipart parser: the +# parser returns with value 0, if all "boundary-like" line matches with +# the boundary string which given in MIME header. In any other cases it returns +# with different value, eg. 1 or 2. +# +# The RFC 1341 descript the multipart content-type and its syntax must contains +# only three mandatory lines (above the content): +# * Content-Type: multipart/mixed; boundary=BOUNDARY_STRING +# * --BOUNDARY_STRING +# * --BOUNDARY_STRING-- +# +# First line indicates, that this is a multipart content, second shows that +# here starts a part of the multipart content, third shows the end of content. +# +# If there are any other lines, which starts with "--", then it should be +# another boundary id - or not. +# +# After 3.0.3, there are two kinds of types of boundary errors: strict and permissive. 
+# +# If multipart content contains the three necessary lines with correct order, but +# there are one or more lines with "--", then parser returns with value 2 (non-zero). +# +# If some of the necessary lines (usually the start or end) misses, or the order +# is wrong, then parser returns with value 1 (also a non-zero). +# +# You can choose, which one is what you need. The example below contains the +# 'strict' mode, which means if there are any lines with start of "--", then +# ModSecurity blocked the content. But the next, commented example contains +# the 'permissive' mode, then you check only if the necessary lines exists in +# correct order. Whit this, you can enable to upload PEM files (eg "----BEGIN.."), +# or other text files, which contains eg. HTTP headers. +# +# The difference is only the operator - in strict mode (first) the content blocked +# in case of any non-zero value. In permissive mode (second, commented) the +# content blocked only if the value is explicit 1. If it 0 or 2, the content will +# allowed. +# + +# +# See #1747 and #1924 for further information on the possible values for +# MULTIPART_UNMATCHED_BOUNDARY. +# +SecRule MULTIPART_UNMATCHED_BOUNDARY "@eq 1" \ + "id:'200004',phase:2,t:none,log,deny,msg:'Multipart parser detected a possible unmatched boundary.'" + + +# PCRE Tuning +# We want to avoid a potential RegEx DoS condition +# +SecPcreMatchLimit 1000 +SecPcreMatchLimitRecursion 1000 + +# Some internal errors will set flags in TX and we will need to look for these. +# All of these are prefixed with "MSC_". The following flags currently exist: +# +# MSC_PCRE_LIMITS_EXCEEDED: PCRE match limits were exceeded. +# +SecRule TX:/^MSC_/ "!@streq 0" \ + "id:'200005',phase:2,t:none,deny,msg:'ModSecurity internal error flagged: %{MATCHED_VAR_NAME}'" + + +# -- Response body handling -------------------------------------------------- + +# Allow ModSecurity to access response bodies. 
+# You should have this directive enabled in order to identify errors +# and data leakage issues. +# +# Do keep in mind that enabling this directive does increases both +# memory consumption and response latency. +# +SecResponseBodyAccess On + +# Which response MIME types do you want to inspect? You should adjust the +# configuration below to catch documents but avoid static files +# (e.g., images and archives). +# +SecResponseBodyMimeType text/plain text/html text/xml + +# Buffer response bodies of up to 512 KB in length. +SecResponseBodyLimit 524288 + +# What happens when we encounter a response body larger than the configured +# limit? By default, we process what we have and let the rest through. +# That's somewhat less secure, but does not break any legitimate pages. +# +SecResponseBodyLimitAction ProcessPartial + + +# -- Filesystem configuration ------------------------------------------------ + +# The location where ModSecurity stores temporary files (for example, when +# it needs to handle a file upload that is larger than the configured limit). +# +# This default setting is chosen due to all systems have /tmp available however, +# this is less than ideal. It is recommended that you specify a location that's private. +# +SecTmpDir /tmp/ + +# The location where ModSecurity will keep its persistent data. This default setting +# is chosen due to all systems have /tmp available however, it +# too should be updated to a place that other users can't access. +# +SecDataDir /tmp/ + + +# -- File uploads handling configuration ------------------------------------- + +# The location where ModSecurity stores intercepted uploaded files. This +# location must be private to ModSecurity. You don't want other users on +# the server to access the files, do you? +# +#SecUploadDir /opt/modsecurity/var/upload/ + +# By default, only keep the files that were determined to be unusual +# in some way (by an external inspection script). 
For this to work you +# will also need at least one file inspection rule. +# +#SecUploadKeepFiles RelevantOnly + +# Uploaded files are by default created with permissions that do not allow +# any other user to access them. You may need to relax that if you want to +# interface ModSecurity to an external program (e.g., an anti-virus). +# +#SecUploadFileMode 0600 + + +# -- Debug log configuration ------------------------------------------------- + +# The default debug log configuration is to duplicate the error, warning +# and notice messages from the error log. +# +#SecDebugLog /opt/modsecurity/var/log/debug.log +#SecDebugLogLevel 3 + + +# -- Audit log configuration ------------------------------------------------- + +# Log the transactions that are marked by a rule, as well as those that +# trigger a server error (determined by a 5xx or 4xx, excluding 404, +# level response status codes). +# +SecAuditEngine RelevantOnly +SecAuditLogRelevantStatus "^(?:5|4(?!04))" + +# Log everything we know about a transaction. +SecAuditLogParts ABCEFHJKZ + +# Use a single file for logging. This is much easier to look at, but +# assumes that you will use the audit log only ocassionally. +# +SecAuditLogType Serial +SecAuditLog /var/log/modsec_audit.log + +# Specify the path for concurrent audit logging. +#SecAuditLogStorageDir /opt/modsecurity/var/audit/ + + +# -- Miscellaneous ----------------------------------------------------------- + +# Use the most commonly used application/x-www-form-urlencoded parameter +# separator. There's probably only one application somewhere that uses +# something else so don't expect to change this value. +# +SecArgumentSeparator & + +# Settle on version 0 (zero) cookies, as that is what most applications +# use. Using an incorrect cookie version may open your installation to +# evasion attacks (against the rules that examine named cookies). +# +SecCookieFormat 0 + +# Specify your Unicode Code Point. 
+# This mapping is used by the t:urlDecodeUni transformation function +# to properly map encoded data to your language. Properly setting +# these directives helps to reduce false positives and negatives. +# +SecUnicodeMapFile unicode.mapping 20127 + +# Improve the quality of ModSecurity by sharing information about your +# current ModSecurity version and dependencies versions. +# The following information will be shared: ModSecurity version, +# Web Server version, APR version, PCRE version, Lua version, Libxml2 +# version, Anonymous unique id for host. +SecStatusEngine On + diff --git a/terraform/applications/nginx-waf/modsecurity/crs-setup.conf b/terraform/applications/nginx-waf/modsecurity/crs-setup.conf new file mode 100644 index 00000000..b200579a --- /dev/null +++ b/terraform/applications/nginx-waf/modsecurity/crs-setup.conf @@ -0,0 +1,836 @@ +# ------------------------------------------------------------------------ +# OWASP ModSecurity Core Rule Set ver.3.3.2 +# Copyright (c) 2006-2020 Trustwave and contributors. All rights reserved. +# +# The OWASP ModSecurity Core Rule Set is distributed under +# Apache Software License (ASL) version 2 +# Please see the enclosed LICENSE file for full details. +# ------------------------------------------------------------------------ + + +# +# -- [[ Introduction ]] -------------------------------------------------------- +# +# The OWASP ModSecurity Core Rule Set (CRS) is a set of generic attack +# detection rules that provide a base level of protection for any web +# application. They are written for the open source, cross-platform +# ModSecurity Web Application Firewall. +# +# See also: +# https://coreruleset.org/ +# https://github.com/SpiderLabs/owasp-modsecurity-crs +# https://www.owasp.org/index.php/Category:OWASP_ModSecurity_Core_Rule_Set_Project +# + + +# +# -- [[ System Requirements ]] ------------------------------------------------- +# +# CRS requires ModSecurity version 2.8.0 or above. 
# We recommend always using the newest ModSecurity version.
+# Rules inherit the disruptive action that you specify (i.e. deny, drop, etc). +# The first rule that matches will execute this action. In most cases this will +# cause evaluation to stop after the first rule has matched, similar to how many +# IDSs function. +# +# -- [[ Alert Logging Control ]] -- +# In the mode configuration, you must also adjust the desired logging options. +# There are three common options for dealing with logging. By default CRS enables +# logging to the webserver error log (or Event viewer) plus detailed logging to +# the ModSecurity audit log (configured under SecAuditLog in modsecurity.conf). +# +# - To log to both error log and ModSecurity audit log file, use: "log,auditlog" +# - To log *only* to the ModSecurity audit log file, use: "nolog,auditlog" +# - To log *only* to the error log file, use: "log,noauditlog" +# +# Examples for the various modes follow. +# You must leave one of the following options enabled. +# Note that you must specify the same line for phase:1 and phase:2. +# + +# Default: Anomaly Scoring mode, log to error log, log to ModSecurity audit log +# - By default, offending requests are blocked with an error 403 response. +# - To change the disruptive action, see RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example +# and review section 'Changing the Disruptive Action for Anomaly Mode'. +# - In Apache, you can use ErrorDocument to show a friendly error page or +# perform a redirect: https://httpd.apache.org/docs/2.4/custom-error.html +# +SecDefaultAction "phase:1,pass,log,tag:'modsecurity'" +SecDefaultAction "phase:2,pass,log,tag:'${MODSEC_TAG}'" + +# Example: Anomaly Scoring mode, log only to ModSecurity audit log +# - By default, offending requests are blocked with an error 403 response. +# - To change the disruptive action, see RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example +# and review section 'Changing the Disruptive Action for Anomaly Mode'. 
+# - In Apache, you can use ErrorDocument to show a friendly error page or +# perform a redirect: https://httpd.apache.org/docs/2.4/custom-error.html +# +# SecDefaultAction "phase:1,nolog,auditlog,pass" +# SecDefaultAction "phase:2,nolog,auditlog,pass" + +# Example: Self-contained mode, return error 403 on blocking +# - In this configuration the default disruptive action becomes 'deny'. After a +# rule triggers, it will stop processing the request and return an error 403. +# - You can also use a different error status, such as 404, 406, et cetera. +# - In Apache, you can use ErrorDocument to show a friendly error page or +# perform a redirect: https://httpd.apache.org/docs/2.4/custom-error.html +# +# SecDefaultAction "phase:1,log,auditlog,deny,status:403" +# SecDefaultAction "phase:2,log,auditlog,deny,status:403" + +# Example: Self-contained mode, redirect back to homepage on blocking +# - In this configuration the 'tag' action includes the Host header data in the +# log. This helps to identify which virtual host triggered the rule (if any). +# - Note that this might cause redirect loops in some situations; for example +# if a Cookie or User-Agent header is blocked, it will also be blocked when +# the client subsequently tries to access the homepage. You can also redirect +# to another custom URL. +# SecDefaultAction "phase:1,log,auditlog,redirect:'http://%{request_headers.host}/',tag:'Host: %{request_headers.host}'" +# SecDefaultAction "phase:2,log,auditlog,redirect:'http://%{request_headers.host}/',tag:'Host: %{request_headers.host}'" + + +# +# -- [[ Paranoia Level Initialization ]] --------------------------------------- +# +# The Paranoia Level (PL) setting allows you to choose the desired level +# of rule checks that will add to your anomaly scores. +# +# With each paranoia level increase, the CRS enables additional rules +# giving you a higher level of security. 
However, higher paranoia levels +# also increase the possibility of blocking some legitimate traffic due to +# false alarms (also named false positives or FPs). If you use higher +# paranoia levels, it is likely that you will need to add some exclusion +# rules for certain requests and applications receiving complex input. +# +# - A paranoia level of 1 is default. In this level, most core rules +# are enabled. PL1 is advised for beginners, installations +# covering many different sites and applications, and for setups +# with standard security requirements. +# At PL1 you should face FPs rarely. If you encounter FPs, please +# open an issue on the CRS GitHub site and don't forget to attach your +# complete Audit Log record for the request with the issue. +# - Paranoia level 2 includes many extra rules, for instance enabling +# many regexp-based SQL and XSS injection protections, and adding +# extra keywords checked for code injections. PL2 is advised +# for moderate to experienced users desiring more complete coverage +# and for installations with elevated security requirements. +# PL2 comes with some FPs which you need to handle. +# - Paranoia level 3 enables more rules and keyword lists, and tweaks +# limits on special characters used. PL3 is aimed at users experienced +# at the handling of FPs and at installations with a high security +# requirement. +# - Paranoia level 4 further restricts special characters. +# The highest level is advised for experienced users protecting +# installations with very high security requirements. Running PL4 will +# likely produce a very high number of FPs which have to be +# treated before the site can go productive. +# +# All rules will log their PL to the audit log; +# example: [tag "paranoia-level/2"]. This allows you to deduct from the +# audit log how the WAF behavior is affected by paranoia level. 
+# +# It is important to also look into the variable +# tx.enforce_bodyproc_urlencoded (Enforce Body Processor URLENCODED) +# defined below. Enabling it closes a possible bypass of CRS. +# +# Uncomment this rule to change the default: +# +SecAction \ + "id:900000, \ + phase:1, \ + nolog, \ + pass, \ + t:none, \ + setvar:tx.paranoia_level=1" + + +# It is possible to execute rules from a higher paranoia level but not include +# them in the anomaly scoring. This allows you to take a well-tuned system on +# paranoia level 1 and add rules from paranoia level 2 without having to fear +# the new rules would lead to false positives that raise your score above the +# threshold. +# This optional feature is enabled by uncommenting the following rule and +# setting the tx.executing_paranoia_level. +# Technically, rules up to the level defined in tx.executing_paranoia_level +# will be executed, but only the rules up to tx.paranoia_level affect the +# anomaly scores. +# By default, tx.executing_paranoia_level is set to tx.paranoia_level. +# tx.executing_paranoia_level must not be lower than tx.paranoia_level. +# +# Please notice that setting tx.executing_paranoia_level to a higher paranoia +# level results in a performance impact that is equally high as setting +# tx.paranoia_level to said level. +# +#SecAction \ +# "id:900001,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.executing_paranoia_level=1" + + +# +# -- [[ Enforce Body Processor URLENCODED ]] ----------------------------------- +# +# ModSecurity selects the body processor based on the Content-Type request +# header. But clients are not always setting the Content-Type header for their +# request body payloads. This will leave ModSecurity with limited vision into +# the payload. The variable tx.enforce_bodyproc_urlencoded lets you force the +# URLENCODED body processor in these situations. 
This is off by default, as it +# implies a change of the behaviour of ModSecurity beyond CRS (the body +# processor applies to all rules, not only CRS) and because it may lead to +# false positives already on paranoia level 1. However, enabling this variable +# closes a possible bypass of CRS so it should be considered. +# +# Uncomment this rule to change the default: +# +#SecAction \ +# "id:900010,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.enforce_bodyproc_urlencoded=1" + + +# +# -- [[ Anomaly Mode Severity Levels ]] ---------------------------------------- +# +# Each rule in the CRS has an associated severity level. +# These are the default scoring points for each severity level. +# These settings will be used to increment the anomaly score if a rule matches. +# You may adjust these points to your liking, but this is usually not needed. +# +# - CRITICAL severity: Anomaly Score of 5. +# Mostly generated by the application attack rules (93x and 94x files). +# - ERROR severity: Anomaly Score of 4. +# Generated mostly from outbound leakage rules (95x files). +# - WARNING severity: Anomaly Score of 3. +# Generated mostly by malicious client rules (91x files). +# - NOTICE severity: Anomaly Score of 2. +# Generated mostly by the protocol rules (92x files). +# +# In anomaly mode, these scores are cumulative. +# So it's possible for a request to hit multiple rules. +# +# (Note: In this file, we use 'phase:1' to set CRS configuration variables. +# In general, 'phase:request' is used. However, we want to make absolutely sure +# that all configuration variables are set before the CRS rules are processed.) 
+# +#SecAction \ +# "id:900100,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.critical_anomaly_score=5,\ +# setvar:tx.error_anomaly_score=4,\ +# setvar:tx.warning_anomaly_score=3,\ +# setvar:tx.notice_anomaly_score=2" + + +# +# -- [[ Anomaly Mode Blocking Threshold Levels ]] ------------------------------ +# +# Here, you can specify at which cumulative anomaly score an inbound request, +# or outbound response, gets blocked. +# +# Most detected inbound threats will give a critical score of 5. +# Smaller violations, like violations of protocol/standards, carry lower scores. +# +# [ At default value ] +# If you keep the blocking thresholds at the defaults, the CRS will work +# similarly to previous CRS versions: a single critical rule match will cause +# the request to be blocked and logged. +# +# [ Using higher values ] +# If you want to make the CRS less sensitive, you can increase the blocking +# thresholds, for instance to 7 (which would require multiple rule matches +# before blocking) or 10 (which would require at least two critical alerts - or +# a combination of many lesser alerts), or even higher. However, increasing the +# thresholds might cause some attacks to bypass the CRS rules or your policies. +# +# [ New deployment strategy: Starting high and decreasing ] +# It is a common practice to start a fresh CRS installation with elevated +# anomaly scoring thresholds (>100) and then lower the limits as your +# confidence in the setup grows. You may also look into the Sampling +# Percentage section below for a different strategy to ease into a new +# CRS installation. 
+# +# [ Anomaly Threshold / Paranoia Level Quadrant ] +# +# High Anomaly Limit | High Anomaly Limit +# Low Paranoia Level | High Paranoia Level +# -> Fresh Site | -> Experimental Site +# ------------------------------------------------------ +# Low Anomaly Limit | Low Anomaly Limit +# Low Paranoia Level | High Paranoia Level +# -> Standard Site | -> High Security Site +# +# Uncomment this rule to change the defaults: +# +SecAction \ + "id:900110, \ + phase:1, \ + nolog, \ + pass, \ + t:none, \ + setvar:tx.inbound_anomaly_score_threshold=100, \ + setvar:tx.outbound_anomaly_score_threshold=100" + +# +# -- [[ Application Specific Rule Exclusions ]] ---------------------------------------- +# +# Some well-known applications may undertake actions that appear to be +# malicious. This includes actions such as allowing HTML or Javascript within +# parameters. In such cases the CRS aims to prevent false positives by allowing +# administrators to enable prebuilt, application specific exclusions on an +# application by application basis. +# These application specific exclusions are distinct from the rules that would +# be placed in the REQUEST-900-EXCLUSION-RULES-BEFORE-CRS configuration file as +# they are prebuilt for specific applications. The 'REQUEST-900' file is +# designed for users to add their own custom exclusions. Note, using these +# application specific exclusions may loosen restrictions of the CRS, +# especially if used with an application they weren't designed for. As a result +# they should be applied with care. +# To use this functionality you must specify a supported application. To do so +# uncomment rule 900130. In addition to uncommenting the rule you will need to +# specify which application(s) you'd like to enable exclusions for. Only a +# (very) limited set of applications are currently supported, please use the +# filenames prefixed with 'REQUEST-903' to guide you in your selection. 
+# Such filenames use the following convention: +# REQUEST-903.9XXX-{APPNAME}-EXCLUSIONS-RULES.conf +# +# It is recommended if you run multiple web applications on your site to limit +# the effects of the exclusion to only the path where the excluded webapp +# resides using a rule similar to the following example: +# SecRule REQUEST_URI "@beginsWith /wordpress/" setvar:tx.crs_exclusions_wordpress=1 + +# +# Modify and uncomment this rule to select which application: +# +SecAction \ + "id:900130,\ + phase:1,\ + nolog,\ + pass,\ + t:none,\ + setvar:tx.crs_exclusions_drupal=1" + +# +# -- [[ HTTP Policy Settings ]] ------------------------------------------------ +# +# This section defines your policies for the HTTP protocol, such as: +# - allowed HTTP versions, HTTP methods, allowed request Content-Types +# - forbidden file extensions (e.g. .bak, .sql) and request headers (e.g. Proxy) +# +# These variables are used in the following rule files: +# - REQUEST-911-METHOD-ENFORCEMENT.conf +# - REQUEST-912-DOS-PROTECTION.conf +# - REQUEST-920-PROTOCOL-ENFORCEMENT.conf + +# HTTP methods that a client is allowed to use. +# Default: GET HEAD POST OPTIONS +# Example: for RESTful APIs, add the following methods: PUT PATCH DELETE +# Example: for WebDAV, add the following methods: CHECKOUT COPY DELETE LOCK +# MERGE MKACTIVITY MKCOL MOVE PROPFIND PROPPATCH PUT UNLOCK +# Uncomment this rule to change the default. +#SecAction \ +# "id:900200,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.allowed_methods=GET HEAD POST OPTIONS'" + +# Content-Types that a client is allowed to send in a request. 
+# Default: |application/x-www-form-urlencoded| |multipart/form-data| |multipart/related| +# |text/xml| |application/xml| |application/soap+xml| |application/x-amf| |application/json| +# |application/cloudevents+json| |application/cloudevents-batch+json| |application/octet-stream| +# |application/csp-report| |application/xss-auditor-report| |text/plain| +# Uncomment this rule to change the default. +#SecAction \ +# "id:900220,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.allowed_request_content_type=|application/x-www-form-urlencoded| |multipart/form-data| |multipart/related| |text/xml| |application/xml| |application/soap+xml| |application/x-amf| |application/json| |application/cloudevents+json| |application/cloudevents-batch+json| |application/octet-stream| |application/csp-report| |application/xss-auditor-report| |text/plain|'" + +# Allowed HTTP versions. +# Default: HTTP/1.0 HTTP/1.1 HTTP/2 HTTP/2.0 +# Example for legacy clients: HTTP/0.9 HTTP/1.0 HTTP/1.1 HTTP/2 HTTP/2.0 +# Note that some web server versions use 'HTTP/2', some 'HTTP/2.0', so +# we include both version strings by default. +# Uncomment this rule to change the default. +#SecAction \ +# "id:900230,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.allowed_http_versions=HTTP/1.0 HTTP/1.1 HTTP/2 HTTP/2.0'" + +# Forbidden file extensions. +# Guards against unintended exposure of development/configuration files. +# Default: .asa/ .asax/ .ascx/ .axd/ .backup/ .bak/ .bat/ .cdx/ .cer/ .cfg/ .cmd/ .com/ .config/ .conf/ .cs/ .csproj/ .csr/ .dat/ .db/ .dbf/ .dll/ .dos/ .htr/ .htw/ .ida/ .idc/ .idq/ .inc/ .ini/ .key/ .licx/ .lnk/ .log/ .mdb/ .old/ .pass/ .pdb/ .pol/ .printer/ .pwd/ .rdb/ .resources/ .resx/ .sql/ .swp/ .sys/ .vb/ .vbs/ .vbproj/ .vsdisco/ .webinfo/ .xsd/ .xsx/ +# Example: .bak/ .config/ .conf/ .db/ .ini/ .log/ .old/ .pass/ .pdb/ .rdb/ .sql/ +# Uncomment this rule to change the default. 
+#SecAction \ +# "id:900240,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.restricted_extensions=.asa/ .asax/ .ascx/ .axd/ .backup/ .bak/ .bat/ .cdx/ .cer/ .cfg/ .cmd/ .com/ .config/ .conf/ .cs/ .csproj/ .csr/ .dat/ .db/ .dbf/ .dll/ .dos/ .htr/ .htw/ .ida/ .idc/ .idq/ .inc/ .ini/ .key/ .licx/ .lnk/ .log/ .mdb/ .old/ .pass/ .pdb/ .pol/ .printer/ .pwd/ .rdb/ .resources/ .resx/ .sql/ .swp/ .sys/ .vb/ .vbs/ .vbproj/ .vsdisco/ .webinfo/ .xsd/ .xsx/'" + +# Forbidden request headers. +# Header names should be lowercase, enclosed by /slashes/ as delimiters. +# Blocking Proxy header prevents 'httpoxy' vulnerability: https://httpoxy.org +# Default: /proxy/ /lock-token/ /content-range/ /if/ +# Uncomment this rule to change the default. +#SecAction \ +# "id:900250,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.restricted_headers=/proxy/ /lock-token/ /content-range/ /if/'" + +# File extensions considered static files. +# Extensions include the dot, lowercase, enclosed by /slashes/ as delimiters. +# Used in DoS protection rule. See section "Anti-Automation / DoS Protection". +# Default: /.jpg/ /.jpeg/ /.png/ /.gif/ /.js/ /.css/ /.ico/ /.svg/ /.webp/ +# Uncomment this rule to change the default. +#SecAction \ +# "id:900260,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.static_extensions=/.jpg/ /.jpeg/ /.png/ /.gif/ /.js/ /.css/ /.ico/ /.svg/ /.webp/'" + +# Content-Types charsets that a client is allowed to send in a request. +# Default: utf-8|iso-8859-1|iso-8859-15|windows-1252 +# Uncomment this rule to change the default. +# Use "|" to separate multiple charsets like in the rule defining +# tx.allowed_request_content_type. 
+#SecAction \
+# "id:900280,\
+# phase:1,\
+# nolog,\
+# pass,\
+# t:none,\
+# setvar:'tx.allowed_request_content_type_charset=utf-8|iso-8859-1|iso-8859-15|windows-1252'"
+
+#
+# -- [[ HTTP Argument/Upload Limits ]] -----------------------------------------
+#
+# Here you can define optional limits on HTTP get/post parameters and uploads.
+# This can help to prevent application specific DoS attacks.
+#
+# These values are checked in REQUEST-920-PROTOCOL-ENFORCEMENT.conf.
+# Beware of blocking legitimate traffic when enabling these limits.
+#
+
+# Block request if number of arguments is too high
+# Default: unlimited
+# Example: 255
+# Uncomment this rule to set a limit.
+#SecAction \
+# "id:900300,\
+# phase:1,\
+# nolog,\
+# pass,\
+# t:none,\
+# setvar:tx.max_num_args=255"
+
+# Block request if the length of any argument name is too high
+# Default: unlimited
+# Example: 100
+# Uncomment this rule to set a limit.
+#SecAction \
+# "id:900310,\
+# phase:1,\
+# nolog,\
+# pass,\
+# t:none,\
+# setvar:tx.arg_name_length=100"
+
+# Block request if the length of any argument value is too high
+# Default: unlimited
+# Example: 400
+# Uncomment this rule to set a limit.
+#SecAction \
+# "id:900320,\
+# phase:1,\
+# nolog,\
+# pass,\
+# t:none,\
+# setvar:tx.arg_length=400"
+
+# Block request if the total length of all combined arguments is too high
+# Default: unlimited
+# Example: 64000
+# Uncomment this rule to set a limit.
+#SecAction \
+# "id:900330,\
+# phase:1,\
+# nolog,\
+# pass,\
+# t:none,\
+# setvar:tx.total_arg_length=64000"
+
+# Block request if the file size of any individual uploaded file is too high
+# Default: unlimited
+# Example: 1048576
+# Uncomment this rule to set a limit.
+#SecAction \ +# "id:900340,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.max_file_size=1048576" + +# Block request if the total size of all combined uploaded files is too high +# Default: unlimited +# Example: 1048576 +# Uncomment this rule to set a limit. +#SecAction \ +# "id:900350,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.combined_file_sizes=1048576" + + +# +# -- [[ Easing In / Sampling Percentage ]] ------------------------------------- +# +# Adding the Core Rule Set to an existing productive site can lead to false +# positives, unexpected performance issues and other undesired side effects. +# +# It can be beneficial to test the water first by enabling the CRS for a +# limited number of requests only and then, when you have solved the issues (if +# any) and you have confidence in the setup, to raise the ratio of requests +# being sent into the ruleset. +# +# Adjust the percentage of requests that are funnelled into the Core Rules by +# setting TX.sampling_percentage below. The default is 100, meaning that every +# request gets checked by the CRS. The selection of requests, which are going +# to be checked, is based on a pseudo random number generated by ModSecurity. +# +# If a request is allowed to pass without being checked by the CRS, there is no +# entry in the audit log (for performance reasons), but an error log entry is +# written. If you want to disable the error log entry, then issue the +# following directive somewhere after the inclusion of the CRS +# (E.g., RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf). +# +# SecRuleUpdateActionById 901150 "nolog" +# +# ATTENTION: If this TX.sampling_percentage is below 100, then some of the +# requests will bypass the Core Rules completely and you lose the ability to +# protect your service with ModSecurity. 
+# +# Uncomment this rule to enable this feature: +# +#SecAction "id:900400,\ +# phase:1,\ +# pass,\ +# nolog,\ +# setvar:tx.sampling_percentage=100" + + +# +# -- [[ Project Honey Pot HTTP Blacklist ]] ------------------------------------ +# +# Optionally, you can check the client IP address against the Project Honey Pot +# HTTPBL (dnsbl.httpbl.org). In order to use this, you need to register to get a +# free API key. Set it here with SecHttpBlKey. +# +# Project Honeypot returns multiple different malicious IP types. +# You may specify which you want to block by enabling or disabling them below. +# +# Ref: https://www.projecthoneypot.org/httpbl.php +# Ref: https://github.com/SpiderLabs/ModSecurity/wiki/Reference-Manual#wiki-SecHttpBlKey +# +# Uncomment these rules to use this feature: +# +#SecHttpBlKey XXXXXXXXXXXXXXXXX +#SecAction "id:900500,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.block_search_ip=1,\ +# setvar:tx.block_suspicious_ip=1,\ +# setvar:tx.block_harvester_ip=1,\ +# setvar:tx.block_spammer_ip=1" + + +# +# -- [[ GeoIP Database ]] ------------------------------------------------------ +# +# There are some rulesets that inspect geolocation data of the client IP address +# (geoLookup). The CRS uses geoLookup to implement optional country blocking. +# +# To use geolocation, we make use of the MaxMind GeoIP database. +# This database is not included with the CRS and must be downloaded. +# +# There are two formats for the GeoIP database. ModSecurity v2 uses GeoLite (.dat files), +# and ModSecurity v3 uses GeoLite2 (.mmdb files). +# +# If you use ModSecurity 3, MaxMind provides a binary for updating GeoLite2 files, +# see https://github.com/maxmind/geoipupdate. +# +# Download the package for your OS, and read https://dev.maxmind.com/geoip/geoipupdate/ +# for configuration options. +# +# Warning: GeoLite (not GeoLite2) databases are considered legacy, and not being updated anymore. 
+# See https://support.maxmind.com/geolite-legacy-discontinuation-notice/ for more info. +# +# Therefore, if you use ModSecurity v2, you need to regenerate updated .dat files +# from CSV files first. +# +# You can achieve this using https://github.com/sherpya/geolite2legacy +# Pick the zip files from maxmind site: +# https://geolite.maxmind.com/download/geoip/database/GeoLite2-Country-CSV.zip +# +# Follow the guidelines for installing the tool and run: +# ./geolite2legacy.py -i GeoLite2-Country-CSV.zip \ +# -f geoname2fips.csv -o /usr/share/GeoliteCountry.dat +# +# Update the database regularly, see Step 3 of the configuration link above. +# +# By default, when you execute `sudo geoipupdate` on Linux, files from the free database +# will be downloaded to `/usr/share/GeoIP` (both v1 and v2). +# +# Then choose from: +# - `GeoLite2-Country.mmdb` (if you are using ModSecurity v3) +# - `GeoLiteCountry.dat` (if you are using ModSecurity v2) +# +# Ref: http://blog.spiderlabs.com/2010/10/detecting-malice-with-modsecurity-geolocation-data.html +# Ref: http://blog.spiderlabs.com/2010/11/detecting-malice-with-modsecurity-ip-forensics.html +# +# Uncomment only one of the next rules here to use this feature. +# Choose the one depending on the ModSecurity version you are using, and change the path accordingly: +# +# For ModSecurity v3: +#SecGeoLookupDB /usr/share/GeoIP/GeoLite2-Country.mmdb +# For ModSecurity v2 (points to the converted one): +#SecGeoLookupDB /usr/share/GeoIP/GeoLiteCountry.dat + +# +# -=[ Block Countries ]=- +# +# Rules in the IP Reputation file can check the client against a list of high +# risk country codes. 
These countries have to be defined in the variable +# tx.high_risk_country_codes via their ISO 3166 two-letter country code: +# https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements +# +# If you are sure that you are not getting any legitimate requests from a given +# country, then you can disable all access from that country via this variable. +# The rule performing the test has the rule id 910100. +# +# This rule requires SecGeoLookupDB to be enabled and the GeoIP database to be +# downloaded (see the section "GeoIP Database" above.) +# +# By default, the list is empty. A list used by some sites was the following: +# setvar:'tx.high_risk_country_codes=UA ID YU LT EG RO BG TR RU PK MY CN'" +# +# Uncomment this rule to use this feature: +# +#SecAction \ +# "id:900600,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.high_risk_country_codes='" + + +# +# -- [[ Anti-Automation / DoS Protection ]] ------------------------------------ +# +# Optional DoS protection against clients making requests too quickly. +# +# When a client is making more than 100 requests (excluding static files) within +# 60 seconds, this is considered a 'burst'. After two bursts, the client is +# blocked for 600 seconds. +# +# Requests to static files are not counted towards DoS; they are listed in the +# 'tx.static_extensions' setting, which you can change in this file (see +# section "HTTP Policy Settings"). +# +# For a detailed description, see rule file REQUEST-912-DOS-PROTECTION.conf. +# +# Uncomment this rule to use this feature: +# +#SecAction \ +# "id:900700,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:'tx.dos_burst_time_slice=60',\ +# setvar:'tx.dos_counter_threshold=100',\ +# setvar:'tx.dos_block_timeout=600'" + + +# +# -- [[ Check UTF-8 encoding ]] ------------------------------------------------ +# +# The CRS can optionally check request contents for invalid UTF-8 encoding. 
+# We only want to apply this check if UTF-8 encoding is actually used by the +# site; otherwise it will result in false positives. +# +# Uncomment this rule to use this feature: +# +#SecAction \ +# "id:900950,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.crs_validate_utf8_encoding=1" + + +# +# -- [[ Blocking Based on IP Reputation ]] ------------------------------------ +# +# Blocking based on reputation is permanent in the CRS. Unlike other rules, +# which look at the individual request, the blocking of IPs is based on +# a persistent record in the IP collection, which remains active for a +# certain amount of time. +# +# There are two ways an individual client can become flagged for blocking: +# - External information (RBL, GeoIP, etc.) +# - Internal information (Core Rules) +# +# The record in the IP collection carries a flag, which tags requests from +# individual clients with a flag named IP.reput_block_flag. +# But the flag alone is not enough to have a client blocked. There is also +# a global switch named tx.do_reput_block. This is off by default. If you set +# it to 1 (=On), requests from clients with the IP.reput_block_flag will +# be blocked for a certain duration. +# +# Variables +# ip.reput_block_flag Blocking flag for the IP collection record +# ip.reput_block_reason Reason (= rule message) that caused to blocking flag +# tx.do_reput_block Switch deciding if we really block based on flag +# tx.reput_block_duration Setting to define the duration of a block +# +# It may be important to know, that all the other core rules are skipped for +# requests, when it is clear that they carry the blocking flag in question. 
+# +# Uncomment this rule to use this feature: +# +#SecAction \ +# "id:900960,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.do_reput_block=1" +# +# Uncomment this rule to change the blocking time: +# Default: 300 (5 minutes) +# +#SecAction \ +# "id:900970,\ +# phase:1,\ +# nolog,\ +# pass,\ +# t:none,\ +# setvar:tx.reput_block_duration=300" + + +# +# -- [[ Collection timeout ]] -------------------------------------------------- +# +# Set the SecCollectionTimeout directive from the ModSecurity default (1 hour) +# to a lower setting which is appropriate to most sites. +# This increases performance by cleaning out stale collection (block) entries. +# +# This value should be greater than or equal to: +# tx.reput_block_duration (see section "Blocking Based on IP Reputation") and +# tx.dos_block_timeout (see section "Anti-Automation / DoS Protection"). +# +# Ref: https://github.com/SpiderLabs/ModSecurity/wiki/Reference-Manual#wiki-SecCollectionTimeout + +# Please keep this directive uncommented. +# Default: 600 (10 minutes) +SecCollectionTimeout 600 + + +# +# -- [[ End of setup ]] -------------------------------------------------------- +# +# The CRS checks the tx.crs_setup_version variable to ensure that the setup +# has been loaded. If you are not planning to use this setup template, +# you must manually set the tx.crs_setup_version variable before including +# the CRS rules/* files. +# +# The variable is a numerical representation of the CRS version number. +# E.g., v3.0.0 is represented as 300. 
+#
+SecAction \
+  "id:900990,\
+  phase:1,\
+  nolog,\
+  pass,\
+  t:none,\
+  setvar:tx.crs_setup_version=332"
diff --git a/terraform/applications/nginx-waf/modsecurity/mod-sec-rules.conf b/terraform/applications/nginx-waf/modsecurity/mod-sec-rules.conf
new file mode 100644
index 00000000..a43d1115
--- /dev/null
+++ b/terraform/applications/nginx-waf/modsecurity/mod-sec-rules.conf
@@ -0,0 +1,5 @@
+# NOTE(review): removing rule 949110 disables the CRS inbound anomaly-score
+# blocking evaluation entirely -- confirm this is intended and not a leftover.
+modsecurity_rules '
+  SecRuleRemoveById 949110
+';
diff --git a/terraform/applications/nginx-waf/modsecurity/modsecurity-override.conf b/terraform/applications/nginx-waf/modsecurity/modsecurity-override.conf
new file mode 100644
index 00000000..b129aa3c
--- /dev/null
+++ b/terraform/applications/nginx-waf/modsecurity/modsecurity-override.conf
@@ -0,0 +1,30 @@
+#SecArgumentSeparator &
+#SecAuditEngine Off
+#SecAuditLog /dev/null 2>&1
+#SecAuditLogFormat JSON
+#SecAuditLogParts ABJFHZ
+#SecAuditLogRelevantStatus "^(?:5|4(?!04))"
+#SecAuditLogStorageDir /var/log/modsecurity/audit/
+#SecAuditLogType Serial
+#SecCookieFormat 0
+#SecDataDir /tmp/modsecurity/data
+#SecDebugLog /var/log/modsecurity/debug.log
+#SecDebugLogLevel 3
+#SecPcreMatchLimit 100000
+#SecPcreMatchLimitRecursion 100000
+#SecRequestBodyAccess on
+#SecRequestBodyLimit 13107200
+#SecRequestBodyJsonDepthLimit 512
+#SecRequestBodyLimitAction Reject
+#SecRequestBodyNoFilesLimit 131072
+#SecResponseBodyAccess on
+#SecResponseBodyLimit 1048576
+#SecResponseBodyLimitAction ProcessPartial
+#SecResponseBodyMimeType text/plain text/html text/xml
+#SecRuleEngine On
+#SecTmpDir /tmp/modsecurity/tmp
+#SecTmpSaveUploadedFiles on
+#SecUnicodeMapFile unicode.mapping 20127
+#SecUploadDir /tmp/modsecurity/upload
+#SecUploadFileMode 0644
+#SecRule REQUEST_URI "@beginsWith /node" "id:700004, phase:1,log,allow, msg:'Node URI accessed'"
diff --git a/terraform/applications/nginx-waf/modsecurity/modsecurity.conf b/terraform/applications/nginx-waf/modsecurity/modsecurity.conf
new file mode 100644
index 00000000..49adbb96
--- /dev/null
+++
b/terraform/applications/nginx-waf/modsecurity/modsecurity.conf @@ -0,0 +1,269 @@ +# -- Rule engine initialization ---------------------------------------------- + +# Enable ModSecurity, attaching it to every transaction. Use detection +# only to start with, because that minimises the chances of post-installation +# disruption. +# +SecRuleEngine On + + +# -- Request body handling --------------------------------------------------- + +# Allow ModSecurity to access request bodies. If you don't, ModSecurity +# won't be able to see any POST parameters, which opens a large security +# hole for attackers to exploit. +# +SecRequestBodyAccess On + + +# Enable XML request body parser. +# Initiate XML Processor in case of xml content-type +# +SecRule REQUEST_HEADERS:Content-Type "(?:application(?:/soap\+|/)|text/)xml" \ + "id:'200000',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=XML" + +# Enable JSON request body parser. +# Initiate JSON Processor in case of JSON content-type; change accordingly +# if your application does not use 'application/json' +# +SecRule REQUEST_HEADERS:Content-Type "application/json" \ + "id:'200001',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Sample rule to enable JSON request body parser for more subtypes. +# Uncomment or adapt this rule if you want to engage the JSON +# Processor for "+json" subtypes +# +#SecRule REQUEST_HEADERS:Content-Type "^application/.+[+]json$" \ +# "id:'200006',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Maximum request body size we will accept for buffering. If you support +# file uploads then the value given on the first line has to be as large +# as the largest file you are willing to accept. The second value refers +# to the size of data, with files excluded. You want to keep that value as +# low as practical. 
+# +SecRequestBodyLimit 13107200 +SecRequestBodyNoFilesLimit 320000 + +# What to do if the request body size is above our configured limit. +# Keep in mind that this setting will automatically be set to ProcessPartial +# when SecRuleEngine is set to DetectionOnly mode in order to minimize +# disruptions when initially deploying ModSecurity. +# +SecRequestBodyLimitAction Reject + +# Verify that we've correctly processed the request body. +# As a rule of thumb, when failing to process a request body +# you should reject the request (when deployed in blocking mode) +# or log a high-severity alert (when deployed in detection-only mode). +# +SecRule REQBODY_ERROR "!@eq 0" \ +"id:'200002', phase:2,t:none,log,deny,status:400,msg:'Failed to parse request body.',logdata:'%{reqbody_error_msg}',severity:2" + +# By default be strict with what we accept in the multipart/form-data +# request body. If the rule below proves to be too strict for your +# environment consider changing it to detection-only. You are encouraged +# _not_ to remove it altogether. +# +SecRule MULTIPART_STRICT_ERROR "!@eq 0" \ +"id:'200003',phase:2,t:none,log,deny,status:400, \ +msg:'Multipart request body failed strict validation: \ +PE %{REQBODY_PROCESSOR_ERROR}, \ +BQ %{MULTIPART_BOUNDARY_QUOTED}, \ +BW %{MULTIPART_BOUNDARY_WHITESPACE}, \ +DB %{MULTIPART_DATA_BEFORE}, \ +DA %{MULTIPART_DATA_AFTER}, \ +HF %{MULTIPART_HEADER_FOLDING}, \ +LF %{MULTIPART_LF_LINE}, \ +SM %{MULTIPART_MISSING_SEMICOLON}, \ +IQ %{MULTIPART_INVALID_QUOTING}, \ +IP %{MULTIPART_INVALID_PART}, \ +IH %{MULTIPART_INVALID_HEADER_FOLDING}, \ +FL %{MULTIPART_FILE_LIMIT_EXCEEDED}'" + +# Did we see anything that might be a boundary? +# +# Here is a short description about the ModSecurity Multipart parser: the +# parser returns with value 0, if all "boundary-like" line matches with +# the boundary string which given in MIME header. In any other cases it returns +# with different value, eg. 1 or 2. 
+# +# The RFC 1341 descript the multipart content-type and its syntax must contains +# only three mandatory lines (above the content): +# * Content-Type: multipart/mixed; boundary=BOUNDARY_STRING +# * --BOUNDARY_STRING +# * --BOUNDARY_STRING-- +# +# First line indicates, that this is a multipart content, second shows that +# here starts a part of the multipart content, third shows the end of content. +# +# If there are any other lines, which starts with "--", then it should be +# another boundary id - or not. +# +# After 3.0.3, there are two kinds of types of boundary errors: strict and permissive. +# +# If multipart content contains the three necessary lines with correct order, but +# there are one or more lines with "--", then parser returns with value 2 (non-zero). +# +# If some of the necessary lines (usually the start or end) misses, or the order +# is wrong, then parser returns with value 1 (also a non-zero). +# +# You can choose, which one is what you need. The example below contains the +# 'strict' mode, which means if there are any lines with start of "--", then +# ModSecurity blocked the content. But the next, commented example contains +# the 'permissive' mode, then you check only if the necessary lines exists in +# correct order. Whit this, you can enable to upload PEM files (eg "----BEGIN.."), +# or other text files, which contains eg. HTTP headers. +# +# The difference is only the operator - in strict mode (first) the content blocked +# in case of any non-zero value. In permissive mode (second, commented) the +# content blocked only if the value is explicit 1. If it 0 or 2, the content will +# allowed. +# + +# +# See #1747 and #1924 for further information on the possible values for +# MULTIPART_UNMATCHED_BOUNDARY. 
+# +SecRule MULTIPART_UNMATCHED_BOUNDARY "@eq 1" \ + "id:'200004',phase:2,t:none,log,deny,msg:'Multipart parser detected a possible unmatched boundary.'" + + +# PCRE Tuning +# We want to avoid a potential RegEx DoS condition +# +SecPcreMatchLimit 500000 +SecPcreMatchLimitRecursion 500000 + +# Some internal errors will set flags in TX and we will need to look for these. +# All of these are prefixed with "MSC_". The following flags currently exist: +# +# MSC_PCRE_LIMITS_EXCEEDED: PCRE match limits were exceeded. +# +SecRule TX:/^MSC_/ "!@streq 0" \ + "id:'200005',phase:2,t:none,deny,msg:'ModSecurity internal error flagged: %{MATCHED_VAR_NAME}'" + + +# -- Response body handling -------------------------------------------------- + +# Allow ModSecurity to access response bodies. +# You should have this directive enabled in order to identify errors +# and data leakage issues. +# +# Do keep in mind that enabling this directive does increases both +# memory consumption and response latency. +# +SecResponseBodyAccess Off + +# Which response MIME types do you want to inspect? You should adjust the +# configuration below to catch documents but avoid static files +# (e.g., images and archives). +# +SecResponseBodyMimeType text/plain text/html text/xml + +# Buffer response bodies of up to 512 KB in length. +SecResponseBodyLimit 524288 + +# What happens when we encounter a response body larger than the configured +# limit? By default, we process what we have and let the rest through. +# That's somewhat less secure, but does not break any legitimate pages. +# +SecResponseBodyLimitAction ProcessPartial + + +# -- Filesystem configuration ------------------------------------------------ + +# The location where ModSecurity stores temporary files (for example, when +# it needs to handle a file upload that is larger than the configured limit). +# +# This default setting is chosen due to all systems have /tmp available however, +# this is less than ideal. 
It is recommended that you specify a location that's private. +# +SecTmpDir /tmp/ + +# The location where ModSecurity will keep its persistent data. This default setting +# is chosen due to all systems have /tmp available however, it +# too should be updated to a place that other users can't access. +# +SecDataDir /tmp/ + + +# -- File uploads handling configuration ------------------------------------- + +# The location where ModSecurity stores intercepted uploaded files. This +# location must be private to ModSecurity. You don't want other users on +# the server to access the files, do you? +# +#SecUploadDir /opt/modsecurity/var/upload/ + +# By default, only keep the files that were determined to be unusual +# in some way (by an external inspection script). For this to work you +# will also need at least one file inspection rule. +# +#SecUploadKeepFiles RelevantOnly + +# Uploaded files are by default created with permissions that do not allow +# any other user to access them. You may need to relax that if you want to +# interface ModSecurity to an external program (e.g., an anti-virus). +# +#SecUploadFileMode 0600 + + +# -- Debug log configuration ------------------------------------------------- + +# The default debug log configuration is to duplicate the error, warning +# and notice messages from the error log. +# +#SecDebugLog /var/log/modsecurity/debug.log +#SecDebugLogLevel 3 + + +# -- Audit log configuration ------------------------------------------------- + +# Log the transactions that are marked by a rule, as well as those that +# trigger a server error (determined by a 5xx or 4xx, excluding 404, +# level response status codes). +# +SecAuditEngine RelevantOnly +SecAuditLogRelevantStatus "^(?:5|4(?!04))" + +# Log everything we know about a transaction. +SecAuditLogParts ABIJDEFHZ + +# Use a single file for logging. This is much easier to look at, but +# assumes that you will use the audit log only ocassionally. 
+# +SecAuditLogType Serial +SecAuditLog /var/log/modsecurity/modsec_audit.log + +# Specify the path for concurrent audit logging. +SecAuditLogStorageDir /var/log/modsecurity/ + + +# -- Miscellaneous ----------------------------------------------------------- + +# Use the most commonly used application/x-www-form-urlencoded parameter +# separator. There's probably only one application somewhere that uses +# something else so don't expect to change this value. +# +SecArgumentSeparator & + +# Settle on version 0 (zero) cookies, as that is what most applications +# use. Using an incorrect cookie version may open your installation to +# evasion attacks (against the rules that examine named cookies). +# +SecCookieFormat 0 + +# Specify your Unicode Code Point. +# This mapping is used by the t:urlDecodeUni transformation function +# to properly map encoded data to your language. Properly setting +# these directives helps to reduce false positives and negatives. +# +SecUnicodeMapFile unicode.mapping 20127 + +# Improve the quality of ModSecurity by sharing information about your +# current ModSecurity version and dependencies versions. +# The following information will be shared: ModSecurity version, +# Web Server version, APR version, PCRE version, Lua version, Libxml2 +# version, Anonymous unique id for host. +SecStatusEngine On diff --git a/terraform/applications/nginx-waf/modsecurity/modsecurity.conf-recommended b/terraform/applications/nginx-waf/modsecurity/modsecurity.conf-recommended new file mode 100644 index 00000000..02aece40 --- /dev/null +++ b/terraform/applications/nginx-waf/modsecurity/modsecurity.conf-recommended @@ -0,0 +1,284 @@ +# -- Rule engine initialization ---------------------------------------------- + +# Enable ModSecurity, attaching it to every transaction. Use detection +# only to start with, because that minimises the chances of post-installation +# disruption. 
+# +SecRuleEngine DetectionOnly + + +# -- Request body handling --------------------------------------------------- + +# Allow ModSecurity to access request bodies. If you don't, ModSecurity +# won't be able to see any POST parameters, which opens a large security +# hole for attackers to exploit. +# +SecRequestBodyAccess On + + +# Enable XML request body parser. +# Initiate XML Processor in case of xml content-type +# +SecRule REQUEST_HEADERS:Content-Type "^(?:application(?:/soap\+|/)|text/)xml" \ + "id:'200000',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=XML" + +# Enable JSON request body parser. +# Initiate JSON Processor in case of JSON content-type; change accordingly +# if your application does not use 'application/json' +# +SecRule REQUEST_HEADERS:Content-Type "^application/json" \ + "id:'200001',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Sample rule to enable JSON request body parser for more subtypes. +# Uncomment or adapt this rule if you want to engage the JSON +# Processor for "+json" subtypes +# +#SecRule REQUEST_HEADERS:Content-Type "^application/[a-z0-9.-]+[+]json" \ +# "id:'200006',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Maximum request body size we will accept for buffering. If you support +# file uploads then the value given on the first line has to be as large +# as the largest file you are willing to accept. The second value refers +# to the size of data, with files excluded. You want to keep that value as +# low as practical. +# +SecRequestBodyLimit 13107200 +SecRequestBodyNoFilesLimit 131072 + +# What to do if the request body size is above our configured limit. +# Keep in mind that this setting will automatically be set to ProcessPartial +# when SecRuleEngine is set to DetectionOnly mode in order to minimize +# disruptions when initially deploying ModSecurity. +# +SecRequestBodyLimitAction Reject + +# Maximum parsing depth allowed for JSON objects. 
You want to keep this +# value as low as practical. +# +SecRequestBodyJsonDepthLimit 512 + +# Maximum number of args allowed per request. You want to keep this +# value as low as practical. The value should match that in rule 200007. +SecArgumentsLimit 1000 + +# If SecArgumentsLimit has been set, you probably want to reject any +# request body that has only been partly parsed. The value used in this +# rule should match what was used with SecArgumentsLimit +SecRule &ARGS "@ge 1000" \ +"id:'200007', phase:2,t:none,log,deny,status:400,msg:'Failed to fully parse request body due to large argument count',severity:2" + +# Verify that we've correctly processed the request body. +# As a rule of thumb, when failing to process a request body +# you should reject the request (when deployed in blocking mode) +# or log a high-severity alert (when deployed in detection-only mode). +# +SecRule REQBODY_ERROR "!@eq 0" \ +"id:'200002', phase:2,t:none,log,deny,status:400,msg:'Failed to parse request body.',logdata:'%{reqbody_error_msg}',severity:2" + +# By default be strict with what we accept in the multipart/form-data +# request body. If the rule below proves to be too strict for your +# environment consider changing it to detection-only. You are encouraged +# _not_ to remove it altogether. +# +SecRule MULTIPART_STRICT_ERROR "!@eq 0" \ +"id:'200003',phase:2,t:none,log,deny,status:400, \ +msg:'Multipart request body failed strict validation: \ +PE %{REQBODY_PROCESSOR_ERROR}, \ +BQ %{MULTIPART_BOUNDARY_QUOTED}, \ +BW %{MULTIPART_BOUNDARY_WHITESPACE}, \ +DB %{MULTIPART_DATA_BEFORE}, \ +DA %{MULTIPART_DATA_AFTER}, \ +HF %{MULTIPART_HEADER_FOLDING}, \ +LF %{MULTIPART_LF_LINE}, \ +SM %{MULTIPART_MISSING_SEMICOLON}, \ +IQ %{MULTIPART_INVALID_QUOTING}, \ +IP %{MULTIPART_INVALID_PART}, \ +IH %{MULTIPART_INVALID_HEADER_FOLDING}, \ +FL %{MULTIPART_FILE_LIMIT_EXCEEDED}'" + +# Did we see anything that might be a boundary? 
+# +# Here is a short description about the ModSecurity Multipart parser: the +# parser returns with value 0, if all "boundary-like" line matches with +# the boundary string which given in MIME header. In any other cases it returns +# with different value, eg. 1 or 2. +# +# The RFC 1341 descript the multipart content-type and its syntax must contains +# only three mandatory lines (above the content): +# * Content-Type: multipart/mixed; boundary=BOUNDARY_STRING +# * --BOUNDARY_STRING +# * --BOUNDARY_STRING-- +# +# First line indicates, that this is a multipart content, second shows that +# here starts a part of the multipart content, third shows the end of content. +# +# If there are any other lines, which starts with "--", then it should be +# another boundary id - or not. +# +# After 3.0.3, there are two kinds of types of boundary errors: strict and permissive. +# +# If multipart content contains the three necessary lines with correct order, but +# there are one or more lines with "--", then parser returns with value 2 (non-zero). +# +# If some of the necessary lines (usually the start or end) misses, or the order +# is wrong, then parser returns with value 1 (also a non-zero). +# +# You can choose, which one is what you need. The example below contains the +# 'strict' mode, which means if there are any lines with start of "--", then +# ModSecurity blocked the content. But the next, commented example contains +# the 'permissive' mode, then you check only if the necessary lines exists in +# correct order. Whit this, you can enable to upload PEM files (eg "----BEGIN.."), +# or other text files, which contains eg. HTTP headers. +# +# The difference is only the operator - in strict mode (first) the content blocked +# in case of any non-zero value. In permissive mode (second, commented) the +# content blocked only if the value is explicit 1. If it 0 or 2, the content will +# allowed. 
+# + +# +# See #1747 and #1924 for further information on the possible values for +# MULTIPART_UNMATCHED_BOUNDARY. +# +SecRule MULTIPART_UNMATCHED_BOUNDARY "@eq 1" \ + "id:'200004',phase:2,t:none,log,deny,msg:'Multipart parser detected a possible unmatched boundary.'" + + +# PCRE Tuning +# We want to avoid a potential RegEx DoS condition +# +SecPcreMatchLimit 1000 +SecPcreMatchLimitRecursion 1000 + +# Some internal errors will set flags in TX and we will need to look for these. +# All of these are prefixed with "MSC_". The following flags currently exist: +# +# MSC_PCRE_LIMITS_EXCEEDED: PCRE match limits were exceeded. +# +SecRule TX:/^MSC_/ "!@streq 0" \ + "id:'200005',phase:2,t:none,deny,msg:'ModSecurity internal error flagged: %{MATCHED_VAR_NAME}'" + + +# -- Response body handling -------------------------------------------------- + +# Allow ModSecurity to access response bodies. +# You should have this directive enabled in order to identify errors +# and data leakage issues. +# +# Do keep in mind that enabling this directive does increases both +# memory consumption and response latency. +# +SecResponseBodyAccess On + +# Which response MIME types do you want to inspect? You should adjust the +# configuration below to catch documents but avoid static files +# (e.g., images and archives). +# +SecResponseBodyMimeType text/plain text/html text/xml + +# Buffer response bodies of up to 512 KB in length. +SecResponseBodyLimit 524288 + +# What happens when we encounter a response body larger than the configured +# limit? By default, we process what we have and let the rest through. +# That's somewhat less secure, but does not break any legitimate pages. +# +SecResponseBodyLimitAction ProcessPartial + + +# -- Filesystem configuration ------------------------------------------------ + +# The location where ModSecurity stores temporary files (for example, when +# it needs to handle a file upload that is larger than the configured limit). 
+# +# This default setting is chosen due to all systems have /tmp available however, +# this is less than ideal. It is recommended that you specify a location that's private. +# +SecTmpDir /tmp/ + +# The location where ModSecurity will keep its persistent data. This default setting +# is chosen due to all systems have /tmp available however, it +# too should be updated to a place that other users can't access. +# +SecDataDir /tmp/ + + +# -- File uploads handling configuration ------------------------------------- + +# The location where ModSecurity stores intercepted uploaded files. This +# location must be private to ModSecurity. You don't want other users on +# the server to access the files, do you? +# +#SecUploadDir /opt/modsecurity/var/upload/ + +# By default, only keep the files that were determined to be unusual +# in some way (by an external inspection script). For this to work you +# will also need at least one file inspection rule. +# +#SecUploadKeepFiles RelevantOnly + +# Uploaded files are by default created with permissions that do not allow +# any other user to access them. You may need to relax that if you want to +# interface ModSecurity to an external program (e.g., an anti-virus). +# +#SecUploadFileMode 0600 + + +# -- Debug log configuration ------------------------------------------------- + +# The default debug log configuration is to duplicate the error, warning +# and notice messages from the error log. +# +#SecDebugLog /opt/modsecurity/var/log/debug.log +#SecDebugLogLevel 3 + + +# -- Audit log configuration ------------------------------------------------- + +# Log the transactions that are marked by a rule, as well as those that +# trigger a server error (determined by a 5xx or 4xx, excluding 404, +# level response status codes). +# +SecAuditEngine RelevantOnly +SecAuditLogRelevantStatus "^(?:5|4(?!04))" + +# Log everything we know about a transaction. +SecAuditLogParts ABIJDEFHZ + +# Use a single file for logging. 
This is much easier to look at, but +# assumes that you will use the audit log only ocassionally. +# +SecAuditLogType Serial +SecAuditLog /var/log/modsec_audit.log + +# Specify the path for concurrent audit logging. +#SecAuditLogStorageDir /opt/modsecurity/var/audit/ + + +# -- Miscellaneous ----------------------------------------------------------- + +# Use the most commonly used application/x-www-form-urlencoded parameter +# separator. There's probably only one application somewhere that uses +# something else so don't expect to change this value. +# +SecArgumentSeparator & + +# Settle on version 0 (zero) cookies, as that is what most applications +# use. Using an incorrect cookie version may open your installation to +# evasion attacks (against the rules that examine named cookies). +# +SecCookieFormat 0 + +# Specify your Unicode Code Point. +# This mapping is used by the t:urlDecodeUni transformation function +# to properly map encoded data to your language. Properly setting +# these directives helps to reduce false positives and negatives. +# +SecUnicodeMapFile unicode.mapping 20127 + +# Improve the quality of ModSecurity by sharing information about your +# current ModSecurity version and dependencies versions. +# The following information will be shared: ModSecurity version, +# Web Server version, APR version, PCRE version, Lua version, Libxml2 +# version, Anonymous unique id for host. 
+SecStatusEngine On diff --git a/terraform/applications/nginx-waf/modsecurity/unicode.mapping b/terraform/applications/nginx-waf/modsecurity/unicode.mapping new file mode 100644 index 00000000..04f22a0d --- /dev/null +++ b/terraform/applications/nginx-waf/modsecurity/unicode.mapping @@ -0,0 +1,94 @@ +(MAC - Roman) + + +(MAC - Icelandic) + + +1250 (ANSI - Central Europe) +00a1:21 00a2:63 00a3:4c 00a5:59 00aa:61 00b2:32 00b3:33 00b9:31 00ba:6f 00bc:31 00bd:31 00be:33 00c0:41 00c3:41 00c5:41 00c6:41 00c8:45 00ca:45 00cc:49 00cf:49 00d1:4e 00d2:4f 00d5:4f 00d8:4f 00d9:55 00db:55 00e0:61 00e3:61 00e5:61 00e6:61 00e8:65 00ea:65 00ec:69 00ef:69 00f1:6e 00f2:6f 00f5:6f 00f8:6f 00f9:75 00fb:75 00ff:79 0100:41 0101:61 0108:43 0109:63 010a:43 010b:63 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 013b:4c 013c:6c 0145:4e 0146:6e 014c:4f 014d:6f 014e:4f 014f:6f 0152:4f 0153:6f 0156:52 0157:72 015c:53 015d:73 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0180:62 0191:46 0192:66 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c0:7c 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02cb:60 02cd:5f 02dc:7e 0300:60 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 037e:3b 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2032:27 2035:60 203c:21 2044:2f 2074:34 2075:35 2076:36 2077:37 2078:38 2080:30 2081:31 2082:32 2083:33 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 2102:43 2107:45 210a:67 
210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:50 2119:50 211a:51 211b:52 211c:52 211d:52 2124:5a 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2191:5e 2194:2d 2195:7c 21a8:7c 2212:2d 2215:2f 2216:5c 2217:2a 221f:4c 2223:7c 2236:3a 223c:7e 2303:5e 2329:3c 232a:3e 2502:2d 250c:2d 2514:4c 2518:2d 251c:2b 2524:2b 252c:54 2534:2b 253c:2b 2550:3d 2554:2d 255a:4c 255d:2d 2566:54 256c:2b 2580:2d 2584:2d 2588:2d 2591:2d 2592:2d 2593:2d 25ac:2d 25b2:5e 25ba:3e 25c4:3c 25cb:30 25d9:30 263c:30 2640:2b 2642:3e 266a:64 266b:64 2758:7c 3000:20 3008:3c 3009:3e 301a:5b 301b:5d ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1251 (ANSI - Cyrillic) +00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d8:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e7:63 00e8:65 00e9:65 00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f8:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 
0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 203c:21 2190:3c 2191:5e 2192:3e 2193:76 2194:2d 221a:76 221f:4c 2500:2d 250c:2d 2514:4c 2518:2d 251c:2b 2524:2b 252c:54 2534:2b 253c:2b 2550:3d 2552:2d 2558:4c 2559:4c 255a:4c 255b:2d 255c:2d 255d:2d 2564:54 2565:54 2566:54 256a:2b 256b:2b 256c:2b 2580:2d 2584:2d 2588:2d 2591:2d 2592:2d 2593:2d 25ac:2d 25b2:5e 25ba:3e 25c4:3c 25cb:30 25d9:30 263a:4f 263b:4f 263c:30 2640:2b 2642:3e 266a:64 266b:64 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 
ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1252 (ANSI - Latin I) +0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0179:5a 017b:5a 017c:7a 0180:62 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c0:7c 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c8:27 02cb:60 02cd:5f 0300:60 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 037e:3b 0393:47 0398:54 03a3:53 03a6:46 03a9:4f 03b1:61 03b4:64 03b5:65 03c0:70 03c3:73 03c4:74 03c6:66 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2017:3d 2032:27 2035:60 2044:2f 2074:34 2075:35 2076:36 2077:37 2078:38 207f:6e 2080:30 2081:31 2082:32 2083:33 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20a7:50 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 
210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:50 2119:50 211a:51 211b:52 211c:52 211d:52 2124:5a 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2212:2d 2215:2f 2216:5c 2217:2a 221a:76 221e:38 2223:7c 2229:6e 2236:3a 223c:7e 2261:3d 2264:3d 2265:3d 2303:5e 2320:28 2321:29 2329:3c 232a:3e 2500:2d 250c:2b 2510:2b 2514:2b 2518:2b 251c:2b 252c:2d 2534:2d 253c:2b 2550:2d 2552:2b 2553:2b 2554:2b 2555:2b 2556:2b 2557:2b 2558:2b 2559:2b 255a:2b 255b:2b 255c:2b 255d:2b 2564:2d 2565:2d 2566:2d 2567:2d 2568:2d 2569:2d 256a:2b 256b:2b 256c:2b 2584:5f 2758:7c 3000:20 3008:3c 3009:3e 301a:5b 301b:5d ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1253 (ANSI - Greek) +00b4:2f 00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d8:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e7:63 00e8:65 00e9:65 00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f8:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 
0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 037e:3b 203c:21 2190:3c 2191:5e 2192:3e 2193:76 2194:2d 221f:4c 2500:2d 250c:2d 2514:4c 2518:2d 251c:2b 2524:2b 252c:54 2534:2b 253c:2b 2550:3d 2554:2d 255a:4c 255d:2d 2566:54 256c:2b 2580:2d 2584:2d 2588:2d 2591:2d 2592:2d 2593:2d 25ac:2d 25b2:5e 25ba:3e 25c4:3c 25cb:30 25d9:30 263a:4f 263b:4f 263c:30 2640:2b 2642:3e 266a:64 266b:64 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a 
ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1254 (ANSI - Turkish) +00dd:59 00fd:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c0:7c 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c7:5e 02c8:27 02cb:60 02cd:5f 02d8:5e 02d9:27 0300:60 0302:5e 0331:5f 0332:5f 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2032:27 2035:60 203c:21 2044:2f 2074:34 2075:35 2076:36 2077:37 2078:38 2081:30 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:50 2119:50 211a:51 211b:52 211c:52 211d:52 2124:5a 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2191:5e 2193:76 
2194:2d 2195:7c 21a8:7c 2212:2d 2215:2f 2216:5c 2217:2a 221f:4c 2223:7c 2236:3a 223c:7e 2303:5e 2329:3c 232a:3e 2502:2d 250c:2d 2514:4c 2518:2d 251c:2b 2524:2b 252c:54 2534:2b 253c:2b 2550:3d 2554:2d 255a:4c 255d:2d 2566:54 256c:2b 2580:2d 2584:2d 2588:2d 2591:2d 2592:2d 2593:2d 25ac:2d 25b2:5e 25ba:3e 25c4:3c 25cb:30 25d9:30 263a:4f 263b:4f 263c:30 2640:2b 2642:3e 266a:64 266b:64 2758:7c 3000:20 3008:3c 3009:3e 301a:5b 301b:3d 301d:22 301e:22 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1255 (ANSI - Hebrew) +0191:46 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1256 (ANSI - Arabic) 
+0620:41 0621:41 0622:43 0623:45 0624:45 0625:45 0626:45 0627:49 0628:49 0629:4f 062a:55 062b:55 062c:55 062d:46 062e:43 062f:44 0630:45 0631:46 0632:47 0633:48 0634:49 0635:4a 0636:4b 0637:4c 0638:4d 0639:4e 063a:4f 0641:41 0642:42 0643:43 0644:44 0645:45 0646:46 0647:47 0648:48 0649:49 064a:4a 064b:4b 064c:4c 064d:4d 064e:4e 064f:4f 0650:50 0651:51 0652:52 + +1257 (ANSI - Baltic) +ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +1258 (ANSI/OEM - Viet Nam) +ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +20127 (US-ASCII) +00a0:20 00a1:21 00a2:63 00a4:24 00a5:59 00a6:7c 00a9:43 00aa:61 00ab:3c 
00ad:2d 00ae:52 00b2:32 00b3:33 00b7:2e 00b8:2c 00b9:31 00ba:6f 00bb:3e 00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c6:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d0:44 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d8:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e6:61 00e7:63 00e8:65 00e9:65 00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f8:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0191:46 0192:66 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02cb:60 02cd:5f 02dc:7e 0300:60 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 
2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2018:27 2019:27 201a:2c 201c:22 201d:22 201e:22 2022:2e 2026:2e 2032:27 2035:60 2039:3c 203a:3e 2122:54 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +20261 (T.61) +f8dd:5c f8de:5e f8df:60 f8e0:7b f8fc:7d f8fd:7e f8fe:7f + +20866 (Russian - KOI8) +00a7:15 00ab:3c 00ad:2d 00ae:52 00b1:2b 00b6:14 00bb:3e 00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d8:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e7:63 00e8:65 00e9:65 00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f8:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 
0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 2013:2d 2014:2d 2018:27 2019:27 201a:27 201c:22 201d:22 201e:22 2022:07 2026:3a 2030:25 2039:3c 203a:3e 203c:13 2122:54 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 221f:1c 2302:7f 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e + +28591 (ISO 8859-1 Latin I) +0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 
0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0191:46 0192:66 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02cb:60 02cd:5f 02dc:7e 0300:60 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2018:27 2019:27 201a:2c 201c:22 201d:22 201e:22 2022:2e 2026:2e 2032:27 2035:60 2039:3c 203a:3e 2122:54 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +28592 (ISO 8859-2 Central Europe) +00a1:21 00a2:63 00a5:59 00a6:7c 00a9:43 00aa:61 00ab:3c 00ae:52 00b2:32 00b3:33 00b7:2e 00b9:31 00ba:6f 00bb:3e 00c0:41 00c3:41 00c5:41 00c6:41 00c8:45 00ca:45 00cc:49 00cf:49 00d0:44 00d1:4e 00d2:4f 00d5:4f 00d8:4f 00d9:55 00db:55 00e0:61 00e3:61 00e5:61 00e6:61 00e8:65 00ea:65 00ec:69 00ef:69 00f1:6e 00f2:6f 00f5:6f 00f8:6f 00f9:75 00fb:75 00ff:79 0100:41 0101:61 0108:43 0109:63 010a:43 010b:63 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 
0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 013b:4c 013c:6c 0145:4e 0146:6e 014c:4f 014d:6f 014e:4f 014f:6f 0152:4f 0153:6f 0156:52 0157:72 015c:53 015d:73 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0180:62 0189:44 0191:46 0192:66 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02cb:60 02cd:5f 02dc:7e 0300:60 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2018:27 2019:27 201a:2c 201c:22 201d:22 201e:22 2022:2e 2026:2e 2032:27 2035:60 2039:3c 203a:3e 2122:54 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +28605 (ISO 8859-15 Latin 9) +00a6:7c 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 
0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0138:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014a:4e 014b:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:54 0169:74 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0179:5a 017b:5a 017c:7a 0180:62 0189:44 0191:46 0192:66 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02cb:60 02cd:5f 02dc:7e 0300:60 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2018:27 2019:27 201a:2c 201c:22 201d:22 201e:22 2022:2e 2026:2e 2032:27 2035:60 2039:3c 203a:3e 2122:54 ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e 
ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +37 (IBM EBCDIC - U.S./Canada) +0004:37 0005:2d 0006:2e 0007:2f 0008:16 0009:05 000a:25 0014:3c 0015:3d 0016:32 0017:26 001a:3f 001b:27 0020:40 0021:5a 0022:7f 0023:7b 0024:5b 0025:6c 0026:50 0027:7d 0028:4d 0029:5d 002a:5c 002b:4e 002c:6b 002d:60 002e:4b 002f:61 003a:7a 003b:5e 003c:4c 003d:7e 003e:6e 003f:6f 0040:7c 005f:6d 0060:79 007c:4f 007f:07 0080:20 0081:21 0082:22 0083:23 0084:24 0085:15 0086:06 0087:17 0088:28 0089:29 008a:2a 008b:2b 008c:2c 008d:09 008e:0a 008f:1b 0090:30 0091:31 0092:1a 0093:33 0094:34 0095:35 0096:36 0097:08 0098:38 0099:39 009a:3a 009b:3b 009c:04 009d:14 009e:3e 00a0:41 00a2:4a 00a6:6a 00ac:5f 00c0:64 00c1:65 00c2:62 00c3:66 00c4:63 00c5:67 00c7:68 00c8:74 00c9:71 00ca:72 00cb:73 00cc:78 00cd:75 00ce:76 00cf:77 00d1:69 00df:59 00e0:44 00e1:45 00e2:42 00e3:46 00e4:43 00e5:47 00e7:48 00e8:54 00e9:51 00ea:52 00eb:53 00ec:58 00ed:55 00ee:56 00ef:57 00f1:49 00f8:70 ff01:5a ff02:7f ff03:7b ff04:5b ff05:6c ff06:50 ff07:7d ff08:4d ff09:5d ff0a:5c ff0b:4e ff0c:6b ff0d:60 ff0e:4b ff0f:61 ff1a:7a ff1b:5e ff1c:4c ff1d:7e ff1e:6e ff20:7c ff3f:6d ff40:79 ff5c:4f + +437 (OEM - United States) +00a4:0f 00a7:15 00a8:22 00a9:63 00ad:2d 00ae:72 00af:5f 00b3:33 00b4:27 00b6:14 00b8:2c 00b9:31 00be:5f 00c0:41 00c1:41 00c2:41 00c3:41 00c8:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d0:44 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d7:78 00d8:4f 00d9:55 00da:55 00db:55 00dd:59 00de:5f 00e3:61 00f0:64 00f5:6f 00f8:6f 00fd:79 00fe:5f 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 
0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c0:7c 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02ca:27 02cb:60 02cd:5f 02dc:7e 0300:60 0301:27 0302:5e 0303:7e 0308:22 030e:22 0327:2c 0331:5f 0332:5f 037e:3b 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2017:5f 2018:60 2019:27 201a:2c 201c:22 201d:22 201e:2c 2020:2b 2022:07 2026:2e 2030:25 2032:27 2035:60 2039:3c 203a:3e 203c:13 2044:2f 2074:34 2075:35 2076:36 2077:37 2078:38 2080:30 2081:31 2082:32 2083:33 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20dd:09 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:50 2119:50 211a:51 211b:52 211c:52 211d:52 2122:54 2124:5a 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 2212:2d 2215:2f 2216:5c 2217:2a 221f:1c 2223:7c 2236:3a 223c:7e 2302:7f 2303:5e 2329:3c 232a:3e 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e 2758:7c 3000:20 3007:09 3008:3c 
3009:3e 301a:5b 301b:5d ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +500 (IBM EBCDIC - International) +0004:37 0005:2d 0006:2e 0007:2f 0008:16 0009:05 000a:25 0014:3c 0015:3d 0016:32 0017:26 001a:3f 001b:27 0020:40 0021:4f 0022:7f 0023:7b 0024:5b 0025:6c 0026:50 0027:7d 0028:4d 0029:5d 002a:5c 002b:4e 002c:6b 002d:60 002e:4b 002f:61 003a:7a 003b:5e 003c:4c 003d:7e 003e:6e 003f:6f 0040:7c 005b:4a 005d:5a 005e:5f 005f:6d 0060:79 007f:07 0080:20 0081:21 0082:22 0083:23 0084:24 0085:15 0086:06 0087:17 0088:28 0089:29 008a:2a 008b:2b 008c:2c 008d:09 008e:0a 008f:1b 0090:30 0091:31 0092:1a 0093:33 0094:34 0095:35 0096:36 0097:08 0098:38 0099:39 009a:3a 009b:3b 009c:04 009d:14 009e:3e 00a0:41 00a6:6a 00c0:64 00c1:65 00c2:62 00c3:66 00c4:63 00c5:67 00c7:68 00c8:74 00c9:71 00ca:72 00cb:73 00cc:78 00cd:75 00ce:76 00cf:77 00d1:69 00df:59 00e0:44 00e1:45 00e2:42 00e3:46 00e4:43 00e5:47 00e7:48 00e8:54 00e9:51 00ea:52 00eb:53 00ec:58 00ed:55 00ee:56 00ef:57 00f1:49 00f8:70 ff01:4f ff02:7f ff03:7b ff04:5b ff05:6c ff06:50 ff07:7d ff08:4d ff09:5d ff0a:5c ff0b:4e ff0c:6b ff0d:60 ff0e:4b ff0f:61 ff1a:7a ff1b:5e ff1c:4c ff1d:7e ff1e:6e ff20:7c ff3b:4a ff3d:5a ff3e:5f ff3f:6d ff40:79 + +850 (OEM - Multilingual Latin I) +0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 
0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01a9:53 01ab:74 01ae:54 01af:55 01b0:75 01b6:5a 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02cb:27 02cd:5f 02dc:7e 0300:27 0302:5e 0303:7e 030e:22 0331:5f 0332:5f 037e:3b 0393:47 03a3:53 03a6:46 03a9:4f 03b1:61 03b4:64 03b5:65 03c0:70 03c3:73 03c4:74 03c6:66 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2018:27 2019:27 201a:27 201c:22 201d:22 201e:22 2022:07 2024:07 2026:2e 2030:25 2039:3c 203a:3e 203c:13 2044:2f 2070:30 2074:34 2075:35 2076:36 2077:37 2078:39 207f:6e 2080:30 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20a7:50 20dd:4f 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:50 2119:50 211a:51 211b:52 211c:52 211d:52 2122:54 2124:5a 2126:4f 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 
2131:46 2133:4d 2134:6f 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 2211:53 2212:2d 2215:2f 2216:2f 2217:2a 2219:07 221a:56 221e:38 221f:1c 2229:6e 2236:3a 223c:7e 2248:7e 2261:3d 2264:3d 2265:3d 2302:7f 2303:5e 2320:28 2321:29 2329:3c 232a:3e 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e 2713:56 3000:20 3007:4f 3008:3c 3009:3e 301a:5b 301b:5d ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +860 (OEM - Portuguese) +00a4:0f 00a5:59 00a7:15 00a8:22 00a9:63 00ad:5f 00ae:72 00af:16 00b3:33 00b4:2f 00b6:14 00b8:2c 00b9:31 00be:33 00c4:41 00c5:41 00c6:41 00cb:45 00ce:49 00cf:49 00d0:44 00d6:4f 00d7:58 00d8:4f 00db:55 00dd:59 00de:54 00e4:61 00e5:61 00e6:61 00eb:65 00ee:69 00ef:69 00f0:64 00f6:6f 00f8:6f 00fb:75 00fd:79 00fe:74 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 
013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:5c 0161:7c 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0191:46 0192:66 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c0:7c 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 0278:66 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02c9:16 02ca:2f 02cb:60 02cd:5f 02dc:7e 0300:60 0301:2f 0302:5e 0303:7e 0304:16 0305:16 0308:22 030e:22 0327:2c 0331:5f 0332:5f 037e:3b 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:5f 2011:5f 2013:5f 2014:5f 2017:5f 2018:27 2019:27 201a:2c 201c:22 201d:22 201e:22 2022:07 2024:07 2026:2e 2030:25 2032:27 2035:60 2039:3c 203a:3e 203c:13 2044:2f 2070:30 2074:34 2075:35 2076:36 2077:37 2078:38 2080:30 2081:31 2083:33 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20dd:4f 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:70 2119:50 211a:51 211b:52 211c:52 211d:52 2122:74 2124:5a 2128:5a 212a:4b 212b:41 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 2205:4f 2212:5f 2215:2f 2216:5c 2217:2a 221f:1c 2223:7c 2236:3a 223c:7e 22c5:07 2302:7f 2303:5e 2329:3c 232a:3e 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 
266b:0e 3000:20 3007:4f 3008:3c 3009:3e 301a:5b 301b:5d 30fb:07 + +861 (OEM - Icelandic) +00a2:63 00a4:0f 00a5:59 00a7:15 00a8:22 00a9:63 00aa:61 00ad:5f 00ae:72 00af:16 00b3:33 00b4:2f 00b6:14 00b8:2c 00b9:31 00ba:6f 00be:33 00c0:41 00c2:41 00c3:41 00c8:45 00ca:45 00cb:45 00cc:49 00ce:49 00cf:49 00d1:4e 00d2:4f 00d4:4f 00d5:4f 00d7:58 00d9:55 00db:55 00e3:61 00ec:69 00ee:69 00ef:69 00f1:6e 00f2:6f 00f5:6f 00f9:75 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 0278:66 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02c9:16 02ca:2f 02cb:60 02cd:5f 02dc:7e 0300:60 0301:2f 0302:5e 0303:7e 0304:16 0305:16 0308:22 030e:22 0327:2c 0331:5f 0332:5f 037e:3b 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 
2014:2d 2017:5f 2018:27 2019:27 201a:27 201c:22 201d:22 201e:22 2022:07 2024:07 2026:07 2030:25 2032:27 2035:27 2039:3c 203a:3e 203c:13 2044:2f 2070:30 2074:34 2075:35 2076:36 2077:37 2078:38 2080:30 2081:31 2083:33 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20dd:4f 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:70 2119:50 211a:51 211b:52 211c:52 211d:52 2122:74 2124:5a 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 2205:4f 2212:5f 2215:2f 2216:5c 2217:2a 221f:1c 2223:7c 2236:3a 223c:7e 22c5:07 2302:7f 2303:5e 2329:3c 232a:3e 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e 3000:20 3007:4f 3008:3c 3009:3e 301a:5b 301b:5d 30fb:07 + +863 (OEM - Canadian French) +00a1:21 00a5:59 00a9:63 00aa:61 00ad:16 00ae:72 00b9:33 00ba:6f 00c1:41 00c3:41 00c4:41 00c5:41 00c6:41 00cc:49 00cd:49 00d0:44 00d1:4e 00d2:4f 00d3:4f 00d5:4f 00d6:4f 00d7:58 00d8:4f 00da:55 00dd:59 00de:54 00e1:61 00e3:61 00e4:61 00e5:61 00e6:61 00ec:69 00ed:69 00f0:64 00f1:6e 00f2:6f 00f5:6f 00f6:6f 00f8:6f 00fd:79 00fe:74 00ff:79 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 
0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:22 02ba:27 02bc:27 02c4:5e 02c6:5e 02c8:27 02c9:16 02cb:60 02cd:5f 02dc:7e 0300:60 0302:5e 0303:7e 0304:16 0305:16 0331:5f 0332:5f 037e:3b 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2018:27 2019:27 201a:27 201c:22 201d:22 201e:22 2022:07 2024:07 2026:07 2030:25 2032:27 2035:27 2039:3c 203a:3e 203c:13 2044:2f 2070:30 2074:34 2075:35 2076:36 2077:37 2078:38 2080:30 2081:31 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20a7:50 20dd:4f 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:70 2119:50 211a:51 211b:52 211c:52 211d:52 2122:74 2124:5a 2128:5a 212a:4b 212b:41 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 2205:4f 2212:5f 2215:2f 2216:5c 2217:2a 221f:1c 2223:7c 2236:3a 223c:7e 22c5:07 2302:7f 2303:5e 2329:3c 232a:3e 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e 3000:20 3007:4f 3008:3c 3009:3e 301a:5b 301b:5d 30fb:07 + +865 (OEM - Nordic) +00a2:63 00a5:59 00a7:15 00a8:22 00a9:63 00ad:5f 00ae:72 00af:16 00b3:33 00b4:2f 00b6:14 00b8:2c 00b9:31 00bb:3e 00be:33 00c0:41 00c1:41 00c2:41 00c3:41 00c8:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d0:44 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d7:58 00d9:55 00da:55 00db:55 00dd:59 00de:54 
00e3:61 00f0:64 00f5:6f 00fd:79 00fe:74 0100:41 0101:61 0102:41 0103:61 0104:41 0105:61 0106:43 0107:63 0108:43 0109:63 010a:43 010b:63 010c:43 010d:63 010e:44 010f:64 0110:44 0111:64 0112:45 0113:65 0114:45 0115:65 0116:45 0117:65 0118:45 0119:65 011a:45 011b:65 011c:47 011d:67 011e:47 011f:67 0120:47 0121:67 0122:47 0123:67 0124:48 0125:68 0126:48 0127:68 0128:49 0129:69 012a:49 012b:69 012c:49 012d:69 012e:49 012f:69 0130:49 0131:69 0134:4a 0135:6a 0136:4b 0137:6b 0139:4c 013a:6c 013b:4c 013c:6c 013d:4c 013e:6c 0141:4c 0142:6c 0143:4e 0144:6e 0145:4e 0146:6e 0147:4e 0148:6e 014c:4f 014d:6f 014e:4f 014f:6f 0150:4f 0151:6f 0152:4f 0153:6f 0154:52 0155:72 0156:52 0157:72 0158:52 0159:72 015a:53 015b:73 015c:53 015d:73 015e:53 015f:73 0160:53 0161:73 0162:54 0163:74 0164:54 0165:74 0166:54 0167:74 0168:55 0169:75 016a:55 016b:75 016c:55 016d:75 016e:55 016f:75 0170:55 0171:75 0172:55 0173:75 0174:57 0175:77 0176:59 0177:79 0178:59 0179:5a 017b:5a 017c:7a 017d:5a 017e:7a 0180:62 0189:44 0197:49 019a:6c 019f:4f 01a0:4f 01a1:6f 01ab:74 01ae:54 01af:55 01b0:75 01b6:7a 01c3:21 01cd:41 01ce:61 01cf:49 01d0:69 01d1:4f 01d2:6f 01d3:55 01d4:75 01d5:55 01d6:75 01d7:55 01d8:75 01d9:55 01da:75 01db:55 01dc:75 01de:41 01df:61 01e4:47 01e5:67 01e6:47 01e7:67 01e8:4b 01e9:6b 01ea:4f 01eb:6f 01ec:4f 01ed:6f 01f0:6a 0261:67 02b9:27 02ba:22 02bc:27 02c4:5e 02c6:5e 02c8:27 02c9:16 02ca:2f 02cb:60 02cd:5f 02dc:7e 0300:60 0301:2f 0302:5e 0303:7e 0304:16 0305:16 0308:22 030e:22 0327:2c 0331:5f 0332:5f 037e:3b 04bb:68 0589:3a 066a:25 2000:20 2001:20 2002:20 2003:20 2004:20 2005:20 2006:20 2010:2d 2011:2d 2013:2d 2014:2d 2017:5f 2018:27 2019:27 201a:27 201c:22 201d:22 201e:22 2022:07 2024:07 2026:07 2030:25 2032:27 2035:27 2039:3c 203a:3e 203c:13 2044:2f 2070:30 2074:34 2075:35 2076:36 2077:37 2078:38 2080:30 2081:31 2083:33 2084:34 2085:35 2086:36 2087:37 2088:38 2089:39 20dd:4f 2102:43 2107:45 210a:67 210b:48 210c:48 210d:48 210e:68 2110:49 2111:49 2112:4c 2113:6c 2115:4e 2118:70 2119:50 
211a:51 211b:52 211c:52 211d:52 2122:74 2124:5a 2128:5a 212a:4b 212c:42 212d:43 212e:65 212f:65 2130:45 2131:46 2133:4d 2134:6f 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 2205:4f 2212:5f 2215:2f 2216:5c 2217:2a 221f:1c 2223:7c 2236:3a 223c:7e 226b:3c 22c5:07 2302:7f 2303:5e 2329:3c 232a:3e 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e 3000:20 3007:4f 3008:3c 3009:3e 300b:3e 301a:5b 301b:5d 30fb:07 + +874 (ANSI/OEM - Thai) +00a7:15 00b6:14 203c:13 2190:1b 2191:18 2192:1a 2193:19 2194:1d 2195:12 21a8:17 221f:1c 2302:7f 25ac:16 25b2:1e 25ba:10 25bc:1f 25c4:11 25cb:09 25d8:08 25d9:0a 263a:01 263b:02 263c:0f 2640:0c 2642:0b 2660:06 2663:05 2665:03 2666:04 266a:0d 266b:0e ff01:21 ff02:22 ff03:23 ff04:24 ff05:25 ff06:26 ff07:27 ff08:28 ff09:29 ff0a:2a ff0b:2b ff0c:2c ff0d:2d ff0e:2e ff0f:2f ff10:30 ff11:31 ff12:32 ff13:33 ff14:34 ff15:35 ff16:36 ff17:37 ff18:38 ff19:39 ff1a:3a ff1b:3b ff1c:3c ff1d:3d ff1e:3e ff20:40 ff21:41 ff22:42 ff23:43 ff24:44 ff25:45 ff26:46 ff27:47 ff28:48 ff29:49 ff2a:4a ff2b:4b ff2c:4c ff2d:4d ff2e:4e ff2f:4f ff30:50 ff31:51 ff32:52 ff33:53 ff34:54 ff35:55 ff36:56 ff37:57 ff38:58 ff39:59 ff3a:5a ff3b:5b ff3c:5c ff3d:5d ff3e:5e ff3f:5f ff40:60 ff41:61 ff42:62 ff43:63 ff44:64 ff45:65 ff46:66 ff47:67 ff48:68 ff49:69 ff4a:6a ff4b:6b ff4c:6c ff4d:6d ff4e:6e ff4f:6f ff50:70 ff51:71 ff52:72 ff53:73 ff54:74 ff55:75 ff56:76 ff57:77 ff58:78 ff59:79 ff5a:7a ff5b:7b ff5c:7c ff5d:7d ff5e:7e + +932 (ANSI/OEM - Japanese Shift-JIS) +00a1:21 00a5:5c 00a6:7c 00a9:63 00aa:61 00ad:2d 00ae:52 00b2:32 00b3:33 00b9:31 00ba:6f 00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c6:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d0:44 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d8:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00de:54 00df:73 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e6:61 00e7:63 00e8:65 00e9:65 
00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f0:64 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f8:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00fe:74 00ff:79 + +936 (ANSI/OEM - Simplified Chinese GBK) +00a6:7c 00aa:61 00ad:2d 00b2:32 00b3:33 00b9:31 00ba:6f 00d0:44 00dd:59 00de:54 00e2:61 00f0:65 00fd:79 00fe:74 + +949 (ANSI/OEM - Korean) +00a6:7c 00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e7:63 00e8:65 00e9:65 00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00ff:79 20a9:5c + +950 (ANSI/OEM - Traditional Chinese Big5) +00a1:21 00a6:7c 00a9:63 00aa:61 00ad:2d 00ae:52 00b2:32 00b3:33 00b9:31 00ba:6f 00c0:41 00c1:41 00c2:41 00c3:41 00c4:41 00c5:41 00c6:41 00c7:43 00c8:45 00c9:45 00ca:45 00cb:45 00cc:49 00cd:49 00ce:49 00cf:49 00d0:44 00d1:4e 00d2:4f 00d3:4f 00d4:4f 00d5:4f 00d6:4f 00d8:4f 00d9:55 00da:55 00db:55 00dc:55 00dd:59 00de:54 00df:73 00e0:61 00e1:61 00e2:61 00e3:61 00e4:61 00e5:61 00e6:61 00e7:63 00e8:65 00e9:65 00ea:65 00eb:65 00ec:69 00ed:69 00ee:69 00ef:69 00f0:65 00f1:6e 00f2:6f 00f3:6f 00f4:6f 00f5:6f 00f6:6f 00f8:6f 00f9:75 00fa:75 00fb:75 00fc:75 00fd:79 00fe:74 00ff:79 + +(UTF-7) + + +(UTF-8) diff --git a/terraform/applications/nginx-waf/nginx.conf b/terraform/applications/nginx-waf/nginx.conf new file mode 100755 index 00000000..1fb4f838 --- /dev/null +++ b/terraform/applications/nginx-waf/nginx.conf @@ -0,0 +1,69 @@ +## DO NOT UNCOMMENT +## This gets uncommented during init. +## +#load_module modules/ngx_http_modsecurity_module.so; +## +## DO NOT UNCOMMENT +## This gets uncommented during init. 
+ +worker_processes auto; +daemon off; + +error_log stderr error; +events { worker_connections 1024; } + +http { + charset utf-8; + log_format cloudfoundry 'NginxLog "$request" $status $body_bytes_sent'; + access_log /dev/stdout cloudfoundry; + default_type application/octet-stream; + include mime.types; + sendfile on; + #recursive_error_pages on; + + resolver 169.254.0.2 ipv6=off valid=30s; + + tcp_nopush on; + client_max_body_size 512m; + keepalive_timeout 60s; + map_hash_bucket_size 128; + port_in_redirect off; # Ensure that redirects don't include the internal container PORT - 8080 + + real_ip_header X-Forwarded-For; + set_real_ip_from 127.0.0.1/32; + set_real_ip_from 10.0.0.0/8; + set_real_ip_from 172.16.0.0/12; + set_real_ip_from 192.168.0.0/16; + set_real_ip_from 52.222.122.97/32; + set_real_ip_from 52.222.123.172/32; + real_ip_recursive on; + + ## + ## DO NOT UNCOMMENT + ## This gets uncommented during init. + ## + + #include nginx/conf.d/*.conf; + + ## + ## DO NOT UNCOMMENT + ## This gets uncommented during init. + ## + + + ## + ## DO NOT UNCOMMENT + ## This is required for the Nginx buildpack to start. + ## + + # server { + # listen {{port}}; + # root public; + # index index.html index.htm Default.htm; + # } + + ## + ## DO NOT UNCOMMENT + ## This is required for the Nginx buildpack to start. 
+ ## +} diff --git a/terraform/applications/nginx-waf/nginx/conf.d/default.conf b/terraform/applications/nginx-waf/nginx/conf.d/default.conf new file mode 100644 index 00000000..395c89bb --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/conf.d/default.conf @@ -0,0 +1,167 @@ +map $http_upgrade $connection_upgrade { + default upgrade; + '' close; +} + +map $request_uri $error_page { + default /404/index.html; + ~/(?[a-z][a-z]?[a-z]|zh-hans)/(.*) /$lang/404/index.html; +} + +server { + server_name _; + listen $VCAP_APP_PORT; + + modsecurity on; + modsecurity_rules_file /home/vcap/app/nginx/snippets/owasp-modsecurity-main.conf; + + set $cf_forwarded_host "$host"; + #if ($http_x_cf_forwarded_url ~* ^(https?\:\/\/)(.*?)(\/(.*))?$) { + # set $cf_forwarded_host "$2"; + #} + + set $port 8881; + if ($cf_forwarded_host ~* \-cms\-) { + set $port 8882; + } + + location @fourohfour_english { + allow all; + access_log off; + + default_type text/plain; + return 404 'Not Found'; + break; + # rewrite ^ /404/index.html; + include nginx/snippets/proxy-to-static.conf; + break; + } + + location ^~ /s3/files { + set $port 8883; + proxy_redirect off; + proxy_connect_timeout 300; + chunked_transfer_encoding off; + proxy_pass http://127.0.0.1:$port; + proxy_cookie_flags ~SESS.* secure; + proxy_set_header Host $cf_forwarded_host; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Real-IP $remote_addr; + error_page 403 = @fourohfour_english; + } + + location / { + proxy_redirect off; + proxy_connect_timeout 300; + chunked_transfer_encoding off; + proxy_pass http://127.0.0.1:$port; + proxy_cookie_flags ~SESS.* secure; + proxy_set_header Host $cf_forwarded_host; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Real-IP $remote_addr; + error_page 403 = @fourohfour_english; + } +} + +server { + server_name 127.0.0.1; + listen 8881; + + location @fourohfour { + allow all; + access_log off; + + rewrite ^ $error_page; + include 
nginx/snippets/proxy-to-static.conf; + } + + location / { + #rewrite ^/static/(.*) /$1; + rewrite ^([^.]*[^/])$ $1/; + rewrite (.*)/$ $1/index.html last; + + #include nginx/dynamic/deny-by-domain.conf; + # include nginx/snippets/ip-restrict-static.conf; + include nginx/snippets/proxy-to-static.conf; + error_page 403 = @fourohfour; + } +} + +server { + server_name 127.0.0.1; + listen 8882; + + error_page 403 = @forbidden; + + location @forbidden { + allow all; + access_log off; + + default_type text/plain; + return 403 'Forbidden by USAGov'; + break; + # redirect to homepage usa.gov + } + + location / { + access_log on; + rewrite_log on; + #include nginx/dynamic/deny-by-domain.conf; + include nginx/snippets/ip-restrict-cms.conf; + include nginx/snippets/proxy-to-app.conf; + } +} + +server { + server_name 127.0.0.1; + listen 8883; + + #Rewrite all s3 file requests to cms path. + #Location blocks below will handle the rest. + rewrite ^/s3/files/(.*)$ /cms/public/$1 break; + + location @fourohfour { + allow all; + access_log off; + + default_type text/plain; + return 404 'Not Found'; + break; + rewrite ^ $error_page; + include nginx/snippets/proxy-to-static.conf; + } + + location / { + rewrite ^/s3/files/(.*) /cms/public/$1; + rewrite ^([^.]*[^/])$ $1/; + rewrite (.*)/$ $1/index.html last; + + #include nginx/dynamic/deny-by-domain.conf; + # include nginx/snippets/ip-restrict-static.conf; + include nginx/snippets/proxy-to-storage.conf; + error_page 403 = @fourohfour; + } +} + +server { + server_name 127.0.0.1; + listen 8884; + + + location @fourohfour { + allow all; + access_log off; + + rewrite ^ $error_page; + include nginx/snippets/proxy-to-static.conf; + } + + location / { + rewrite ^/static/(.*) /$1; + rewrite ^([^.]*[^/])$ $1/; + rewrite (.*)/$ $1/index.html last; + + include nginx/snippets/proxy-to-static.conf; + error_page 403 = @fourohfour; + } +} diff --git a/terraform/applications/nginx-waf/nginx/dynamic/deny-by-domain.conf 
b/terraform/applications/nginx-waf/nginx/dynamic/deny-by-domain.conf new file mode 100644 index 00000000..41c90fc2 --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/dynamic/deny-by-domain.conf @@ -0,0 +1 @@ +# Restricted by domain (placeholder, to be replaced via cron) diff --git a/terraform/applications/nginx-waf/nginx/dynamic/deny_domain_by_ip.sh b/terraform/applications/nginx-waf/nginx/dynamic/deny_domain_by_ip.sh new file mode 100644 index 00000000..679a7954 --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/dynamic/deny_domain_by_ip.sh @@ -0,0 +1,29 @@ +#!/bin/sh +# Get IP addresses for domains in domains-deny.list. +# If the results differ from deny-by-domain.conf, update that file. +# If there were changes AND the flag --no_reload was not passed, reload nginx. +# (--no-reload is only wanted during setup, before nginx has started.) + +BASEDIR=$(dirname $0) + +echo "# Restricted by domain (via cron job):" > ${BASEDIR}/deny-by-domain_new.conf +while read -r line +do + ddns_record="$line" + if [[ ! -z $ddns_record ]]; then + resolved_ip=`getent ahosts $line | awk '{ print $1 ; exit }'` + if [[ ! -z $resolved_ip ]]; then + echo " deny $resolved_ip; # from $ddns_record" >> ${BASEDIR}/deny-by-domain_new.conf + fi + fi +done < ${BASEDIR}/domains-deny.list + +# Update deny-by-domain.conf only if there are changes. +CHANGES=$(diff ${BASEDIR}/deny-by-domain.conf ${BASEDIR}/deny-by-domain_new.conf) +if [[ ! 
-z "$CHANGES" ]]; then + cat ${BASEDIR}/deny-by-domain_new.conf > ${BASEDIR}/deny-by-domain.conf + if [ "$1" != "--no-reload" ]; then + /usr/sbin/nginx -s reload + fi +fi +rm ${BASEDIR}/deny-by-domain_new.conf diff --git a/terraform/applications/nginx-waf/nginx/dynamic/domains-deny.list b/terraform/applications/nginx-waf/nginx/dynamic/domains-deny.list new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/dynamic/domains-deny.list @@ -0,0 +1 @@ + diff --git a/terraform/applications/nginx-waf/nginx/snippets/ip-restrict-cms.conf.tmpl b/terraform/applications/nginx-waf/nginx/snippets/ip-restrict-cms.conf.tmpl new file mode 100644 index 00000000..3cb7e179 --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/snippets/ip-restrict-cms.conf.tmpl @@ -0,0 +1,6 @@ +#allow 127.0.0.1/32; +#allow 172.0.0.0/8; + +${IPS_ALLOWED_CMS} + +#deny all; diff --git a/terraform/applications/nginx-waf/nginx/snippets/ip-restrict-static.conf.tmpl b/terraform/applications/nginx-waf/nginx/snippets/ip-restrict-static.conf.tmpl new file mode 100644 index 00000000..088ffe0e --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/snippets/ip-restrict-static.conf.tmpl @@ -0,0 +1,3 @@ +${IPS_DENYED_STATIC} + +allow all; diff --git a/terraform/applications/nginx-waf/nginx/snippets/owasp-modsecurity-main.conf b/terraform/applications/nginx-waf/nginx/snippets/owasp-modsecurity-main.conf new file mode 100644 index 00000000..c529b5ea --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/snippets/owasp-modsecurity-main.conf @@ -0,0 +1,7 @@ +# Include the recommended configuration +Include /home/vcap/app/modsecurity/modsecurity.conf +Include /home/vcap/app/modsecurity/modsecurity-override.conf +Include /home/vcap/app/modsecurity/crs-setup.conf +Include /home/vcap/app/modsecurity/crs/*.conf +# A test rule +SecRule ARGS:testparam "@contains test" "id:1234,deny,log,status:403" diff --git 
a/terraform/applications/nginx-waf/nginx/snippets/proxy-to-app.conf.tmpl b/terraform/applications/nginx-waf/nginx/snippets/proxy-to-app.conf.tmpl new file mode 100644 index 00000000..b62bb7bd --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/snippets/proxy-to-app.conf.tmpl @@ -0,0 +1,24 @@ +set $cf_forwarded_host "$host"; +set $cf_forwarded_uri "$request_uri"; + +set $cf_destination_host "${cms_internal_endpoint}"; +set $cf_destination_port "61443"; + +set $base_host "$cf_forwarded_host"; +if ($cf_forwarded_host ~* ^(.*)-waf-(.*)\.app\.cloud\.gov$) { + set $base_host "$1-cms-$2"; +} + +proxy_http_version 1.1; +proxy_set_header Connection ""; +proxy_redirect off; +proxy_connect_timeout 300; +chunked_transfer_encoding off; + +proxy_set_header Host $cf_forwarded_host; +proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; +proxy_set_header X-Real-IP $remote_addr; + +# Use XX-CF-APP-INSTANCE on the original request if you wish to target an instance +proxy_set_header X-CF-APP-INSTANCE $http_xx_cf_app_instance; +proxy_pass https://$cf_destination_host:$cf_destination_port$cf_forwarded_uri; diff --git a/terraform/applications/nginx-waf/nginx/snippets/proxy-to-static.conf.tmpl b/terraform/applications/nginx-waf/nginx/snippets/proxy-to-static.conf.tmpl new file mode 100644 index 00000000..e1bd02c9 --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/snippets/proxy-to-static.conf.tmpl @@ -0,0 +1,14 @@ +proxy_http_version 1.1; +proxy_set_header Connection \"\"; +proxy_set_header Authorization ''; +proxy_set_header Host ${static_bucket}.${static_fips_endpoint}; +proxy_hide_header x-amz-id-2; +proxy_hide_header x-amz-request-id; +proxy_hide_header x-amz-meta-server-side-encryption; +proxy_hide_header x-amz-server-side-encryption; +proxy_hide_header Set-Cookie; +proxy_ignore_headers Set-Cookie; +proxy_intercept_errors on; +#add_header Cache-Control max-age=31536000; +add_header Cache-Control max-age=60; +proxy_pass 
https://${static_bucket}.${static_fips_endpoint}; diff --git a/terraform/applications/nginx-waf/nginx/snippets/proxy-to-storage.conf.tmpl b/terraform/applications/nginx-waf/nginx/snippets/proxy-to-storage.conf.tmpl new file mode 100644 index 00000000..4bd2caef --- /dev/null +++ b/terraform/applications/nginx-waf/nginx/snippets/proxy-to-storage.conf.tmpl @@ -0,0 +1,14 @@ +proxy_http_version 1.1; +proxy_set_header Connection \"\"; +proxy_set_header Authorization ''; +proxy_set_header Host ${storage_bucket}.${storage_fips_endpoint}; +proxy_hide_header x-amz-id-2; +proxy_hide_header x-amz-request-id; +proxy_hide_header x-amz-meta-server-side-encryption; +proxy_hide_header x-amz-server-side-encryption; +proxy_hide_header Set-Cookie; +proxy_ignore_headers Set-Cookie; +proxy_intercept_errors on; +#add_header Cache-Control max-age=31536000; +add_header Cache-Control max-age=60; +proxy_pass https://${storage_bucket}.${storage_fips_endpoint}; diff --git a/terraform/applications/nginx-waf/packages/.DS_Store b/terraform/applications/nginx-waf/packages/.DS_Store new file mode 100644 index 00000000..cbcfe95b Binary files /dev/null and b/terraform/applications/nginx-waf/packages/.DS_Store differ diff --git a/terraform/applications/nginx-waf/packages/coreruleset-4.7.0-minimal.tar.gz b/terraform/applications/nginx-waf/packages/coreruleset-4.7.0-minimal.tar.gz new file mode 100644 index 00000000..74efd045 Binary files /dev/null and b/terraform/applications/nginx-waf/packages/coreruleset-4.7.0-minimal.tar.gz differ diff --git a/terraform/applications/nginx-waf/packages/libmodsecurity3_3.0.9-1_amd64.deb b/terraform/applications/nginx-waf/packages/libmodsecurity3_3.0.9-1_amd64.deb new file mode 100644 index 00000000..59817f52 Binary files /dev/null and b/terraform/applications/nginx-waf/packages/libmodsecurity3_3.0.9-1_amd64.deb differ diff --git a/terraform/applications/nginx-waf/packages/libmodsecurity3t64_3.0.12-1.1build2_amd64.deb 
b/terraform/applications/nginx-waf/packages/libmodsecurity3t64_3.0.12-1.1build2_amd64.deb new file mode 100644 index 00000000..b92fd5cc Binary files /dev/null and b/terraform/applications/nginx-waf/packages/libmodsecurity3t64_3.0.12-1.1build2_amd64.deb differ diff --git a/terraform/applications/nginx-waf/public/index.html b/terraform/applications/nginx-waf/public/index.html new file mode 100644 index 00000000..187a9be9 --- /dev/null +++ b/terraform/applications/nginx-waf/public/index.html @@ -0,0 +1 @@ +Welcome to cloud.gov! \ No newline at end of file diff --git a/terraform/applications/nginx-waf/start b/terraform/applications/nginx-waf/start new file mode 100755 index 00000000..ee1c8e83 --- /dev/null +++ b/terraform/applications/nginx-waf/start @@ -0,0 +1,19 @@ +#!/bin/bash + +home="/home/vcap" +app_path="${home}/app" +nginx_path="${home}/deps/1/nginx/sbin" + +echo "Intializing Nginx..." + +## Configure nginx. +${app_path}/init +[ $? -ne 0 ] && exit 1 + +echo "Starting Nginx..." +## Start nginx. +${nginx_path}/nginx -p ${app_path} -c nginx.conf & + +echo "Done!" +## Simple entrypoint to hold the container open. 
+${app_path}/entrypoint diff --git a/terraform/applications/tf-bastion/apt.yml b/terraform/applications/tf-bastion/apt.yml new file mode 100755 index 00000000..5f6bd5ca --- /dev/null +++ b/terraform/applications/tf-bastion/apt.yml @@ -0,0 +1,6 @@ +--- +packages: + - curl + - gettext + - git + - wget diff --git a/terraform/applications/tf-bastion/exports.sh b/terraform/applications/tf-bastion/exports.sh new file mode 100755 index 00000000..f52afeef --- /dev/null +++ b/terraform/applications/tf-bastion/exports.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +home="/home/vcap" + +#app_path="${home}/app" + +PG_CONN_STR=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.uri') +PGDATABASE=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.db_name') +PGHOST=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.host') +PGPASSWORD=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.password') +PGPORT=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.port') +PGUSER=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.username') + +{ + echo "export PATH=${PATH}:${home}/deps/0/bin" | tee "${home}/exports.sh" + echo "alias terraform=tofu" | tee -a "${home}/exports.sh" + echo "alias tf=tofu" | tee -a "${home}/exports.sh" + + echo "export PG_CONN_STR=${PG_CONN_STR}" | tee -a "${home}/exports.sh" + echo "export PGDATABASE=${PGDATABASE}" | tee -a "${home}/exports.sh" + echo "export PGHOST=${PGHOST}" | tee -a "${home}/exports.sh" + echo "export PGPASSWORD=${PGPASSWORD}" | tee -a "${home}/exports.sh" + echo "export PGPORT=${PGPORT}" | tee -a "${home}/exports.sh" + echo "export PGUSER=${PGUSER}" | tee -a "${home}/exports.sh" +} > /dev/null 2>&1 + diff --git a/terraform/applications/tf-bastion/start b/terraform/applications/tf-bastion/start new file mode 100755 index 00000000..bd3454db --- /dev/null +++ b/terraform/applications/tf-bastion/start @@ -0,0 +1,40 @@ +#!/bin/bash + +home="/home/vcap" + +#app_path="${home}/app" + +echo "Downloading OpenTofu 
v${OPENTOFU_VERSION}..." +rm -f /home/vcap/deps/0/bin/tofu +wget -q "https://github.com/opentofu/opentofu/releases/download/v${OPENTOFU_VERSION}/tofu_${OPENTOFU_VERSION}_amd64.deb" + +echo "Installing OpenTofu..." +dpkg-deb -R "tofu_${OPENTOFU_VERSION}_amd64.deb" ${home}/deps/0/apt/ +ln -s "${home}/deps/0/apt/usr/bin/tofu" "${home}/deps/0/bin/tofu" +rm -f "tofu_${OPENTOFU_VERSION}_amd64.deb" + +echo "Exporting aliases and environmental variables..." + +PG_CONN_STR=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.uri') +PGDATABASE=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.db_name') +PGHOST=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.host') +PGPASSWORD=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.password') +PGPORT=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.port') +PGUSER=$(echo "${VCAP_SERVICES}" | jq '."aws-rds"[].credentials.username') + +{ + echo "export PATH=${PATH}:${home}/deps/0/bin" | tee "${home}/exports.sh" + echo "alias terraform=tofu" | tee -a "${home}/exports.sh" + echo "alias tf=tofu" | tee -a "${home}/exports.sh" + + echo "export PG_CONN_STR=${PG_CONN_STR}" | tee -a "${home}/exports.sh" + echo "export PGDATABASE=${PGDATABASE}" | tee -a "${home}/exports.sh" + echo "export PGHOST=${PGHOST}" | tee -a "${home}/exports.sh" + echo "export PGPASSWORD=${PGPASSWORD}" | tee -a "${home}/exports.sh" + echo "export PGPORT=${PGPORT}" | tee -a "${home}/exports.sh" + echo "export PGUSER=${PGUSER}" | tee -a "${home}/exports.sh" + echo "source exports.sh" | tee -a "${home}/.bashrc" +} > /dev/null 2>&1 + +echo "Bastion ready!" +while : ; do sleep 500 ; done diff --git a/terraform/bootstrap/.terraform.lock.hcl b/terraform/bootstrap/.terraform.lock.hcl new file mode 100644 index 00000000..d7b0b33e --- /dev/null +++ b/terraform/bootstrap/.terraform.lock.hcl @@ -0,0 +1,138 @@ +# This file is maintained automatically by "tofu init". +# Manual edits may be lost in future updates. 
+ +provider "registry.opentofu.org/cloudfoundry-community/cloudfoundry" { + version = "0.53.1" + constraints = "~> 0.5" + hashes = [ + "h1:o6nGtINonmkgsX810QianzlX+y+aJ7WzYRwAvhQu5qE=", + "zh:017a55cdbd444ccf8fe45a3c7cdbc08ddf4f0f13550fcd457c31df9b2cfdb767", + "zh:100e9bd10868547193134082427abebad9db6359f6139a882192232e8e6911e3", + "zh:34467f6504e8527bd3e18e372d5386a43f2bffd88abf54bb72d51f04ab3e4e23", + "zh:3a278f5f71e39d29c7db999e2a34e8135b79cee4f36510b0f2c2dfec47997cf1", + "zh:3be1fbe17382c91561b1985d372606d802513d94bae6368e1bafd8dd49494737", + "zh:3f12bd7a629d547c706c380d9499ff39eab7b8824a14662aa446f230304bdd3a", + "zh:404acaa9ad7f95e83baf2332be54c065c21053bf304e80ac41ae49719462b184", + "zh:5ac5f6159d1e0c989e739cf16aa8dede6cee3562a6262bf9f2c6b53f4da866fe", + "zh:7a440ee173e69fa153ea4baea47adfca34d7171ffc83e7a1c0ec319d28998cbc", + "zh:87e2200bf66443671e249108d1cfa4aa13a31b9fdf445cec88364db8ea6be623", + "zh:b1b20b2b751df7765225cee5b01290b06e245e50faa8053495c2ef5ebe316998", + "zh:c8ddda9cf7dff40d762ea4dc22941c993ae8e9b2388c8d421f43254a56c98482", + "zh:d6ce83f0077a9f6262ffa1f7d777e2b72feac7ea7c8735aa39a5f86b4f3f7084", + "zh:d74126b9189ab4ca137ca634eaa25c571491bdd2456ccd0f3276a6d49163e412", + "zh:db5d415346e03eac0c5e025f9c10afdebfff35487e8a8383b3c4cd867c422fe2", + ] +} + +provider "registry.opentofu.org/hashicorp/archive" { + version = "2.6.0" + hashes = [ + "h1:s1OObC0b95ceQkrAMqL4q6wMYDBWYt8swZbLup+UJXI=", + "zh:046b3ba4223002d1cd1c917e8c21b58a636fcd751073745e3db99beebe254dd8", + "zh:1c1ed2ea0927b491689c3c7d178880cd9902f2a5339da8f46c56279920329a27", + "zh:1f17b47ba1bf18bd7bd30ea35c2ba32eaa23f8d08b3a35126edb31daf6ae10fd", + "zh:4b58aaac88335bb2ca482766e2682514fed78ff8cabe5665b6e5dd7c22ff9c81", + "zh:6c7dd6d4ff061d350fc6eb76866905c47450b8b8c1d2e238aa737afd48b6a267", + "zh:7b376916c5b911a3f887fd296c25ced36d8ba742b8482f1e0f092bf8fb008146", + "zh:8661139125b1ea7b89e0084377863dc820cdcbc433bb9a7c445350480f83b2c2", + 
"zh:e17c9056f210ec9a8c9cfe8a13ecd09ae59ad0a0197c96589b86eb4f7cf5326d", + "zh:ee15bddc7a596cccd400a762b6dadf1c8889faff7c931ae4b39f2e5404188da1", + "zh:f74355e6588daf88ec210d2967fbf5d22fa18c448d2807b8a7049dc777a2dbcb", + ] +} + +provider "registry.opentofu.org/hashicorp/github" { + version = "6.3.1" + hashes = [ + "h1:vKWgfpIrSNic7pYVi4LKIDQ2MgUpHq7uSj8nA8xfrw0=", + "zh:25ae1cb97ec528e6b7e9330489f4a33acc0fa80b909c113a8445656bc524c5b9", + "zh:3e1f6300dc10e52a54f13352770ed79f25ff4ba9ac49b776c52a655a3488a20b", + "zh:4aaf2877ec22e63358d7c9cd48c7d7947d1a1dc4d03231f0af193d8975d5918a", + "zh:4b904a81fac12a2a7606c8d811cb9c4e13581adcaaa19e503a067ac95c515925", + "zh:54fe7e0dca04e698631a5b86bdd43ef09a31375e68f8f89970b4315cd5fc6312", + "zh:6b14f92cf62784eaf20f43ef58ce966735f30d43deeab077943bd410c0d8b8b2", + "zh:86c49a1c11c024b26b6750c446f104922a3fe8464d3706a5fb9a4a05c6ca0b0a", + "zh:8939fb6332c4a58c4e90245eb9f0110987ccafff06b45a7ed513f2759a2abe6a", + "zh:8b4068a78c1f357325d1151facdb1aff506b9cd79d2bab21a55651255a130e2f", + "zh:ae22f5e52f534f19811d7f9480b4eb442f12ff16367b3893abb4e449b029ff6b", + "zh:afae9cfd9d49002ddfea552aa4844074b9974bd56ff2c2458f2297fe0df56a5b", + "zh:bc7a434408eb16a4fbceec0bd86b108a491408b727071402ad572cdb1afa2eb7", + "zh:c8e4728ea2d2c6e3d2c1bc5e7d92ed1121c02bab687702ec2748e3a6a0844150", + "zh:f6314b2cff0c0a07a216501cda51b35e6a4c66a2418c7c9966ccfe701e01b6b0", + "zh:fbd1fee2c9df3aa19cf8851ce134dea6e45ea01cb85695c1726670c285797e25", + ] +} + +provider "registry.opentofu.org/hashicorp/local" { + version = "2.5.2" + hashes = [ + "h1:MBgBjJljfDl1i2JPcIoH4hW+2XLJ+D1l12iH/xd3uTo=", + "zh:25b95b76ceaa62b5c95f6de2fa6e6242edbf51e7fc6c057b7f7101aa4081f64f", + "zh:3c974fdf6b42ca6f93309cf50951f345bfc5726ec6013b8832bcd3be0eb3429e", + "zh:5de843bf6d903f5cca97ce1061e2e06b6441985c68d013eabd738a9e4b828278", + "zh:86beead37c7b4f149a54d2ae633c99ff92159c748acea93ff0f3603d6b4c9f4f", + "zh:8e52e81d3dc50c3f79305d257da7fde7af634fed65e6ab5b8e214166784a720e", + 
"zh:9882f444c087c69559873b2d72eec406a40ede21acb5ac334d6563bf3a2387df", + "zh:a4484193d110da4a06c7bffc44cc6b61d3b5e881cd51df2a83fdda1a36ea25d2", + "zh:a53342426d173e29d8ee3106cb68abecdf4be301a3f6589e4e8d42015befa7da", + "zh:d25ef2aef6a9004363fc6db80305d30673fc1f7dd0b980d41d863b12dacd382a", + "zh:fa2d522fb323e2121f65b79709fd596514b293d816a1d969af8f72d108888e4c", + ] +} + +provider "registry.opentofu.org/hashicorp/random" { + version = "3.6.3" + hashes = [ + "h1:32/UZofQoXk8zPj9vpIDiSEmERA3Mx2VPvk1lHTTHvw=", + "zh:1bfd2e54b4eee8c761a40b6d99d45880b3a71abc18a9a7a5319204da9c8363b2", + "zh:21a15ac74adb8ba499aab989a4248321b51946e5431219b56fc827e565776714", + "zh:221acfac3f7a5bcd6cb49f79a1fca99da7679bde01017334bad1f951a12d85ba", + "zh:3026fcdc0c1258e32ab519df878579160b1050b141d6f7883b39438244e08954", + "zh:50d07a7066ea46873b289548000229556908c3be746059969ab0d694e053ee4c", + "zh:54280cdac041f2c2986a585f62e102bc59ef412cad5f4ebf7387c2b3a357f6c0", + "zh:632adf40f1f63b0c5707182853c10ae23124c00869ffff05f310aef2ed26fcf3", + "zh:b8c2876cce9a38501d14880a47e59a5182ee98732ad7e576e9a9ce686a46d8f5", + "zh:f27e6995e1e9fe3914a2654791fc8d67cdce44f17bf06e614ead7dfd2b13d3ae", + "zh:f423f2b7e5c814799ad7580b5c8ae23359d8d342264902f821c357ff2b3c6d3d", + ] +} + +provider "registry.opentofu.org/hashicorp/time" { + version = "0.12.1" + hashes = [ + "h1:D4eN1hzoSjOkkBg1dD13M5bzppQWosH/tkqYkeKjQks=", + "zh:50a9b67d5f5f42adbdb7712f67858aa64b5670070f6710751239b535fb48a4df", + "zh:5a846fae035e363aed75b966d64a56f3489a38083e8407aaa656730437f53ed7", + "zh:6767f1fc8a679b48eaa4cd114da0d8185fb3546375f3a0fb3728f10fa3dbc551", + "zh:85d3da407c828bf057cbc0e86c75ef3d0f9f74a73c4ea1b4aef18e33f41092b1", + "zh:9180721325139431112c638f5382a740ff219782f81d6346cdff5bccc418a43f", + "zh:9ba9989f905a64db1409a9a57649549c89c7aedfb55ae399a7fa9411aafaadac", + "zh:b3d9e7afb6a742e9be0541bc434b00d849fdfab0b4b859ceb0296c26c541af15", + "zh:c87da712d718acd9dd03f544b020c320699cb29df197be4f74783e3c3d80fc17", + 
"zh:cb1abe07638ef6d7b41d0e86dfb12d60a513aca3395a5da7191947f7459821dd", + "zh:ecff2e823ef49eda03663fa8ee8bdc17d27cd419dbdacbf1719f38812dbf417e", + ] +} + +provider "registry.opentofu.org/integrations/github" { + version = "6.3.1" + constraints = "~> 6.0" + hashes = [ + "h1:vKWgfpIrSNic7pYVi4LKIDQ2MgUpHq7uSj8nA8xfrw0=", + "zh:25ae1cb97ec528e6b7e9330489f4a33acc0fa80b909c113a8445656bc524c5b9", + "zh:3e1f6300dc10e52a54f13352770ed79f25ff4ba9ac49b776c52a655a3488a20b", + "zh:4aaf2877ec22e63358d7c9cd48c7d7947d1a1dc4d03231f0af193d8975d5918a", + "zh:4b904a81fac12a2a7606c8d811cb9c4e13581adcaaa19e503a067ac95c515925", + "zh:54fe7e0dca04e698631a5b86bdd43ef09a31375e68f8f89970b4315cd5fc6312", + "zh:6b14f92cf62784eaf20f43ef58ce966735f30d43deeab077943bd410c0d8b8b2", + "zh:86c49a1c11c024b26b6750c446f104922a3fe8464d3706a5fb9a4a05c6ca0b0a", + "zh:8939fb6332c4a58c4e90245eb9f0110987ccafff06b45a7ed513f2759a2abe6a", + "zh:8b4068a78c1f357325d1151facdb1aff506b9cd79d2bab21a55651255a130e2f", + "zh:ae22f5e52f534f19811d7f9480b4eb442f12ff16367b3893abb4e449b029ff6b", + "zh:afae9cfd9d49002ddfea552aa4844074b9974bd56ff2c2458f2297fe0df56a5b", + "zh:bc7a434408eb16a4fbceec0bd86b108a491408b727071402ad572cdb1afa2eb7", + "zh:c8e4728ea2d2c6e3d2c1bc5e7d92ed1121c02bab687702ec2748e3a6a0844150", + "zh:f6314b2cff0c0a07a216501cda51b35e6a4c66a2418c7c9966ccfe701e01b6b0", + "zh:fbd1fee2c9df3aa19cf8851ce134dea6e45ea01cb85695c1726670c285797e25", + ] +} diff --git a/terraform/bootstrap/README.md b/terraform/bootstrap/README.md new file mode 100644 index 00000000..c309a92d --- /dev/null +++ b/terraform/bootstrap/README.md @@ -0,0 +1,52 @@ + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | > 1.7 | +| [cloudfoundry](#requirement\_cloudfoundry) | ~> 0.5 | + +## Providers + +| Name | Version | +|------|---------| +| [cloudfoundry](#provider\_cloudfoundry) | 0.53.1 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| 
[applications](#module\_applications) | ../modules/application | n/a | +| [github](#module\_github) | ../modules/github | n/a | +| [random](#module\_random) | ../modules/random | n/a | +| [services](#module\_services) | ../modules/service | n/a | + +## Resources + +| Name | Type | +|------|------| +| [cloudfoundry_app.external_applications](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/data-sources/app) | data source | +| [cloudfoundry_domain.external](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/data-sources/domain) | data source | +| [cloudfoundry_domain.internal](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/data-sources/domain) | data source | +| [cloudfoundry_org.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/data-sources/org) | data source | +| [cloudfoundry_service.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/data-sources/service) | data source | +| [cloudfoundry_space.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/data-sources/space) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [cloudgov\_organization](#input\_cloudgov\_organization) | The organization for the cloud.gov account. | `string` | n/a | yes | +| [cloudgov\_password](#input\_cloudgov\_password) | The password for the cloud.gov account. | `string` | n/a | yes | +| [cloudgov\_space](#input\_cloudgov\_space) | The organization for the cloud.gov account. | `string` | n/a | yes | +| [cloudgov\_username](#input\_cloudgov\_username) | The username for the cloudfoundry account. | `string` | n/a | yes | +| [github\_organization](#input\_github\_organization) | The organization to use with GitHub. 
| `string` | `"GSA"` | no | +| [github\_token](#input\_github\_token) | The token used authenticate with GitHub. | `string` | n/a | yes | +| [mtls\_port](#input\_mtls\_port) | The default port to direct traffic to. Envoy proxy listens on 61443 and redirects to 8080, which the application should listen on. | `number` | `61443` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [name](#output\_name) | n/a | + \ No newline at end of file diff --git a/terraform/bootstrap/data.tf b/terraform/bootstrap/data.tf new file mode 100755 index 00000000..6dae4d0a --- /dev/null +++ b/terraform/bootstrap/data.tf @@ -0,0 +1,51 @@ +locals { + cloudfoundry = { + external_applications = try(data.cloudfoundry_app.external_applications, null) + domain_external = try(data.cloudfoundry_domain.external, null) + domain_internal = try(data.cloudfoundry_domain.internal, null) + organization = try(data.cloudfoundry_org.this, null) + services = try(data.cloudfoundry_service.this, null) + space = try(data.cloudfoundry_space.this, null) + } +} + +data "cloudfoundry_app" "external_applications" { + for_each = { + for key, value in try(local.env.external_applications, []) : value.name => value + if try(value.deployed, false) && + try(data.cloudfoundry_space.this.id, null) != null + } + name_or_id = format(local.env.name_pattern, each.key) + space = try(data.cloudfoundry_space.this.id, null) +} + +data "cloudfoundry_domain" "external" { + //domain = "${split(".", local.env.external_domain)[1]}.${split(".", local.env.external_domain)[2]}" + domain = join(",", slice(split(".", local.env.external_domain), 0, 0)) + sub_domain = split(".", local.env.external_domain)[0] +} + +data "cloudfoundry_domain" "internal" { + domain = join(",", slice(split(".", local.env.external_domain), 0, 0)) + sub_domain = split(".", local.env.internal_domain)[0] +} + +data "cloudfoundry_org" "this" { + name = local.env.organization +} + +data "cloudfoundry_space" "this" { + name = 
try(format(local.space_pattern, local.env.space), terraform.workspace) + org = data.cloudfoundry_org.this.id +} + + +data "cloudfoundry_service" "this" { + for_each = { + for key, value in try(local.env.services, {}) : key => value + if value.service_type != "user-provided" && try(data.cloudfoundry_space.this.id, null) != null + } + + name = each.value.service_type + space = try(data.cloudfoundry_space.this.id, null) +} \ No newline at end of file diff --git a/terraform/bootstrap/locals.tf b/terraform/bootstrap/locals.tf new file mode 100644 index 00000000..7e642e56 --- /dev/null +++ b/terraform/bootstrap/locals.tf @@ -0,0 +1,273 @@ +locals { + + ## The name of the project. Used to name most applications and services. + ## Default naming convention: ${local.project}-application-name-${terraform.workspace} + project = "digital-gov" + + ## The full name of the project. If there isn't a longer name, this can be set to + ## local.project. + project_full = "${local.project}" + + production_space = "prod" + + repository = "GSA/digital-gov-drupal" + + space_pattern = "%s" + +## The various environment settings to be deployed. + envs = { + + ## Every environment gets settings in 'all'. + all = { + + ## The API URL for cloud.gov. + api_url = "https://api.fr.cloud.gov" + + ## These values are default values when options aren't configured in the application block. + defaults = { + + ## The default size of the container's ephemeral disk. + disk_quota = 2048 + + ## Is SSH enabled on the container by default? + enable_ssh = true + + ## The default health check timeout. + health_check_timeout = 60 + + ## Default method of performing a health check. + ## Valid options: "port", "process", or "http" + ## https://docs.cloudfoundry.org/devguide/deploy-apps/healthchecks.html + health_check_type = "port" + + ## Default number of application instances to deploy. + instances = 1 + + ## Default amount of memory to use for an application. 
+ memory = 64 + + port = 8080 + + ## The default cloudfoundry stack to deploy. + ## https://docs.cloudfoundry.org/devguide/deploy-apps/stacks.html + stack = "cflinuxfs4" + + ## Is the application stopped by default? + stopped = false + + ## Default CloudFoundry deployment strategy. + ## Valid options: "none", "standard", or "blue-green". + ## https://docs.cloudfoundry.org/devguide/deploy-apps/rolling-deploy.html + strategy = "none" + + ## Default wait time for an application to start. + timeout = 300 + } + + ## Configuration settings for the egress proxy application. + # egress = local.egress + + ## External application based on the Terraform workspace being used. + external_applications = {} + + ## The domain name for applications accessible outside of cloud.gov. + external_domain = "app.cloud.gov" + + ## The domain name for applications accessible inside of cloud.gov. + internal_domain = "apps.internal" + + ## The naming convention/pattern for deployed systems and subsystems. + ## %s is replaced with the name of the system. + name_pattern = "${local.project}-%s-${terraform.workspace}" + + ## The name of the cloud.gov organization. + organization = var.cloudgov_organization + + ## Passwords that are generated for workspaces. By default, it's an empty map. + ## If one is defined below in a workspace's settings, it will supersede this one. + passwords = { + # test = {length = 32} + } + + ## A copy of the project name, so it gets added to this setting object. + project = local.project + + ## The name of the current Cloud.gov space. + space = "${terraform.workspace}" + } + + ## + ## + ## The bootstrap workspace. + ## Used to initialize global/project-wide applications/services. 
+ ## + ## + + bootstrap = { + secrets = { + PGDATABASE = { + encrypted = false + key = "db_name" + } + PGHOST = { + encrypted = false + key = "host" + } + PGPASSWORD = { + encrypted = false + key = "password" + } + PGPORT = { + encrypted = false + key = "port" + } + PG_CONN_STR = { + encrypted = false + key = "uri" + } + PGUSER = { + encrypted = false + key = "pg_user" + } + CF_USER = { + encrypted = false + key = "cf_user" + value = var.cloudgov_username + } + CF_PASSWORD = { + encrypted = false + key = "cf_password" + value = var.cloudgov_password + } + CF_ORG = { + encrypted = false + key = "cf_org" + value = var.cloudgov_organization + } + PROJECT = { + encrypted = false + key = "project" + value = local.project + } + TF_BACKEND_SPACE = { + encrypted = false + key = "tf_backend_space" + value = local.production_space + } + TF_BASTION = { + encrypted = false + key = "tf_bastion" + value = "${local.project}-tf-bastion-bootstrap" + } + } + + services = { + terraform-backend = { + ## Applications to bind to this service. + applications = [ "tf-bastion" ] + + ## The size of the instance to deploy. + service_plan = "micro-psql" + + ## The type of service to be deployed. + service_type = "aws-rds" + + ## Tags to add to the service. + tags = [ + terraform.workspace + ] + } + } + space = "prod" + + variables = { + "UBUNTU_VERSION" = { + key = "UBUNTU_VERSION" + value = "jammy" + } + "MODSECURITY_NGINX_VERSION" = { + key = "MODSECURITY_NGINX_VERSION" + value = "1.0.3" + } + } + } + + "${local.production_space}" = { + apps = { + tf-bastion = { + + ## Should the application have access to the internet? + allow_egress = true + + ## Buildpacks to use with this application. + ## List buildpacks avalible with: cf buildpacks + buildpacks = [ + "https://github.com/cloudfoundry/apt-buildpack", + "binary_buildpack" + ] + + ## Command to run when container starts. + command = "./start" + + ## Ephemeral disk storage. + disk_quota = 1024 + + ## Should SSH be enabled? 
+ enable_ssh = true + + ## Environmental variables. Avoid sensitive variables. + environment = { + CF_ORG = var.cloudgov_organization + CF_PASSWORD = var.cloudgov_password + CF_SPACE = var.cloudgov_space + CF_USER = var.cloudgov_username + OPENTOFU_VERSION = "1.8.4" + } + + ## Timeout for health checks, in seconds. + health_check_timeout = 180 + + ## Type of health check. + ## Options: port, process, http + health_check_type = "process" + + ## Number of instances of application to deploy. + instances = 1 + + ## Labels to add to the application. + labels = { + environment = "prod" + } + + ## Maximum amount of memory the application can use. + memory = 512 + + ## Addional network policies to add to the application. + ## Format: name of the application and the port it is listening on. + network_policies = {} + + ## Port the application uses. + #port = 0 + + ## Can the application be accessed outside of cloud.gov? + public_route = false + + ## The source file should be a directory or a zip file. + source = "../applications/tf-bastion" + + space = local.production_space + + ## Templates take templated files and fill them in with sensitive data. + templates = [] + } + } + } + } + + ## Map of the 'all' environement and the current workspace settings. + env = merge(try(local.envs.all, {}), try(local.envs.bootstrap, {})) +} + +output "name" { + value = local.env.passwords +} \ No newline at end of file diff --git a/terraform/bootstrap/main.tf b/terraform/bootstrap/main.tf new file mode 100755 index 00000000..0a81ee40 --- /dev/null +++ b/terraform/bootstrap/main.tf @@ -0,0 +1,86 @@ +locals { + ## Merging of the various credentials and environmental variables. 
+ service_secrets = merge( + flatten( + [ + for service_key, service_value in try(local.env.services, {}) : [ + for key, value in try(module.services.results.service_key[service_key].credentials, {}) : { + "${key}" = nonsensitive(value) + } + ] if try(module.services.results.service_key[service_key].credentials, null) != null + ] + ) + ...) + + local_secrets = merge( + flatten( + [ + for key, value in try(local.env.secrets, {}) : { + "${value.key}" = nonsensitive(value.value) + } if can(value.value) + ] + ) + ...) + + secrets = merge(local.service_secrets, local.local_secrets) + + variables = merge( + flatten( + [ + for key, value in try(local.env.variables, {}) : { + "${value.key}" = nonsensitive(value.value) + } if can(value.value) + ] + ) + ...) +} + +output "secrets" { + value = nonsensitive(local.secrets) +} + +module "random" { + source = "../modules/random" + names = [ "dev" ] + passwords = local.env.passwords +} + +## The instanced services (i.e. RDS, S3, etc.) get created first. +## This allows their credentials to be injected into "user-provided" services (JSON blobs), if needed. 
+module "services" { + source = "../modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + skip_user_provided_services = true +} + +# module "secrets" { +# source = "../modules/service" + +# cloudfoundry = local.cloudfoundry +# env = local.env +# skip_service_instances = true +# secrets = local.secrets +# } + +module "applications" { + #for_each = local.cloudfoundry.spaces + source = "../modules/application" + + cloudfoundry = local.cloudfoundry + env = merge(local.envs.all, local.envs.bootstrap, local.envs[local.production_space]) + secrets = local.secrets + services = module.services.results +} + +module "github" { + source = "../modules/github" + + env = local.env + github_organization = var.github_organization + github_token = var.github_token + repository = local.repository + secrets = local.secrets + variables = local.variables +} diff --git a/terraform/bootstrap/provider.tf b/terraform/bootstrap/provider.tf new file mode 100644 index 00000000..e27de2fa --- /dev/null +++ b/terraform/bootstrap/provider.tf @@ -0,0 +1,21 @@ +terraform { + required_providers { + cloudfoundry = { + source = "cloudfoundry-community/cloudfoundry" + version = "~> 0.5" + } + } + required_version = "> 1.7" +} + +provider "cloudfoundry" { + api_url = local.env.api_url + user = var.cloudgov_username + password = var.cloudgov_password +} + +# Configure the GitHub Provider +provider "github" { + owner = var.github_organization + token = var.github_token +} \ No newline at end of file diff --git a/terraform/bootstrap/variables.tf b/terraform/bootstrap/variables.tf new file mode 100644 index 00000000..69420fc9 --- /dev/null +++ b/terraform/bootstrap/variables.tf @@ -0,0 +1,40 @@ +variable "cloudgov_username" { + description = "The username for the cloudfoundry account." + type = string + sensitive = true +} + +variable "cloudgov_password" { + description = "The password for the cloud.gov account." 
+ type = string + sensitive = true +} + +variable "cloudgov_organization" { + description = "The organization for the cloud.gov account." + type = string + sensitive = true +} + +variable "cloudgov_space" { + description = "The space for the cloud.gov account." + type = string + sensitive = true +} + +variable "github_organization" { + description = "The organization to use with GitHub." + type = string + default = "GSA" +} +variable "github_token" { + description = "The token used to authenticate with GitHub." + type = string + sensitive = true +} + +variable "mtls_port" { + description = "The default port to direct traffic to. Envoy proxy listens on 61443 and redirects to 8080, which the application should listen on." + type = number + default = 61443 +} \ No newline at end of file diff --git a/terraform/docs/locals.tf.MD b/terraform/docs/locals.tf.MD new file mode 100644 index 00000000..026e5086 --- /dev/null +++ b/terraform/docs/locals.tf.MD @@ -0,0 +1,35 @@ +# locals.tf + +This is a high level overview of the `locals.tf` file. The locals.tf file itself is heavily commented and will go into detail about individual settings if further information is required. + +The locals.tf is the main file that needs to be edited to configure your infrastructure. + +### Global variables + +#### project + +This variable holds the prefix of your resource names. For example, this project uses `benefit-finder` as a prefix for service names. + +#### project_full + +This variable is a longer, alternative name used in the project. For example, CircleCI calls this project `benefit-finder-gov`. + +#### bootstrap_workspace + +The name of the `bootstrap` workspace in Terraform. By default, it's `bootstrap`. + +#### global + +An object that sets commonly used applications and services (i.e. the WAF and the database), making configuration easier. + +#### egress + +Settings for the egress proxy that is deployed to the DMZ space. 
+ +#### external_applications + +Settings for applications that aren't managed by Terraform. This is used to save pipeline variables to dynamically configure the other application. + +#### envs + +Settings for the majority of the deployment, that is then merged into a single `object`. The sub-object, `all` are configurations for every environment. The other sub-objects should be the name of your Terraform workspaces. \ No newline at end of file diff --git a/terraform/docs/scripts.MD b/terraform/docs/scripts.MD new file mode 100644 index 00000000..49904319 --- /dev/null +++ b/terraform/docs/scripts.MD @@ -0,0 +1,75 @@ +# Cloud.gov Scripts + +These are scripts that are located in the `scripts` directory. + +## cloudgov-aws-creds.sh + +This script will export credentials to `AWS_ACCESS_KEY_ID`, `AWS_BUCKET`, `AWS_DEFAULT_REGION`, and `AWS_SECRET_ACCESS_KEY`. The export below, `bucket_name` is different than `AWS_BUCKET`, as `bucket_name` is the name of the Cloud.gov service, while `AWS_BUCKET` is the name of the bucket in AWS. + +After exporting the credentials, running `aws s3 ls s3://${AWS_BUCKET}/` should list the files in the bucket. + +After using the script, running the script again will delete the credentials, cleaning them up. + +- `deploy_space`: the space where you would like the account to be provisioned at. +- `bucket_name`: the name of the bucket to generate credentials for. + +``` +export deploy_space="space_name_prod" +export bucket_name="bucket_name" +source ./cloudgov-aws-creds.sh +``` + +## cloud-gov-create-service-account.sh + +This creates pipeline service account credentials for your spaces. If credentials need to be regenerated or rotated, be sure to `tf apply` to the Terraform `bootstrap` environment to update the CircleCI variables. + +- `deploy_space`: the space where you would like the account to be provisioned at. +- `org`: the name of the Cloud.gov organization your account is under. 
+- `prefix`: A name that can be used as a resource prefix for every resource. It is optional. +- `spaces`: A space separated string with all the spaces the service account should have access to. + +``` +export deploy_space="space_name_prod" +export org="org_name" +export prefix="name_prefix" +export spaces="space_name_dev space_name_stage space_name_prod" +bash cloud-gov-create-service-account.sh +``` + +## egress-network-policy.sh + +This script allows public internet access from the provided `deploy_space` variable. + +***NOTE: This should only need to be run once, during project setup.*** + +- `deploy_space`: the space where you would like the account to be provisioned at. +- `org`: the name of the Cloud.gov organization your account is under. + +``` +export deploy_space="space_name_dmz" +export org="org_name" +bash egress-network-policy.sh +``` + +## init.sh + +The `init.sh` script is located in the scripts directory of this repository. This script creates the S3 buckets for the Terraform backend and backups. + +After creating the S3 Buckets, the script will also execute `cloud-gov-create-service-account.sh`. This will create a service account that is used to deploy infrastructure from the pipeline. + +***NOTE: This should only need to be run once, during project setup.*** + +Before running this script, make sure to log in to Cloud.gov with `cf login -a api.fr.cloud.gov --sso`. + +- `deploy_space`: the space where you would like the account to be provisioned at. +- `org`: the name of the Cloud.gov organization your account is under. +- `prefix`: A name that can be used as a resource prefix for every resource. It is optional. +- `spaces`: A space separated string with all the spaces the service account should have access to. 
+ +``` +export deploy_space="space_name_prod" +export org="org_name" +export prefix="name_prefix" +export spaces="space_name_dev space_name_stage space_name_prod" +bash init.sh +``` \ No newline at end of file diff --git a/terraform/infra/.terraform-docs.yaml b/terraform/infra/.terraform-docs.yaml new file mode 100755 index 00000000..b480122f --- /dev/null +++ b/terraform/infra/.terraform-docs.yaml @@ -0,0 +1,2 @@ +header-from: .terraform-docs/header.md +footer-from: .terraform-docs/footer.md diff --git a/terraform/infra/.terraform-docs/footer.md b/terraform/infra/.terraform-docs/footer.md new file mode 100755 index 00000000..5c6a9055 --- /dev/null +++ b/terraform/infra/.terraform-docs/footer.md @@ -0,0 +1,76 @@ +### locals.tf Overview + +This is a high level overview of the `locals.tf` file. The locals.tf file itself is heavily commented and will go into detail about individual settings if further information is required. + +The locals.tf is the main file that needs to be edited to configure your infrastructure. + +#### Global variables + +##### project + +This variable holds the prefix of your resource names. For example, this project uses `benefit-finder` as a prefix for service names. + +##### project_full + +This variable is a longer, alternative name used in the project. For example, CircleCI calls this project `benefit-finder-gov`. + +##### bootstrap_workspace + +The name of the `bootstrap` workspace in Terraform. By default, it's `bootstrap`. + +##### global + +An object that sets commonly used applications and services (i.e. the WAF and the database), making configuration easier. + +##### egress + +Settings for the egress proxy that is deployed to the DMZ space. + +##### external_applications + +Settings for applications that aren't managed by Terraform. This is used to save pipeline variables to dynamically configure the other application. + +##### envs + +Settings for the majority of the deployment, that is then merged into a single `object`. 
The sub-object, `all` are configurations for every environment. The other sub-objects should be the name of your Terraform workspaces. + +### local.env.apps +This is a `map` of `objects`. + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| buildpack | The admin buildpack name or Git URL | `string` | `null` | no | +| buildpacks | A list of buildpack names and/or Git URLs | `list(string)` | `null` | no | +| command | A custom start command for the application. A custom start command for the application. | `string` | `null` | no | +| disk_quota | The size of the buildpack's ephemeral disk in megabytes. | `number` | `1024` | no | +| docker_credentials | A custom start command for the application. | `map` | `null` | no | +| docker_image | The URL to the docker image with tag. | `string` | `null` | no | +| enable_ssh | Whether to enable or disable SSH access to the container. | `bool` | `true` | no | +| environment | Key/value pairs of custom environment variables to set in your app. | `map` | `null` | no | +| health_check_http_endpoint | The endpoint for the http health check type. | `string` | `"/"` | no | +| health_check_invocation_timeout | The timeout in seconds for individual health check requests for "http" and "port" health checks. | `number` | `5` | no | +| health_check_timeout | The timeout in seconds for the health check. | `number` | `180` | no | +| health_check_type | The timeout in seconds for individual health check requests for "http" and "port" health checks. | `string` | `"port"` | no | +| instances | The number of app instances that you want to start. | `number` | `1` | no | +| labels | Adds labels to the application. | `map` | `null` | no | +| memory | The memory limit for each application instance in megabytes. | `number` | `64` | no | +| name | The name of the application. | `string` | n/a | yes | +| path | An URI or path to target a zip file. 
If the path is a directory, the module will create a zip file. | `string` | n/a | yes | +| space | The GUID of the associated Cloud Foundry space. | `string` | n/a | yes | +| stack | The name of the stack the application will be deployed to. `cf stacks` will list valid options. | `string` | `"cflinuxfs4"` | no | +| stopped | Defines the desired application state. Set to true to have the application remain in a stopped state. | `bool` | `false` | no | +| strategy | Strategy ("none", "blue-green", or "rolling") to use for creating/updating application. | `string` | `"none"` | no | +| timeout | Max wait time for app instance startup, in seconds. | `number` | `60` | no | + +### local.env.services +This is a `map` of `objects`. + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| name | The name of the service instance. | `string` | n/a | yes | +| json_params | A json string of arbitrary parameters. | `string` | `null` | no | +| replace_on_params_change | Cloud Foundry will replace the resource on any params change. | `bool` | `false` | no | +| replace_on_service_plan_change | Cloud Foundry will replace the resource on any service plan changes | `bool` | `false` | no | +| space | The ID of the space. | `string` | n/a | yes | +| service_plan | The ID of the service plan. | `string` | n/a | yes | +| tags | List of instance tags. | `list(string)` | `[]` | no | \ No newline at end of file diff --git a/terraform/infra/.terraform-docs/header.md b/terraform/infra/.terraform-docs/header.md new file mode 100755 index 00000000..1575d6ef --- /dev/null +++ b/terraform/infra/.terraform-docs/header.md @@ -0,0 +1 @@ +# Cloud.gov Drupal Infrastructure diff --git a/terraform/infra/.terraform.lock.hcl b/terraform/infra/.terraform.lock.hcl new file mode 100644 index 00000000..913e3ce1 --- /dev/null +++ b/terraform/infra/.terraform.lock.hcl @@ -0,0 +1,93 @@ +# This file is maintained automatically by "tofu init". 
+# Manual edits may be lost in future updates. + +provider "registry.opentofu.org/cloudfoundry-community/cloudfoundry" { + version = "0.51.2" + constraints = "~> 0.5, 0.51.2" + hashes = [ + "h1:DAAWn0QmE75d6agoavWvchV6Ec5yOsxprPMMU7Q+xfM=", + "zh:2c15c7fbc8f15f6c21935d21c1eb8bab3e1454aec3476bc6fcda2d59bbd235a5", + "zh:3efe88cd4c40f1e90d71ceb94088d3ec2260ff01e4a4d722182c042b958c61f0", + "zh:41ea39daf091516f08cf6a5bc1efb88f19ac17ab8c146bc503d5a44c3c0fdd5a", + "zh:5287f2aade8821211426c8eed0a9dacaf41ab0be5a39ecf1526be2f5b71ab5a2", + "zh:7438d2dca479ace7720125c02c24660d4e928ee8b0ebd1514e2841b95d3563ef", + "zh:7583edf26c3160c4271c5ea473e799bca1f65da249d2e2e96dca69f2dce82c40", + "zh:8003fd57163a259d8005c7efa79d1d4d1cd3d98c26f82c58fa84f1e27c0f50d1", + "zh:8a5c05e59f4078193db1ceabd5350863cea869791e8ee5765472c0f36579cdcd", + "zh:8c0ccf62206c242b116ade68fee48c425b897c53f3da40d233c9c9a4a5fb514f", + "zh:9cc6ba428f1cd8c9a2d9bb3333dd16944cafcd2d2ca5af6fc040e4207cef4ea6", + "zh:a0fd393db027f03bde2b0056bfb04ce54827394eb31498b20e9beb3cb8e198b3", + "zh:a3d5cce15f8f611494c510f3a2dde2bd2841ffb2351541ed7f3177ea17f47a35", + "zh:bcf6edbebeabb36bf9254a2b6cdd3ea5e2f7d26836bd1392e6867a901d34c1ee", + "zh:e5a909abf388aa15af09ea1c9b6ec6fea5dd27ba4cefdf247b3afa90bb97cd3c", + "zh:fc2a42a8dbc41e216f63359087bfdaa02fe98717f4b793436e356013097c907a", + ] +} + +provider "registry.opentofu.org/hashicorp/archive" { + version = "2.4.2" + hashes = [ + "h1:tZcueUOGqjDRRzW9b6BMwV++XRqABodQjgC/K3bRoXM=", + "zh:0fee4f61bc999b5174a1268295e04c91c3f6be0160022cb53943b6ec0a3f1055", + "zh:10a895ee751beec68727d3dc6bf8e670f499618bb4b02649544be2c73e89603e", + "zh:1118373dfc03cf524273573e3aff9c99e0bb7128ab3ce0be211fd30e3928dfb8", + "zh:19c1b4c785f1d864e4fcaec7d96045437494efc333f1e661ea9994cd5c969cdb", + "zh:23f0aa399394ce8aa918a6a16ca9f5451d9d5b021e1b08929eb7972f65cb27da", + "zh:27d5daeec1819019a4b94c4980c09626e9cf71de3f54128a621fddb1b94b9ece", + "zh:56244088a96ff9e3a04b23de0ce2fcfa92c1a5fe6c91c6357cceda4d6d441c17", + 
"zh:578fcb23e8ebde3c5be6c5c67377b5e0c404cc807a74d7087e70c8fb3bb59b92", + "zh:9709d108559da5066f24a6d28be661b65a02e908f89b91fe42fc493962a5f466", + "zh:ff2a6df5d22bda78ca284756801ba7c86504e4bf0b48b31c8f5af44eefd9d0e8", + ] +} + +provider "registry.opentofu.org/hashicorp/local" { + version = "2.5.1" + hashes = [ + "h1:87L+rpGao062xifb1VuG9YVFwp9vbDP6G2fgfYxUkQs=", + "zh:031c2c2070672b7e78e0aa15560839278dc57fe7cf1e58a617ac13c67b31d5fb", + "zh:1ef64ea4f8382cd538a76f3d319f405d18130dc3280f1c16d6aaa52a188ecaa4", + "zh:422ce45691b2f384dbd4596fdc8209d95cb43d85a82aaa0173089d38976d6e96", + "zh:7415fbd8da72d9363ba55dd8115837714f9534f5a9a518ec42268c2da1b9ed2f", + "zh:92aa22d071339c8ef595f18a9f9245c287266c80689f5746b26e10eaed04d542", + "zh:9cd0d99f5d3be835d6336c19c4057af6274e193e677ecf6370e5b0de12b4aafe", + "zh:a8c1525b389be5809a97f02aa7126e491ba518f97f57ed3095a3992f2134bb8f", + "zh:b336fa75f72643154b07c09b3968e417a41293358a54fe03efc0db715c5451e6", + "zh:c66529133599a419123ad2e42874afbd9aba82bd1de2b15cc68d2a1e665d4c8e", + "zh:c7568f75ba6cb7c3660b69eaab8b0e4278533bd9a7a4c33ee6590cc7e69743ea", + ] +} + +provider "registry.opentofu.org/hashicorp/random" { + version = "3.6.1" + hashes = [ + "h1:egGGMQ18ihxoFBTgL/6aRL2N5/0bTI738Mg+TTsvBHA=", + "zh:1208af24d1f66e858740812dd5da12e8951b1ca75cc6edb1975ba22bfdeefb1b", + "zh:19137e9b4d3c15e1d99d2352888b98ec0e69bd5b2e89049150379d7bbd115063", + "zh:26613834a1a8ac60390c7a4cbd4cb794b01dfe237d2b0c10f132f3e434a21e03", + "zh:2cbe4425918f3f401609d89e6381f7d120493d637a3d103d827f0c0fd00b1600", + "zh:44ef27a972540435efa88f323280f96d6ac77934079225e7fcc3560cc28aae59", + "zh:8c5d4ca7d1ce007f7c055807cde77aad4685eb807ff802c93ffbec8589068f17", + "zh:9a4fa908d6af48805c862cd4f3a1031d552b96d863a94263e390ac92915d74a9", + "zh:ba396849f0f6d488784f6039095634e1c84e67e31375f3d17218fcf8ce952cb8", + "zh:cb695db8798957bd64ce411f061307e39cb2baa69668b4d42ccf010db47d2e39", + "zh:d02704bf99a93dc0b1ca00bd6051df9c431fbe17cd662a1ab58db1b96264a26f", + ] +} + +provider 
"registry.opentofu.org/hashicorp/time" { + version = "0.11.1" + hashes = [ + "h1:+S9YvR/HeCxFGMS3ITjOFqlWrR6DdarWWowT9Cz18/M=", + "zh:048c56f9f810f67a7460363a26bf3ef939d64f0d7b7342b9e7f24cc85ee1491b", + "zh:49f949cc5cb50fbb65f7b4578b79fbe02b6bafe9e3f5f1c2936114dd445b84b3", + "zh:553174a4fa88f6e186800d7ee155a6b5b4c6c81793643f1a20eab26becc7f823", + "zh:5cae304e21f77091d4b50389c655afd5e4e2e8d4cd9c06de139a31b8e7d343a9", + "zh:7aae20832bd9885f034831aa44db3a6ffcec034a2d5a2815d92c42c40c14ca1d", + "zh:93d715610dce777474b5eff1d7dbe797e72ca0b679cd8636efb3aa45d1cb589e", + "zh:bd29e04645775851eb10e7f3b39104ae57ca3632dec4ae07328d33d4182e7fb5", + "zh:d6ad6a4d52a6989b8452466f2ec3dbcdb00cc44a96bd1ca618d91a5d74895f49", + "zh:e68cfad3ec526631410fa9406938d624fd56b9ab065c76525cb3f731d106fbfe", + "zh:ffee8aa6b7ce56f4b8fdc0c492404be0041137a278388eb1d1180b637fb5b3de", + ] +} diff --git a/terraform/infra/.tflint.hcl b/terraform/infra/.tflint.hcl new file mode 100755 index 00000000..64a6ccc3 --- /dev/null +++ b/terraform/infra/.tflint.hcl @@ -0,0 +1,26 @@ +config { + format = "compact" + plugin_dir = "~/.tflint.d/plugins" + + module = true + force = false + disabled_by_default = false + + varfile = ["terraform.tfvars"] +} + +rule "terraform_unused_declarations" { + enabled = false +} + +plugin "opa" { + enabled = true + version = "0.2.0" + source = "github.com/terraform-linters/tflint-ruleset-opa" +} + +plugin "terraform" { + enabled = true + version = "0.2.2" + source = "github.com/terraform-linters/tflint-ruleset-terraform" +} \ No newline at end of file diff --git a/terraform/infra/TERRAFORM.md b/terraform/infra/TERRAFORM.md new file mode 100644 index 00000000..7012bbd7 --- /dev/null +++ b/terraform/infra/TERRAFORM.md @@ -0,0 +1,132 @@ + +# Cloud.gov Drupal Infrastructure + +## Requirements + +No requirements. 
+ +## Providers + +| Name | Version | +|------|---------| +| [cloudfoundry](#provider\_cloudfoundry) | 0.51.2 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [applications](#module\_applications) | ./modules/application | n/a | +| [certificates](#module\_certificates) | ./modules/certificate | n/a | +| [circleci](#module\_circleci) | ./modules/circleci | n/a | +| [random](#module\_random) | ./modules/random | n/a | +| [secrets](#module\_secrets) | ./modules/service | n/a | +| [services](#module\_services) | ./modules/service | n/a | + +## Resources + +| Name | Type | +|------|------| +| [cloudfoundry_app.egress_proxy](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/app) | data source | +| [cloudfoundry_app.external_applications](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/app) | data source | +| [cloudfoundry_domain.external](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/domain) | data source | +| [cloudfoundry_domain.internal](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/domain) | data source | +| [cloudfoundry_org.this](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/org) | data source | +| [cloudfoundry_route.egress_proxy](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/route) | data source | +| [cloudfoundry_service.this](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/service) | data source | +| [cloudfoundry_space.egress_proxy](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/space) | data source | +| [cloudfoundry_space.this](https://registry.terraform.io/providers/hashicorp/cloudfoundry/latest/docs/data-sources/space) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | 
+|------|-------------|------|---------|:--------:| +| [circleci\_token](#input\_circleci\_token) | CircleCI token. | `string` | n/a | yes | +| [cloudgov\_password](#input\_cloudgov\_password) | The password for the cloud.gov account. | `string` | n/a | yes | +| [cloudgov\_username](#input\_cloudgov\_username) | The username for the cloudfoundry account. | `string` | n/a | yes | +| [mtls\_port](#input\_mtls\_port) | The default port to direct traffic to. Envoy proxy listens on 61443 and redirects to 8080, which the application should listen on. | `number` | `61443` | no | +| [newrelic\_key](#input\_newrelic\_key) | The API key for New Relic. | `string` | n/a | yes | +| [no\_proxy](#input\_no\_proxy) | URIs that shouldn't be using the proxy to communicate. | `string` | `"apps.internal"` | no | +| [proxy\_password](#input\_proxy\_password) | The proxy password. | `string` | n/a | yes | +| [proxy\_username](#input\_proxy\_username) | The proxy username. | `string` | n/a | yes | + +## Outputs + +No outputs. + +### locals.tf Overview + +This is a high level overview of the `locals.tf` file. The locals.tf file itself is heavily commented and will go into detail about individual settings if further information is required. + +The locals.tf is the main file that needs to be edited to configure your infrastructure. + +#### Global variables + +##### project + +This variable holds the prefix of your resource names. For example, this project uses `benefit-finder` as a prefix for service names. + +##### project\_full + +This variable is a longer, alternative name used in the project. For example, CircleCI calls this project `benefit-finder-gov`. + +##### bootstrap\_workspace + +The name of the `bootstrap` workspace in Terraform. By default, it's `bootstrap`. + +##### global + +An object that sets commonly used applications and services (i.e. the WAF and the database), making configuration easier. 
+ +##### egress + +Settings for the egress proxy that is deployed to the DMZ space. + +##### external\_applications + +Settings for applications that aren't managed by Terraform. This is used to save pipeline variables to dynamically configure the other application. + +##### envs + +Settings for the majority of the deployment, that is then merged into a single `object`. The sub-object `all` contains configurations for every environment. The other sub-objects should be the names of your Terraform workspaces. + +### local.env.apps +This is a `map` of `objects`. + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| buildpack | The admin buildpack name or Git URL | `string` | `null` | no | +| buildpacks | A list of buildpack names and/or Git URLs | `list(string)` | `null` | no | +| command | A custom start command for the application. | `string` | `null` | no | +| disk\_quota | The size of the buildpack's ephemeral disk in megabytes. | `number` | `1024` | no | +| docker\_credentials | The credentials used to authenticate when pulling the Docker image. | `map` | `null` | no | +| docker\_image | The URL to the docker image with tag. | `string` | `null` | no | +| enable\_ssh | Whether to enable or disable SSH access to the container. | `bool` | `true` | no | +| environment | Key/value pairs of custom environment variables to set in your app. | `map` | `null` | no | +| health\_check\_http\_endpoint | The endpoint for the http health check type. | `string` | `"/"` | no | +| health\_check\_invocation\_timeout | The timeout in seconds for individual health check requests for "http" and "port" health checks. | `number` | `5` | no | +| health\_check\_timeout | The timeout in seconds for the health check. | `number` | `180` | no | +| health\_check\_type | The type of health check to perform ("port", "process", or "http").
| `string` | `"port"` | no | +| instances | The number of app instances that you want to start. | `number` | `1` | no | +| labels | Adds labels to the application. | `map` | `null` | no | +| memory | The memory limit for each application instance in megabytes. | `number` | `64` | no | +| name | The name of the application. | `string` | n/a | yes | +| path | An URI or path to target a zip file. If the path is a directory, the module will create a zip file. | `string` | n/a | yes | +| space | The GUID of the associated Cloud Foundry space. | `string` | n/a | yes | +| stack | The name of the stack the application will be deployed to. `cf stacks` will list valid options. | `string` | `"cflinuxfs4"` | no | +| stopped | Defines the desired application state. Set to true to have the application remain in a stopped state. | `bool` | `false` | no | +| strategy | Strategy ("none", "blue-green", or "rolling") to use for creating/updating application. | `string` | `"none"` | no | +| timeout | Max wait time for app instance startup, in seconds. | `number` | `60` | no | + +### local.env.services +This is a `map` of `objects`. + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| name | The name of the service instance. | `string` | n/a | yes | +| json\_params | A json string of arbitrary parameters. | `string` | `null` | no | +| replace\_on\_params\_change | Cloud Foundry will replace the resource on any params change. | `bool` | `false` | no | +| replace\_on\_service\_plan\_change | Cloud Foundry will replace the resource on any service plan changes | `bool` | `false` | no | +| space | The ID of the space. | `string` | n/a | yes | +| service\_plan | The ID of the service plan. | `string` | n/a | yes | +| tags | List of instance tags. 
| `list(string)` | `[]` | no | + \ No newline at end of file diff --git a/terraform/infra/data.tf b/terraform/infra/data.tf new file mode 100755 index 00000000..59bab4b6 --- /dev/null +++ b/terraform/infra/data.tf @@ -0,0 +1,59 @@ +locals { + cloudfoundry = { + external_applications = try(data.cloudfoundry_app.external_applications, null) + domain_external = try(data.cloudfoundry_domain.external, null) + domain_internal = try(data.cloudfoundry_domain.internal, null) + organization = try(data.cloudfoundry_org.this, null) + services = try(data.cloudfoundry_service.this, null) + space = try(data.cloudfoundry_space.this, null) + } +} + +data "cloudfoundry_app" "external_applications" { + for_each = { + for key, value in try(local.env.external_applications, []) : key => value + if try(value.deployed, false) && + try(data.cloudfoundry_space.this.id, null) != null + } + name_or_id = format(local.env.name_pattern, each.key) + space = try(data.cloudfoundry_space.this.id, null) +} + +data "cloudfoundry_domain" "external" { + //domain = "${split(".", local.env.external_domain)[1]}.${split(".", local.env.external_domain)[2]}" + domain = join(",", slice(split(".", local.env.external_domain), 0, 0)) + sub_domain = split(".", local.env.external_domain)[0] +} + +data "cloudfoundry_domain" "internal" { + domain = join(",", slice(split(".", local.env.external_domain), 0, 0)) + sub_domain = split(".", local.env.internal_domain)[0] +} + +data "cloudfoundry_org" "this" { + name = local.env.organization +} + +data "cloudfoundry_space" "this" { + name = try(local.env.space, terraform.workspace) + org = data.cloudfoundry_org.this.id +} + +data "cloudfoundry_service" "this" { + for_each = { + for key, value in try(local.env.services, {}) : key => value + if value.service_type != "user-provided" && + try(data.cloudfoundry_space.this.id, null) != null + } + + name = each.value.service_type + space = try(data.cloudfoundry_space.this.id, null) +} + +data "cloudfoundry_asg" 
"trusted_local_networks_egress" { + name = "trusted_local_networks_egress" +} + +data "cloudfoundry_asg" "public_networks_egress" { + name = "public_networks_egress" +} \ No newline at end of file diff --git a/terraform/infra/dynamic.tf b/terraform/infra/dynamic.tf new file mode 100644 index 00000000..6404b22a --- /dev/null +++ b/terraform/infra/dynamic.tf @@ -0,0 +1,45 @@ +locals { + + ## Map of service instances and secrets merged together. + services = { + instance = merge( + module.services.results.instance, + module.secrets.results.instance + ) + user_provided = merge( + module.services.results.user_provided, + module.secrets.results.user_provided + ) + service_key = merge( + module.services.results.service_key, + module.secrets.results.service_key + ) + } + + ## Merging of the various credentials and environmental variables. + secrets = merge( + merge( + flatten([ + for app in try(local.env.services, []) : [ + for key, value in try(module.services.results.service_key[app.name].credentials, {}) : { + "${app.name}_${key}" = value + } + ] if try(module.services.results.service_key[app.name].credentials, null) != null + ]) + ...), + merge( + flatten([ + for key, value in try(module.random.results, {}) : { + "${key}" = value.result + } + ]) + ...) + ) + + ## List of the workspaces defined in the configuration above. + workspaces = flatten([ + for key, value in local.envs : [ + key + ] + ]) +} diff --git a/terraform/infra/locals.tf b/terraform/infra/locals.tf new file mode 100755 index 00000000..1653cd5a --- /dev/null +++ b/terraform/infra/locals.tf @@ -0,0 +1,499 @@ +locals { + + ## The name of the project. Used to name most applications and services. + ## Default naming convention: ${local.project}-application-name-${terraform.workspace} + project = "digital-gov" + + ## The full name of the project. If their isn't a longer name, this can be set to + ## local.project. + project_full = "${local.project}" + + ## The names of the project's production workspaces. 
This is used to adjust + ## settings dynamically throughout this configuration file. + production_workspaces = ["main", "dev"] + + cms_fqdn = "https://bf-cms-${terraform.workspace}.bxdev.net" + static_fqdn = "https://bf-static-${terraform.workspace}.bxdev.net" + + tf_backend = { + type = "pg" + name_pattern_psql = "${local.project}-terraform-backend-bootstrap" + name_pattern_secrets = "${local.project}--pg-secrets-bootstrap" + space = "prod" + } + + ## "Common" applications and services that are deployed to every space. + globals = { + apps = { + ## Nginx Web Application Firewall (WAF). + waf = { + + ## Should the application have access to the internet? + allow_egress = true + + ## Buildpacks to use with this application. + ## List buildpacks avalible with: cf buildpacks + buildpacks = [ + "https://github.com/cloudfoundry/apt-buildpack", + "nginx_buildpack" + ] + + ## Command to run when container starts. + command = "./start" + + ## Ephemeral disk storage. + disk_quota = 1024 + + ## Should SSH be enabled? + enable_ssh = true + + ## Environmental variables. Avoid sensitive variables. + environment = { + + ## IP addresses allowed to connected to the CMS. + ALLOWED_IPS_CMS = base64encode( + jsonencode([ + "allow 0.0.0.0/0;" + ]) + ) + + ## The OWASP CRS rules for modsecurity. + CRS_RULES = "coreruleset-4.7.0-minimal.tar.gz" + + ## IP address that are denied access from the static website. + DENYED_IPS_STATIC = base64encode(jsonencode([])) + + ## The current environment the application is running in. + ENV = terraform.workspace + + ## Linux "Load Library Path", where system libraries are located. (i.e. libzip, gd, etc) + LD_LIBRARY_PATH = "/home/vcap/deps/0/lib/" + + ## Ubuntu patch for newer version of mod security. + MODSECURITY_UPDATE = "libmodsecurity3_3.0.9-1_amd64.deb" + + ## Domains that shouldn't be passed to the egress proxy server (i.e. apps.internal). + #no_proxy = var.no_proxy + } + + ## Timeout for health checks, in seconds. 
+ health_check_timeout = 180 + + ## Type of health check. + ## Options: port, process, http + health_check_type = "port" + + ## Number of instances of application to deploy. + instances = 1 + + ## Labels to add to the application. + labels = { + environment = terraform.workspace + } + + ## Maximum amount of memory the application can use. + memory = 96 + + ## Addional network policies to add to the application. + ## Format: name of the application and the port it is listening on. + network_policies = { + drupal = 61443 + } + + ## Port the application uses. + port = 80 + + ## Can the application be accessed outside of cloud.gov? + public_route = true + + ## The source file should be a directory or a zip file. + source = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf" + + ## Templates take templated files and fill them in with sensitive data. + ## The proxy-to-static.conf has the S3 bucket written to it during + ## the 'terraform apply' command, before it the files are zipped up and + ## uploaded to cloud.gov. + templates = [ + { + source = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf/nginx/snippets/proxy-to-storage.conf.tmpl" + destination = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf/nginx/snippets/proxy-to-storage.conf" + }, + { + source = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf/nginx/snippets/proxy-to-static.conf.tmpl" + destination = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf/nginx/snippets/proxy-to-static.conf" + }, + { + source = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf/nginx/snippets/proxy-to-app.conf.tmpl" + destination = "${path.cwd}/${var.terraform_working_dir}/applications/nginx-waf/nginx/snippets/proxy-to-app.conf" + } + ] + } + database-backup-bastion = { + + ## Should the application have access to the internet? + allow_egress = true + + ## Buildpacks to use with this application. 
+ ## List buildpacks avalible with: cf buildpacks + buildpacks = [ + "https://github.com/cloudfoundry/apt-buildpack", + "binary_buildpack" + ] + + ## Command to run when container starts. + command = "./start" + + ## Ephemeral disk storage. + disk_quota = 1024 + + ## Should SSH be enabled? + enable_ssh = true + + ## Environmental variables. Avoid sensitive variables. + environment = { + ## Linux "Load Library Path", where system libraries are located. (i.e. libzip, gd, etc) + LD_LIBRARY_PATH = "/home/vcap/deps/0/lib/" + } + + ## Timeout for health checks, in seconds. + health_check_timeout = 180 + + ## Type of health check. + ## Options: port, process, http + health_check_type = "process" + + ## Number of instances of application to deploy. + instances = 1 + + ## Labels to add to the application. + labels = { + environment = terraform.workspace + } + + ## Maximum amount of memory the application can use. + memory = 64 + + services_external = [ + "${local.project}-mysql-${terraform.workspace}", + "${local.project}-backup-${terraform.workspace}", + terraform.workspace == local.tf_backend.space ? "${local.project}-terraform-backend-default" : null + ] + + ## The source file should be a directory or a zip file. + source = "../applications/database-backup-bastion" + + space = terraform.workspace + + #stopped = true + } + } + + ## Services to deploy in this environment. + services = { + + ## S3 storage for backups. + "backup" = { + + ## Applications to bind to this service. + applications = [] + + ## Should a service key be generated for other applications to use? + service_key = true + + ## The size of the instance to deploy. + service_plan = "basic" + + ## The type of service to be deployed. + service_type = "s3" + + ## Tags to add to the service. + tags = [ + terraform.workspace + ] + }, + + ## MySQL RDS database. + "mysql" = { + + ## Applications to bind to this service. + applications = ["cms"] + + ## The size of the instance to deploy. 
+ service_plan = contains(local.production_workspaces, terraform.workspace) ? "micro-mysql" : "micro-mysql" + + ## The type of service to be deployed. + service_type = "aws-rds" + + ## Tags to add to the service. + tags = [ + terraform.workspace + ] + }, + + ## Credentials and other sensitive variables. + "secrets" = { + + ## Applications to bind to this service. + applications = ["cms", "waf"] + + ## Credentials that should be added to the json blob. + credentials = [ + "cron_key", + "hash_salt", + "static_bucket", + "static_fips_endpoint", + "static_access_key_id", + "static_secret_access_key", + "storage_bucket", + "storage_fips_endpoint", + "storage_access_key_id", + "storage_secret_access_key" + ] + + ## The type of service to be deployed. + service_type = "user-provided" + + ## Tags to add to the service. + tags = [ + terraform.workspace + ] + }, + + ## S3 storage for public files for Drupal. + ## Typically "sites/default/files/" + "storage" = { + + ## Applications to bind to this service. + applications = ["cms", "waf"] + + ## Should a service key be generated for other applications to use? + service_key = true + + ## The size of the instance to deploy. + service_plan = "basic-public-sandbox" + + ## The type of service to be deployed. + service_type = "s3" + + ## Tags to add to the service. + tags = [ + terraform.workspace + ] + }, + + # S3 storage for the statically generated site. + "static" = { + + ## Applications to bind to this service. + applications = ["waf", "cms"] + + ## Should a service key be generated for other applications to use? + service_key = true + + ## The size of the instance to deploy. + service_plan = "basic-public-sandbox" + + ## The type of service to be deployed. + service_type = "s3" + + ## Tags to add to the service. + tags = [ + terraform.workspace + ] + } + } + } + + ## The mTLS port the proxy application uses. + ## Cloudfoundry will automatically redirect connections on this port to local port 8080. 
+ mtls_port = var.mtls_port + + ## Any applications that are external to this Terraform infrastucture. + ## In this case, the Drupal application is deployed via a manifest.yml in the Drupal + ## Github repostitory. + external_applications = { + cms = { + + environement = "dev" + + ## Port is the application listening on. + port = var.mtls_port + }, + cms = { + + environement = "main" + + ## Port is the application listening on. + port = var.mtls_port + } + } + + ## The various environment settings to be deployed. + envs = { + + ## Every environment gets settings in 'all'. + all = { + + ## The API URL for cloud.gov. + api_url = "https://api.fr.cloud.gov" + + ## These values are defaults values when options aren't configured in the application block. + defaults = { + + ## The default size of the containers ephemeral disk. + disk_quota = 2048 + + ## Is SSH enabled on the container by default? + enable_ssh = true + + ## The default health check timeout. + health_check_timeout = 60 + + ## Default method of performing a health check. + ## Valid options: "port", "process", or "http" + ## https://docs.cloudfoundry.org/devguide/deploy-apps/healthchecks.html + health_check_type = "port" + + ## Default number of application instances to deploy. + instances = 1 + + ## Default amount of memory to use memory to use for an application. + memory = 64 + + port = 8080 + + ## The default cloudfoundry stack to deploy. + ## https://docs.cloudfoundry.org/devguide/deploy-apps/stacks.html + stack = "cflinuxfs4" + + ## Is the application stopped by default? + stopped = false + + ## Default CloudFoundry deployment strategy. + ## Valid optons: "none", "standard", or "blue-green". + ## https://docs.cloudfoundry.org/devguide/deploy-apps/rolling-deploy.html + strategy = "none" + + ## Default wait time for an application to start. + timeout = 300 + } + + ## Configuration settings for the egress proxy application. 
+ # egress = local.egress + + ## External application based on the Terraform workspace being used. + external_applications = try(local.external_applications, []) + + ## The domain name for applications accessable external of cloud.gov. + external_domain = "app.cloud.gov" + + ## The domain name for applications accessable inside of cloud.gov. + internal_domain = "apps.internal" + + ## The naming convention/pattern for deployed systems and subsystems. + ## %s is replaced with the name of the system. + name_pattern = "${local.project}-%s-${terraform.workspace}" + + ## The name of the cloud.gov organization. + organization = "gsa-digitalgov-prototyping" + + ## Passwords that are generated for workspaces. By default, it's an empty map. + ## If one is defined below in a workspace's settings, it will supersed this one. + passwords = { + hash_salt = { + length = 32 + } + cron_key = { + length = 32 + } + } + + ## A copy of the project name, so it gets added to this setting object. + project = local.project + + ## The name of the current Cloud.gov space. + space = terraform.workspace + } + + ################################# + ## + ## ____ + ## | _ \ _____ __ + ## | | | |/ _ \ \ / / + ## | |_| | __/\ V / + ## |____/ \___| \_/ + ## + ################################# + + dev = merge( + { + ## Applications to deploy. + apps = local.globals.apps + services = local.globals.services + }, + { + + ## Passwords that need to be generated for this environment. + ## These will actually use the sha256 result from the random module. + passwords = { + hash_salt = { + length = 32 + } + cron_key = { + length = 32 + } + } + } + ) + + ################################# + ## + ## ____ _ + ## | _ \ _ __ ___ __| | + ## | |_) | '__/ _ \ / _` | + ## | __/| | | (_) | (_| | + ## |_| |_| \___/ \__,_| + ## + ################################# + + + main = merge( + { + ## Applications to deploy. 
+ apps = local.globals.apps + services = local.globals.services + }, + { + + ## Passwords that need to be generated for this environment. + ## These will actually use the sha256 result from the random module. + passwords = { + hash_salt = { + length = 32 + } + cron_key = { + length = 32 + } + } + } + ) + } + + ## Map of the 'all' environement and the current workspace settings. + env = merge(try(local.envs.all, {}), try(local.envs[terraform.workspace], {})) + + service_bindings = merge( + flatten( + [ + for key, value in try(local.env.services, {}) : { + #svc_value.name => svc_value + "${key}" = value + } + ] + ) + ...) +} + +# output "name" { +# value = local.env +# } \ No newline at end of file diff --git a/terraform/infra/main.tf b/terraform/infra/main.tf new file mode 100755 index 00000000..ec186f4a --- /dev/null +++ b/terraform/infra/main.tf @@ -0,0 +1,65 @@ +module "random" { + source = "../modules/random" + names = local.workspaces + passwords = local.env.passwords +} + +## Currently broken in CF provider v0.53.1. +# resource "cloudfoundry_space_asgs" "trusted_local_networks_egress" { +# space = data.cloudfoundry_space.this.id +# running_asgs = [ +# data.cloudfoundry_asg.trusted_local_networks_egress.id, +# data.cloudfoundry_asg.public_networks_egress.id +# ] +# staging_asgs = [] +# } + +## The instanced services (i.e. RDS, S3, etc.) get created first. +## This allows their credentials to be injected into "user-provided" services (JSON blobs), if needed. 
+module "services" { + source = "../modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + skip_user_provided_services = true + + depends_on = [ + module.random + ] +} + +module "secrets" { + source = "../modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + skip_service_instances = true + secrets = local.secrets + + depends_on = [ + module.random + ] +} + +module "applications" { + source = "../modules/application" + + cloudfoundry = local.cloudfoundry + env = local.env + secrets = local.secrets + services = local.services + + depends_on = [ module.services ] +} + +# output "name" { +# value = merge( +# flatten( +# [ +# for service in try(local.env.services, {}) : { +# "${service.name}" = service +# } +# ] +# ) +# ...) +# } \ No newline at end of file diff --git a/terraform/infra/provider.tf b/terraform/infra/provider.tf new file mode 100644 index 00000000..cc8f39a4 --- /dev/null +++ b/terraform/infra/provider.tf @@ -0,0 +1,19 @@ +terraform { + required_providers { + cloudfoundry = { + source = "cloudfoundry-community/cloudfoundry" + version = "~> 0.5" + } + } + required_version = "> 1.7" +} + +terraform { + backend "pg" { } +} + +provider "cloudfoundry" { + api_url = local.env.api_url + user = var.cloudgov_username + password = var.cloudgov_password +} diff --git a/terraform/infra/scripts/cloudgov-aws-creds.sh b/terraform/infra/scripts/cloudgov-aws-creds.sh new file mode 100755 index 00000000..3c72a6d6 --- /dev/null +++ b/terraform/infra/scripts/cloudgov-aws-creds.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +current_path=$(pwd) + +[ -z "${bucket_name}" ] && echo "No bucket name!" && help && exit 1 + +echo "Getting bucket credentials..." 
+{ + current_space=$(cf target | grep space | awk '{print $2}') + + cf target -s "${deploy_space}" + + service_key="${bucket_name}-key" + s3_credentials=$(cf service-key "${bucket_name}" "${service_key}" | tail -n +2) +} >/dev/null 2>&1 + +if [ "${s3_credentials}" = "FAILED" ] ; then + echo "Key not found. Creating..." + { + cf create-service-key "${bucket_name}" "${service_key}" + s3_credentials=$(cf service-key "${bucket_name}" "${service_key}" | tail -n +2) + aws_access_key=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + aws_bucket_name=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + aws_bucket_region=$(echo "${s3_credentials}" | jq -r '.credentials.region') + aws_secret_key=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + export AWS_ACCESS_KEY_ID=${aws_access_key} + export AWS_BUCKET=${aws_bucket_name} + export AWS_DEFAULT_REGION=${aws_bucket_region} + export AWS_SECRET_ACCESS_KEY=${aws_secret_key} + + cf target -s "${current_space}" + + } >/dev/null 2>&1 +else + echo "Key found. Deleting..." + { + current_space=$(cf target | grep space | awk '{print $2}') + + cf target -s "${cf_space}" + + cf delete-service-key "${bucket_name}" "${service_key}" -f + + cf target -s "${current_space}" + + } >/dev/null 2>&1 +fi \ No newline at end of file diff --git a/terraform/infra/scripts/cloudgov-create-service-account.sh b/terraform/infra/scripts/cloudgov-create-service-account.sh new file mode 100755 index 00000000..5022b68e --- /dev/null +++ b/terraform/infra/scripts/cloudgov-create-service-account.sh @@ -0,0 +1,84 @@ +#!/bin/bash + +org="" +prefix="" +deploy_space="" +spaces="" + +current_path=$(pwd) +tfvars_file="terraform.tfvars" + +help(){ + echo "Usage: $0 [options]" >&2 + echo + echo " -d Space to create the service token in. Likely production." + echo " -o Name of the Cloud.gov organization." + echo " -p Name of the service account prefix." + echo " -s List of spaces in your project. 
This gives the service account developer access to them." +} + +while getopts 'd:ho:p:s:' flag; do + case "${flag}" in + d) deploy_space="${OPTARG}" ;; + h) help && exit 0 ;; + o) org="${OPTARG}" ;; + p) prefix="${OPTARG}-" ;; + s) spaces=(${OPTARG}) ;; + *) help && exit 1 ;; + esac +done + +[[ -z "${org}" ]] && help && exit 1 +[[ -z "${prefix}" ]] && help && exit 1 +[[ -z "${deploy_space}" ]] && help && exit 1 +[[ -z "${spaces}" ]] && help && exit 1 + +current_space=$(cf target | grep space -A 1 | awk '{print $2}') + +echo "Changing target space to the deployment space..." +{ + cf target -s ${deploy_space} +} >/dev/null 2>&1 + +echo "Checking service key..." +while : ; do + { + service_key=$(cf service-key ${prefix}svc ${prefix}svc-key | sed '1,2d') + } >/dev/null 2>&1 + + if [[ ${service_key} == "" ]]; then + echo "Service key is missing!" + echo "Creating service account..." + # { + cf create-service cloud-gov-service-account space-deployer ${prefix}svc + # } >/dev/null 2>&1 + echo "Creating service key..." + # { + cf create-service-key ${prefix}svc ${prefix}svc-key + # } >/dev/null 2>&1 + else + export cloudgov_password=$(echo ${service_key} | jq -r '.credentials.password') + export cloudgov_username=$(echo ${service_key} | jq -r '.credentials.username') + + for space in ${spaces[@]}; do + echo "Adding '${space}' to service account..." + cf set-space-role ${cloudgov_username} ${org} ${space} SpaceDeveloper + # >/dev/null 2>&1 + + echo "Allowing internet access for '${space}' deployment staging..." + cf bind-security-group public_networks ${org} --space ${space} --lifecycle staging + # >/dev/null 2>&1 + done + break + fi + sleep 1 +done + +echo "Changing target space to the previous space..." 
+{ + cf target -s ${current_space} +} >/dev/null 2>&1 + +cp terraform.tfvars terraform.tfvars.tmp +envsubst '$cloudgov_password,$cloudgov_username' < "${tfvars_file}.tmp" > ${tfvars_file} +rm ${tfvars_file}.tmp \ No newline at end of file diff --git a/terraform/infra/scripts/egress-network-policy.sh b/terraform/infra/scripts/egress-network-policy.sh new file mode 100755 index 00000000..6bd1f3ca --- /dev/null +++ b/terraform/infra/scripts/egress-network-policy.sh @@ -0,0 +1,12 @@ +#!/bin/bash + + +echo "Allowing internet access from '${deploy_space}'..." + +space_current=$(cf spaces | grep $(terraform workspace show)) + +cf target -s ${deploy_space} + +cf bind-security-group public_networks_egress ${org} --space ${deploy_space} + +cf target -s ${space_current} \ No newline at end of file diff --git a/terraform/infra/scripts/init.sh b/terraform/infra/scripts/init.sh new file mode 100755 index 00000000..b7834ce7 --- /dev/null +++ b/terraform/infra/scripts/init.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +## If a project name is just a dash, no project name was set, so remove the dash. +[ "${prefix}" = "-" ] && prefix="" + +echo "Creating terraform backend bucket..." +{ + service="${prefix}terraform-backend" + service_key="${service}-key" + cf create-service s3 basic "${service}" + cf create-service-key "${service}" "${service_key}" + s3_credentials=$(cf service-key "${service}" "${service_key}" | tail -n +2) + + export backend_aws_access_key=$(echo "${s3_credentials}" | jq -r '.credentials.access_key_id') + export backend_aws_secret_key=$(echo "${s3_credentials}" | jq -r '.credentials.secret_access_key') + export backend_aws_bucket_name=$(echo "${s3_credentials}" | jq -r '.credentials.bucket') + export backend_aws_bucket_region=$(echo "${s3_credentials}" | jq -r '.credentials.region') + + envsubst '$backend_aws_bucket_name,$backend_aws_bucket_region' < provider.tf.tmpl > provider.tf +} >/dev/null 2>&1 + +echo "Creating backup bucket..." 
+{ + service_backup="${prefix}backup" + cf create-service s3 basic "${service_backup}" +} >/dev/null 2>&1 + +./scripts/cloudgov-create-service-account.sh -d ${deploy_space} -o ${org} -p ${prefix} -s ${spaces} \ No newline at end of file diff --git a/terraform/infra/terraform.tfvars.tmpl b/terraform/infra/terraform.tfvars.tmpl new file mode 100644 index 00000000..4b0a21f2 --- /dev/null +++ b/terraform/infra/terraform.tfvars.tmpl @@ -0,0 +1,2 @@ +cloudgov_password="$CF_PASSWORD" +cloudgov_username="$CF_USER" \ No newline at end of file diff --git a/terraform/infra/variables.tf b/terraform/infra/variables.tf new file mode 100755 index 00000000..4720b440 --- /dev/null +++ b/terraform/infra/variables.tf @@ -0,0 +1,23 @@ +variable "cloudgov_username" { + description = "The username for the cloudfoundry account." + type = string + sensitive = true +} + +variable "cloudgov_password" { + description = "The password for the cloud.gov account." + type = string + sensitive = true +} + +variable "terraform_working_dir" { + description = "Working directory for Terraform." + type = string + default = "digital-gov-drupal/terraform" +} + +variable "mtls_port" { + description = "The default port to direct traffic to. Envoy proxy listens on 61443 and redirects to 8080, which the application should listen on." 
+ type = number + default = 61443 +} \ No newline at end of file diff --git a/terraform/modules/application/.terraform-docs.yaml b/terraform/modules/application/.terraform-docs.yaml new file mode 100755 index 00000000..b480122f --- /dev/null +++ b/terraform/modules/application/.terraform-docs.yaml @@ -0,0 +1,2 @@ +header-from: .terraform-docs/header.md +footer-from: .terraform-docs/footer.md diff --git a/terraform/modules/application/.terraform-docs/footer.md b/terraform/modules/application/.terraform-docs/footer.md new file mode 100755 index 00000000..4ee1fd20 --- /dev/null +++ b/terraform/modules/application/.terraform-docs/footer.md @@ -0,0 +1,85 @@ +## Example + +```terraform +module "applications" { + source = "./modules/application" + + cloudfoundry = local.cloudfoundry + env = local.env + secrets = local.secrets + services = local.services +} +``` + +## Variables + +### cloudfoundry + +A variable that contains a `map(string)` of data lookups for pre-existing resources from Cloud.gov. This includes things such as the organization and space ids. These are defined in `data.tf` in the root directory. + +### env + +A mixed type `object` variable that contains application settings. It is passed as an `any` type to allow optional variables to be omitted from the object. It is defined in `locals.tf`, in the root directory. The object `local.env[terraform.workspace].apps` stores the values for the specific application that is to be deployed. + +Valid options are the attributes for the [cloudfoundry_app](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/app) resource. + +### secrets + +A variable that has secrets and other credentials that the application uses. The `local.secrets` variable is generated in `locals_dynamic.tf`, as it merges a variety of credentials from the random and services modules. + +### services + +A variable that contains a `map(map(string))` of the services deployed in the environment.
`local.services` is generated in `locals_dynamic.tf`, due to needing to be generated after the creation of the services, after the instance id are known. The services are then bound to the application. + +See the [service module](../service/readme.MD) for more information. + +## Usage + +Here is an example of how to define an application in `locals.tf`. + +```terraform +locals { + env = { + workspace1 = { + apps = { + application1 = { + buildpacks = [ + "staticfile_buildpack" + ] + command = "./start" + disk_quota = 256 + enable_ssh = true + environment = { + environment = terraform.workspace + LD_LIBRARY_PATH = "/home/vcap/deps/0/lib/" + } + health_check_timeout = 180 + health_check_type = "port" + instances = 1 + labels = { + environment = terraform.workspace + } + memory = 64 + port = 8080 + public_route = false + + source = "/path/to/application/directory" + + templates = [ + { + source = "${path.cwd}/path/to/templates/template.tmpl" + destination = "${path.cwd}}/path/to/templates/file" + } + ] + } + } + } + } +} +``` + +## Additional Notes + +- Buildpacks + - Valid built-in Cloud.gov buildpacks can be found by running `cf buildpacks` from the CLI. + - External buildpacks, such as the `apt-buildpack` by referencing the URL to the buildpack repository: [https://github.com/cloudfoundry/apt-buildpack](https://github.com/cloudfoundry/apt-buildpack). 
\ No newline at end of file diff --git a/terraform/modules/application/.terraform-docs/header.md b/terraform/modules/application/.terraform-docs/header.md new file mode 100755 index 00000000..cb180d7c --- /dev/null +++ b/terraform/modules/application/.terraform-docs/header.md @@ -0,0 +1 @@ +# CloudFoundry Application Module diff --git a/terraform/modules/application/README.md b/terraform/modules/application/README.md new file mode 100644 index 00000000..22607fe8 --- /dev/null +++ b/terraform/modules/application/README.md @@ -0,0 +1,137 @@ + +# CloudFoundry Application Module + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | > 1.7 | +| [cloudfoundry](#requirement\_cloudfoundry) | ~> 0.5 | + +## Providers + +| Name | Version | +|------|---------| +| [archive](#provider\_archive) | n/a | +| [cloudfoundry](#provider\_cloudfoundry) | ~> 0.5 | +| [local](#provider\_local) | n/a | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [cloudfoundry_app.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/app) | resource | +| [cloudfoundry_network_policy.egress_proxy](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/network_policy) | resource | +| [cloudfoundry_network_policy.ingress_proxy](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/network_policy) | resource | +| [cloudfoundry_route.external](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/route) | resource | +| [cloudfoundry_route.internal](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/route) | resource | +| [local_sensitive_file.this](https://registry.terraform.io/providers/hashicorp/local/latest/docs/resources/sensitive_file) | resource | +| 
[archive_file.this](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [cloudfoundry](#input\_cloudfoundry) | Cloudfoundry settings. |
object(
{
domain_external = object(
{
domain = string
id = string
internal = bool
name = string
org = string
sub_domain = string
}
)
domain_internal = object(
{
domain = string
id = string
internal = bool
name = string
org = string
sub_domain = string
}
)
external_applications = optional(
map(
object(
{
name = string
environement = string
port = optional(number, 61443)
}
)
),{}
)
organization = object(
{
annotations = map(string)
id = string
labels = map(string)
name = string
}
)
services = map(
object(
{
id = string
name = string
service_broker_guid = string
service_broker_name = string
service_plans = map(string)
space = string
}
)
)
space = object(
{
annotations = map(string)
id = string
labels = map(string)
name = string
org = string
org_name = string
quota = string
}
)
}
)
| n/a | yes | +| [env](#input\_env) | The settings object for this environment. |
object({
api_url = optional(string, "https://api.fr.cloud.gov")
apps = optional(
map(
object({
allow_egress = optional(bool, true)
buildpacks = list(string)
command = optional(string, "entrypoint.sh")
disk_quota = optional(number, 1024)
enable_ssh = optional(bool, false)
environment = optional(map(string), {})
health_check_timeout = optional(number, 180)
health_check_type = optional(string, "port")
instances = optional(number, 1)
labels = optional(map(string), {})
memory = optional(number, 96)
network_policies = optional(map(number),{})
port = optional(number, 80)
public_route = optional(bool, false)
space = optional(string ,null)
source = optional(string, null)
templates = list(map(string))
})
), {}
)
bootstrap_workspace = optional(string, "bootstrap")
defaults = object(
{
disk_quota = optional(number, 2048)
enable_ssh = optional(bool, true)
health_check_timeout = optional(number, 60)
health_check_type = optional(string, "port")
instances = optional(number, 1)
memory = optional(number, 64)
port = optional(number, 8080)
stack = optional(string, "cflinuxfs4")
stopped = optional(bool, false)
strategy = optional(string, "none")
timeout = optional(number, 300)
}
)
external_applications = optional(
map(
object({
enable_ssh = optional(bool, false)
instances = optional(number, 1)
memory = optional(number, 96)
port = optional(number, 61443)
})
), {}
)
external_domain = optional(string, "app.cloud.gov")
internal_domain = optional(string, "apps.internal")
name_pattern = string
organization = optional(string, "gsa-tts-usagov")
passwords = optional(
list(
object(
{
length = optional(number, 32)
}
)
), []
)
project = string
secrets = optional(
map(
object(
{
encrypted = bool
key = string
}
)
), {}
)
services = optional(
map(
object(
{
applications = optional(list(string), [])
environement = optional(string, "dev")
service_key = optional(bool, true)
service_plan = optional(string, "basic")
service_type = optional(string, "s3")
tags = optional(list(string), [])
}
)
), {}
)
space = string
})
| n/a | yes | +| [secrets](#input\_secrets) | Sensitive credentials to be used to set application environmental variables. | `map(string)` | `{}` | no | +| [services](#input\_services) | Services generated from the service module. |
object(
{
instance = map(
object(
{
annotations = optional(string, null)
id = optional(string, null)
json_params = optional(string, null)
labels = optional(map(string), {})
name = optional(string, null)
recursive_delete = optional(bool, null)
replace_on_params_change = optional(bool, false)
replace_on_service_plan_change = optional(bool, false)
service_plan = optional(string, null)
space = optional(string, null)
tags = optional(list(string), null)
}
)
)
user_provided = map(
object(
{
annotations = optional(string, null)
id = optional(string, null)
json_params = optional(string, null)
labels = optional(map(string), {})
name = optional(string, null)
recursive_delete = optional(bool, null)
replace_on_params_change = optional(bool, false)
replace_on_service_plan_change = optional(bool, false)
service_plan = optional(string, null)
space = optional(string, null)
tags = optional(list(string), null)
}
)
)
service_key = map(
object(
{
name = optional(string, null)
service_instance = optional(string, null)
params = optional(map(string), null)
params_json = optional(string, null)
credentials = optional(map(string), {})
}
)
)
}
)
| `null` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [apps](#output\_apps) | A `map` of [cloudfoundry\_app](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/app) resource outputs. The key is the app name. | +| [external\_endpoints](#output\_external\_endpoints) | A map of external URL's (app.cloud.gov) to used to reach an application. The key is the app name. | +| [internal\_endpoints](#output\_internal\_endpoints) | A map of internal URL's (apps.internal) to used to reach an application. The key is the app name. | + +## Example + +```terraform +module "applications" { + source = "./modules/application" + + cloudfoundry = local.cloudfoundry + env = local.env + secrets = local.secrets + services = local.services +} +``` + +## Variables + +### cloudfoundry + +A variable that contains a `map(string)` of data lookups for pre-existing resources from Cloud.gov. This includes thing such as the organization and space ids. These are defined in `data.tf` in the root directory. + +### env + +A mixed type `object` variable that contains application settings. It is passed as an `any` type to allow optional variables to be ommitted from the object. It is defined in `locals.tf`, in the root directory. The object `local.env[terraform.workspace].apps` stores the values for the specific application that is to be deployed. + +Valid options are the attributes for the [cloudfoundry\_app](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/app) resource. + +### secrets + +A variable that has secrets and other credentials that the application uses. The `local.secrets` variable is generated in `locals_dynamic.tf`, as it merges a variety of credentials from the random and services modules. + +### services + +A variable that contains a `map(map(string))` of the services deployed in the environment. 
`local.services` is generated in `locals_dynamic.tf`, due to needing to be generated after the creation of the services, after the instance id are known. The services are then bound to the application. + +See the [service module](../service/readme.MD) for more information. + +## Usage + +Here is an example of how to define an application in `locals.tf`. + +```terraform +locals { + env = { + workspace1 = { + apps = { + application1 = { + buildpacks = [ + "staticfile_buildpack" + ] + command = "./start" + disk_quota = 256 + enable_ssh = true + environment = { + environment = terraform.workspace + LD_LIBRARY_PATH = "/home/vcap/deps/0/lib/" + } + health_check_timeout = 180 + health_check_type = "port" + instances = 1 + labels = { + environment = terraform.workspace + } + memory = 64 + port = 8080 + public_route = false + + source = "/path/to/application/directory" + + templates = [ + { + source = "${path.cwd}/path/to/templates/template.tmpl" + destination = "${path.cwd}}/path/to/templates/file" + } + ] + } + } + } + } +} +``` + +## Additional Notes + +- Buildpacks + - Valid built-in Cloud.gov buildpacks can be found by running `cf buildpacks` from the CLI. + - External buildpacks, such as the `apt-buildpack` by referencing the URL to the buildpack repository: [https://github.com/cloudfoundry/apt-buildpack](https://github.com/cloudfoundry/apt-buildpack). + \ No newline at end of file diff --git a/terraform/modules/application/data.tf b/terraform/modules/application/data.tf new file mode 100644 index 00000000..131a2254 --- /dev/null +++ b/terraform/modules/application/data.tf @@ -0,0 +1,27 @@ +locals { + + ## Create a single list of external service names. Multiple applications + ## could reference the same service, but the GUID only needs to be looked up once. 
+ services_external = toset( + compact( + distinct( + flatten( + [ + for value in try(var.env.apps, {}) : [ + try(value.services_external, []) + ] + ] + ) + ) + ) + ) +} + +## Lookup up service instance GUID's for existing services. +## These can be externally deployed services or services deployed from different code sources. +## The GUID can then be refrenced by data.cloudfoundry_service_instance.this["service-name"].id +data "cloudfoundry_service_instance" "this" { + for_each = try(local.services_external, []) + name_or_id = each.value + space = try(var.cloudfoundry.space.id, null) +} \ No newline at end of file diff --git a/terraform/modules/application/main.tf b/terraform/modules/application/main.tf new file mode 100755 index 00000000..4e9c76c9 --- /dev/null +++ b/terraform/modules/application/main.tf @@ -0,0 +1,147 @@ +locals { + domains = merge( + merge( + flatten([ + for key, value in try(var.env.apps, {}) : { + "${key}_internal_endpoint" = try(value.public_route, false) ? "${format(var.env.name_pattern, key)}.${var.env.external_domain}" : "${format(var.env.name_pattern, key)}.${var.env.internal_domain}" + } + ]) + ...), + merge( + flatten([ + for key, value in try(var.env.external_applications, {}) : { + "${key}_internal_endpoint" = try(value.public_route, false) ? "${format(var.env.name_pattern, key)}.${var.env.external_domain}" : "${format(var.env.name_pattern, key)}.${var.env.internal_domain}" + } + ]) + ...) + ) + + service_keys = merge( + flatten([ + for service_key, service_value in try(var.env.services, {}) : [ + for key, value in try(var.services.service_key[service_key].credentials, {}) : { + "${service_key}_${key}" = value + } if try(var.services.service_key[service_key].credentials, null) != null + ] + ]) + ...) + + service_bindings = merge( + flatten( + [ + for key, value in try(var.env.services, {}) : { + #svc_value.name => svc_value + "${key}" = value + } + ] + ) + ...) 
+} + +resource "local_sensitive_file" "this" { + for_each = { for key, value in flatten([ + for key, value in try(var.env.apps, {}) : [ + for kt, vt in try(value.templates, []) : { + name = basename(vt.destination) + source = vt.source + destination = vt.destination + vars = value.environment + } + ] + ]) : basename(value.destination) => value + } + + content = templatefile( + each.value.source, + merge( + var.secrets, + local.domains, + local.service_keys + ) + ) + filename = each.value.destination +} + +data "archive_file" "this" { + for_each = { + for key, value in try(var.env.apps, {}) : key => value + if try(value.source, null) != null && !endswith(try(value.source, ""), ".zip") + } + + type = "zip" + source_dir = each.value.source + output_path = "/tmp/${var.env.project}-${each.key}-${terraform.workspace}.zip" + + depends_on = [ + local_sensitive_file.this + ] +} + +resource "cloudfoundry_app" "this" { + for_each = { + for key, value in try(var.env.apps, {}) : key => value + } + + buildpack = try(each.value.buildpack, null) + buildpacks = try(each.value.buildpacks, null) + command = try(each.value.command, null) + disk_quota = try(each.value.disk_quota, try(var.env.defaults.disk_quota, 1024)) + docker_credentials = try(each.value.docker_credentials, null) + docker_image = try(each.value.docker_image, null) + enable_ssh = try(each.value.enable_ssh, try(var.env.defaults.enable_ssh, true)) + environment = try(each.value.environment, {}) + health_check_http_endpoint = try(each.value.health_check_http_endpoint, try(var.env.defaults.health_check_http_endpoint, null)) + health_check_invocation_timeout = try(each.value.health_check_invocation_timeout, try(var.env.defaults.health_check_invocation_timeout, 5)) + health_check_timeout = try(each.value.health_check_timeout, try(var.env.defaults.health_check_timeout, 180)) + health_check_type = try(each.value.health_check_type, try(var.env.defaults.health_check_type, "port")) + instances = try(each.value.instances, 
try(var.env.defaults.instances, 1)) + labels = try(each.value.labels, {}) + memory = try(each.value.memory, try(var.env.defaults.memory, 64)) + name = format(var.env.name_pattern, each.key) + path = endswith(try(each.value.source, ""), ".zip") ? each.value.source : "/tmp/${var.env.project}-${each.key}-${terraform.workspace}.zip" + source_code_hash = endswith(try(each.value.source, ""), ".zip") ? filebase64sha256(each.value.source) : data.archive_file.this[each.key].output_base64sha256 + space = var.cloudfoundry.space.id + stack = try(each.value.stack, try(var.env.defaults.stack, "cflinux4")) + stopped = try(each.value.stopped, try(var.env.defaults.stopped, false)) + strategy = try(each.value.strategy, try(var.env.defaults.strategy, "none")) + timeout = try(each.value.timeout, try(var.env.defaults.timeout, 60)) + + dynamic "service_binding" { + for_each = { + for svc_key, svc_value in try(var.env.services, {}) : svc_key => svc_value + if contains(svc_value.applications, each.key) && svc_value.service_type != "user-provided" + } + content { + service_instance = var.services.instance[service_binding.key].id + params_json = try(var.env.services[service_binding.key].params_json, null) + params = try(var.env.services[service_binding.key].params, {}) + } + } + + dynamic "service_binding" { + for_each = { + for svc_key, svc_value in try(var.env.services, {}) : svc_key => svc_value + if contains(svc_value.applications, each.key) && + svc_value.service_type == "user-provided" + } + content { + service_instance = var.services.user_provided[service_binding.key].id + params_json = try(var.env.services[service_binding.key].params_json, null) + params = try(var.env.services[service_binding.key].params, {}) + } + } + + ## Bind any external services, not deployed by the root code calling this module. 
+ dynamic "service_binding" { + for_each = try(local.services_external, []) + content { + service_instance = data.cloudfoundry_service_instance.this[service_binding.value].id + params_json = try(var.env.services[service_binding.value].params_json, null) + params = try(var.env.services[service_binding.value].params, {}) + } + } + + depends_on = [ + data.archive_file.this, + ] +} diff --git a/terraform/modules/application/networking.tf b/terraform/modules/application/networking.tf new file mode 100755 index 00000000..b10803fe --- /dev/null +++ b/terraform/modules/application/networking.tf @@ -0,0 +1,32 @@ +locals { + merged_applications = merge(cloudfoundry_app.this, var.cloudfoundry.external_applications) +} + +resource "cloudfoundry_network_policy" "ingress_proxy" { + for_each = { + for key, value in try(var.env.apps, []) : value.name => value + if try(value.network_policy, null) != null && + try(var.cloudfoundry.external_applications[value.network_policy.name].id, null) != null + } + policy { + source_app = cloudfoundry_app.this[each.key].id + destination_app = var.cloudfoundry.external_applications[each.value.network_policy.name].id + port = try(var.env.apps[each.key].network_policy_app.port, 8080) + protocol = try(var.env.apps[each.key].network_policy_app.protocol, "tcp") + } +} + +resource "cloudfoundry_network_policy" "egress_proxy" { + for_each = { + for key, value in try(var.env.apps, []) : value.name => value + if try(var.cloudfoundry.egress_app.id, null) != null && + terraform.workspace != try(var.env.egress.workspace, null) + } + + policy { + source_app = cloudfoundry_app.this[each.key].id + destination_app = try(var.cloudfoundry.egress_app.id, null) + port = try(var.env.egress.mtls_port, 61443) + protocol = try(var.env.egress.protocol, "tcp") + } +} diff --git a/terraform/modules/application/outputs.tf b/terraform/modules/application/outputs.tf new file mode 100755 index 00000000..d8e4a883 --- /dev/null +++ b/terraform/modules/application/outputs.tf @@ 
-0,0 +1,34 @@ +output "apps" { + description = "A `map` of [cloudfoundry_app](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/app) resource outputs. The key is the app name." + value = merge( + flatten([ + for key, value in try(var.env.apps, {}) : { + "${key}" = try(cloudfoundry_app.this[key], null) + } + ]) + ...) +} + +output "external_endpoints" { + description = "A map of external URL's (app.cloud.gov) to used to reach an application. The key is the app name." + sensitive = true + value = merge( + flatten([ + for key, value in try(var.env.apps, {}) : { + "${key}" = try(cloudfoundry_route.external[key].endpoint, null) + } if value.public_route + ]) + ...) +} + +output "internal_endpoints" { + description = "A map of internal URL's (apps.internal) to used to reach an application. The key is the app name." + sensitive = true + value = merge( + flatten([ + for key, value in try(var.env.apps, {}) : { + "${key}" = try(cloudfoundry_route.internal[key].endpoint, null) + } if !value.public_route + ]) + ...) 
+} \ No newline at end of file diff --git a/terraform/modules/application/providers.tf b/terraform/modules/application/providers.tf new file mode 100644 index 00000000..d106004d --- /dev/null +++ b/terraform/modules/application/providers.tf @@ -0,0 +1,9 @@ +terraform { + required_providers { + cloudfoundry = { + source = "cloudfoundry-community/cloudfoundry" + version = "~> 0.5" + } + } + required_version = "> 1.7" +} diff --git a/terraform/modules/application/routes.tf b/terraform/modules/application/routes.tf new file mode 100755 index 00000000..f240b628 --- /dev/null +++ b/terraform/modules/application/routes.tf @@ -0,0 +1,34 @@ +resource "cloudfoundry_route" "external" { + for_each = { for key, value in try(var.env.apps, {}) : key => value + if value.public_route && try(value.port, -1) != -1 + } + + domain = var.cloudfoundry.domain_external.id + #space = var.cloudfoundry.space.id + space = try(var.cloudfoundry.spaces[each.value.space].id, var.cloudfoundry.space.id) + hostname = format(var.env.name_pattern, each.key) + port = try(cloudfoundry_app.this[each.key].port, null) + + target { + app = cloudfoundry_app.this[each.key].id + port = 0 + } +} + +resource "cloudfoundry_route" "internal" { + for_each = { + for key, value in try(var.env.apps, {}) : key => value + if !value.public_route && try(value.port, -1) != -1 + } + + domain = var.cloudfoundry.domain_internal.id + #space = var.cloudfoundry.space.id + space = try(var.cloudfoundry.spaces[each.value.space].id, var.cloudfoundry.space.id) + hostname = format(var.env.name_pattern, each.key) + port = try(cloudfoundry_app.this[each.key].port, null) + + target { + app = cloudfoundry_app.this[each.key].id + port = 0 + } +} diff --git a/terraform/modules/application/variables.tf b/terraform/modules/application/variables.tf new file mode 100755 index 00000000..3a4649cc --- /dev/null +++ b/terraform/modules/application/variables.tf @@ -0,0 +1,236 @@ +variable "cloudfoundry" { + description = "Cloudfoundry settings." 
+ type = object( + { + domain_external = object( + { + domain = string + id = string + internal = bool + name = string + org = string + sub_domain = string + } + ) + domain_internal = object( + { + domain = string + id = string + internal = bool + name = string + org = string + sub_domain = string + } + ) + external_applications = optional( + map( + object( + { + name = string + environement = string + port = optional(number, 61443) + } + ) + ),{} + ) + organization = object( + { + annotations = map(string) + id = string + labels = map(string) + name = string + } + ) + services = map( + object( + { + id = string + name = string + service_broker_guid = string + service_broker_name = string + service_plans = map(string) + space = string + } + ) + ) + space = object( + { + annotations = map(string) + id = string + labels = map(string) + name = string + org = string + org_name = string + quota = string + } + ) + } + ) +} + +variable "env" { + description = "The settings object for this environment." 
+ type = object({ + api_url = optional(string, "https://api.fr.cloud.gov") + apps = optional( + map( + object({ + allow_egress = optional(bool, true) + buildpacks = list(string) + command = optional(string, "entrypoint.sh") + disk_quota = optional(number, 1024) + enable_ssh = optional(bool, false) + environment = optional(map(string), {}) + health_check_timeout = optional(number, 180) + health_check_type = optional(string, "port") + instances = optional(number, 1) + labels = optional(map(string), {}) + memory = optional(number, 96) + network_policies = optional(map(number),{}) + port = optional(number, -1) + public_route = optional(bool, false) + services_external = optional(list(string), []) + space = optional(string ,null) + source = optional(string, null) + stopped = optional(bool, false) + templates = optional(list(map(string)), []) + }) + ), {} + ) + bootstrap_workspace = optional(string, "bootstrap") + defaults = object( + { + disk_quota = optional(number, 2048) + enable_ssh = optional(bool, true) + health_check_timeout = optional(number, 60) + health_check_type = optional(string, "port") + instances = optional(number, 1) + memory = optional(number, 64) + port = optional(number, 8080) + stack = optional(string, "cflinuxfs4") + stopped = optional(bool, false) + strategy = optional(string, "none") + timeout = optional(number, 300) + } + ) + external_applications = optional( + map( + object({ + enable_ssh = optional(bool, false) + instances = optional(number, 1) + memory = optional(number, 96) + port = optional(number, 61443) + }) + ), {} + ) + external_domain = optional(string, "app.cloud.gov") + internal_domain = optional(string, "apps.internal") + name_pattern = string + organization = optional(string, "gsa-tts-usagov") + passwords = optional( + map( + object( + { + experation_days = optional(number, 0) + length = number + lower = optional(bool, false) + min_lower = optional(number, 0) + min_numeric = optional(number, 0) + min_special = optional(number, 0) + 
min_upper = optional(number, 0) + numeric = optional(bool, true) + override_special = optional(string, "!@#$%&*()-_=+[]{}<>:?") + special = optional(bool, true) + upper = optional(bool, true) + } + ) + ), {} + ) + project = string + secrets = optional( + map( + object( + { + encrypted = bool + key = string + } + ) + ), {} + ) + services = optional( + map( + object( + { + applications = optional(list(string), []) + environement = optional(string, "dev") + service_key = optional(bool, true) + service_plan = optional(string, "basic") + service_type = optional(string, "s3") + tags = optional(list(string), []) + } + ) + ), {} + ) + space = string + }) +} + +variable "secrets" { + description = "Sensitive credentials to be used to set application environmental variables." + type = map(string) + default = {} +} + +variable "services" { + description = "Services generated from the service module." + type = object( + { + instance = map( + object( + { + annotations = optional(string, null) + id = optional(string, null) + json_params = optional(string, null) + labels = optional(map(string), {}) + name = optional(string, null) + recursive_delete = optional(bool, null) + replace_on_params_change = optional(bool, false) + replace_on_service_plan_change = optional(bool, false) + service_plan = optional(string, null) + space = optional(string, null) + tags = optional(list(string), null) + } + ) + ) + user_provided = map( + object( + { + annotations = optional(string, null) + id = optional(string, null) + json_params = optional(string, null) + labels = optional(map(string), {}) + name = optional(string, null) + recursive_delete = optional(bool, null) + replace_on_params_change = optional(bool, false) + replace_on_service_plan_change = optional(bool, false) + service_plan = optional(string, null) + space = optional(string, null) + tags = optional(list(string), null) + } + ) + ) + service_key = map( + object( + { + name = optional(string, null) + service_instance = optional(string, 
null) + params = optional(map(string), null) + params_json = optional(string, null) + credentials = optional(map(string), {}) + } + ) + ) + } + ) + default = null +} \ No newline at end of file diff --git a/terraform/modules/circleci/.terraform-docs.yaml b/terraform/modules/circleci/.terraform-docs.yaml new file mode 100755 index 00000000..b480122f --- /dev/null +++ b/terraform/modules/circleci/.terraform-docs.yaml @@ -0,0 +1,2 @@ +header-from: .terraform-docs/header.md +footer-from: .terraform-docs/footer.md diff --git a/terraform/modules/circleci/.terraform-docs/footer.md b/terraform/modules/circleci/.terraform-docs/footer.md new file mode 100755 index 00000000..84137580 --- /dev/null +++ b/terraform/modules/circleci/.terraform-docs/footer.md @@ -0,0 +1,12 @@ +## Examples + +```terraform +module "circleci" { + source = "./modules/circleci" + + env = local.env + services = local.services + secrets = local.secrets + schedules = local.env.circleci.schedules +} +``` diff --git a/terraform/modules/circleci/.terraform-docs/header.md b/terraform/modules/circleci/.terraform-docs/header.md new file mode 100755 index 00000000..c0e2e473 --- /dev/null +++ b/terraform/modules/circleci/.terraform-docs/header.md @@ -0,0 +1,7 @@ +# CircleCI Module + +## Introduction + +This terraform module creates and sets CircleCI project/context variables and scheduled (cron-like) pipelines. + +** NOTE: Unless specific permissions are granted to the GSA project, the project won't have access to contexts. \ No newline at end of file diff --git a/terraform/modules/circleci/README.md b/terraform/modules/circleci/README.md new file mode 100644 index 00000000..b0aa4ea8 --- /dev/null +++ b/terraform/modules/circleci/README.md @@ -0,0 +1,60 @@ + +# CircleCI Module + +## Introduction + +This terraform module creates and sets CircleCI project/context variables and scheduled (cron-like) pipelines. 
+ +** NOTE: Unless specific permissions are granted to the GSA project, the project won't have access to contexts. + +## Requirements + +| Name | Version | +|------|---------| +| [circleci](#requirement\_circleci) | 0.8.2 | + +## Providers + +| Name | Version | +|------|---------| +| [circleci](#provider\_circleci) | 0.8.2 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [circleci_context.this](https://registry.terraform.io/providers/healx/circleci/0.8.2/docs/resources/context) | resource | +| [circleci_context_environment_variable.this](https://registry.terraform.io/providers/healx/circleci/0.8.2/docs/resources/context_environment_variable) | resource | +| [circleci_environment_variable.this](https://registry.terraform.io/providers/healx/circleci/0.8.2/docs/resources/environment_variable) | resource | +| [circleci_schedule.schedule](https://registry.terraform.io/providers/healx/circleci/0.8.2/docs/resources/schedule) | resource | +| [circleci_context.this](https://registry.terraform.io/providers/healx/circleci/0.8.2/docs/data-sources/context) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [context\_name](#input\_context\_name) | The CircleCI context to add variables to. | `string` | `null` | no | +| [env](#input\_env) | Project environmental variables. | `any` | n/a | yes | +| [schedules](#input\_schedules) | Set a scheduled pipeline. | `any` | `{}` | no | +| [secrets](#input\_secrets) | Sensitive credentials to be used with the application. | `map(string)` | `{}` | no | + +## Outputs + +No outputs. 
+ +## Examples + +```terraform +module "circleci" { + source = "./modules/circleci" + env = local.env + services = local.services + secrets = local.secrets + schedules = local.env.circleci.schedules +} +``` + \ No newline at end of file diff --git a/terraform/modules/circleci/main.tf b/terraform/modules/circleci/main.tf new file mode 100755 index 00000000..d777b9b9 --- /dev/null +++ b/terraform/modules/circleci/main.tf @@ -0,0 +1,92 @@ +locals { + + ## Used as an "alias" so when 'hours_of_day' is set to '["*"]', it will replace it with every hour in a day. + every_hour = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23] + + ## Used as an "alias" so when 'days_of_week' is set to '["*"]', it will replace it with every day in a week. + every_day = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"] + + ## Is the current workspace the 'bootstrap' environment? + is_bootstrap = terraform.workspace == var.env.bootstrap_workspace ? true : false + + ## Is the current workspace the 'dmz' environment? + is_dmz = terraform.workspace == "dmz" ? true : false + + ## Ignore 'bootstrap' and 'dmz' workspaces. + is_special_space = local.is_bootstrap || local.is_dmz ? true : false + + ## A map of secrets filtered through the list of CircleCI variables. + /* circleci_variables = merge( + flatten([ + for value in try(var.env.circleci_variables, []) : [ + { + "${value}" = "${var.secrets["${value}"]}" + } + ] + ]) + ...) */ +} + +## Get the context if 'var.context_name' is set and we aren't in the bootstrap environment. +## NOTE: The context gets created in the 'bootstrap' environment. +data "circleci_context" "this" { + count = try(var.context_name, null) != null && terraform.workspace != var.env.bootstrap_workspace ? 1 : 0 + name = var.context_name +} + +# Create the context if 'var.context' is set and we are in the bootstrap environment. 
+resource "circleci_context" "this" { + count = try(var.context_name, null) != null && terraform.workspace == var.env.bootstrap_workspace ? 1 : 0 + organization = try(var.env.circleci.organization, null) + name = var.context_name +} + +## Creates a new environment variable that is assigned to a context, if 'var.context_name' is set. +resource "circleci_context_environment_variable" "this" { + for_each = { + for key, value in try(var.env.circleci_variables, []) : value => value + if var.context_name != null + } + + context_id = try(data.circleci_context.this[0].id, circleci_context.this[0].id) + variable = local.is_special_space ? format("%s", each.key) : format("%s_%s", terraform.workspace, each.key) + value = var.secrets[each.key] +} + +## Creates a new environment variable that is assigned to the project, if 'var.context_name' is NOT set. +resource "circleci_environment_variable" "this" { + for_each = { + for key, value in try(var.env.circleci_variables, []) : value => value + if var.context_name == null + } + + project = var.env.circleci.project + name = local.is_special_space ? format("%s", each.key) : format("%s_%s", terraform.workspace, each.key) + value = var.secrets[each.key] +} + +## Creates a scheduled pipeline, which runs at recurring times. +resource "circleci_schedule" "schedule" { + for_each = { + for key, value in var.schedules : key => value + if !contains(value.ignore_workspace, terraform.workspace) + } + + name = format(var.env.name_pattern, each.key) + organization = try(var.env.circleci.organization, null) + project = var.env.circleci.project + description = try(each.value.description, null) + per_hour = try(each.value.per_hour, 1) + hours_of_day = try( + each.value.hours_of_day[0] == "*" ? local.every_hour : try(each.value.hours_of_day, + [9,23]) + ) + days_of_week = try( + each.value.days_of_week[0] == "*" ?
local.every_day : try(each.value.days_of_week, + ["MON", "TUES"]) + ) + use_scheduling_system = try(each.value.scheduling_system, true) + parameters_json = jsonencode( + try(each.value.parameters, {}) + ) +} diff --git a/terraform/modules/circleci/providers.tf b/terraform/modules/circleci/providers.tf new file mode 100644 index 00000000..3c44cb3e --- /dev/null +++ b/terraform/modules/circleci/providers.tf @@ -0,0 +1,9 @@ +terraform { + required_providers { + circleci = { + source = "healx/circleci" + version = "0.8.2" + } + } + required_version = "> 1.7" +} diff --git a/terraform/modules/circleci/variables.tf b/terraform/modules/circleci/variables.tf new file mode 100755 index 00000000..f4eef240 --- /dev/null +++ b/terraform/modules/circleci/variables.tf @@ -0,0 +1,22 @@ +variable "context_name" { + description = "The CircleCI context to add variables to." + type = string + default = null +} + +variable "env" { + description = "Project environmental variables." + type = any +} + +variable "schedules" { + description = "Set a scheduled pipeline." + type = any + default = {} +} + +variable "secrets" { + description = "Sensitive credentials to be used with the application." 
+ type = map(string) + default = {} +} diff --git a/terraform/modules/github/.terraform-docs.yaml b/terraform/modules/github/.terraform-docs.yaml new file mode 100755 index 00000000..b480122f --- /dev/null +++ b/terraform/modules/github/.terraform-docs.yaml @@ -0,0 +1,2 @@ +header-from: .terraform-docs/header.md +footer-from: .terraform-docs/footer.md diff --git a/terraform/modules/github/.terraform-docs/footer.md b/terraform/modules/github/.terraform-docs/footer.md new file mode 100755 index 00000000..e69de29b diff --git a/terraform/modules/github/.terraform-docs/header.md b/terraform/modules/github/.terraform-docs/header.md new file mode 100755 index 00000000..70ad68af --- /dev/null +++ b/terraform/modules/github/.terraform-docs/header.md @@ -0,0 +1 @@ +# Github Secrets and Variables diff --git a/terraform/modules/github/README.md b/terraform/modules/github/README.md new file mode 100644 index 00000000..82bd599a --- /dev/null +++ b/terraform/modules/github/README.md @@ -0,0 +1,42 @@ + +# Github Secrets and Variables + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | > 1.7 | +| [github](#requirement\_github) | ~> 6.0 | + +## Providers + +| Name | Version | +|------|---------| +| [github](#provider\_github) | ~> 6.0 | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [github_actions_secret.this](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_secret) | resource | +| [github_actions_variable.this](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_variable) | resource | +| [github_repository.this](https://registry.terraform.io/providers/integrations/github/latest/docs/data-sources/repository) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [env](#input\_env) | The settings object for this environment. |
object({
api_url = optional(string, "https://api.fr.cloud.gov")
apps = optional(
map(
object({
allow_egress = optional(bool, true)
buildpacks = list(string)
command = optional(string, "entrypoint.sh")
disk_quota = optional(number, 1024)
enable_ssh = optional(bool, false)
environment = optional(map(string), {})
health_check_timeout = optional(number, 180)
health_check_type = optional(string, "port")
instances = optional(number, 1)
labels = optional(map(string), {})
memory = optional(number, 96)
network_policies = optional(map(number),{})
port = optional(number, 80)
public_route = optional(bool, false)
source = optional(string, null)
templates = list(map(string))
})
), {}
)
bootstrap_workspace = optional(string, "bootstrap")
defaults = object(
{
disk_quota = optional(number, 2048)
enable_ssh = optional(bool, true)
health_check_timeout = optional(number, 60)
health_check_type = optional(string, "port")
instances = optional(number, 1)
memory = optional(number, 64)
port = optional(number, 8080)
stack = optional(string, "cflinuxfs4")
stopped = optional(bool, false)
strategy = optional(string, "none")
timeout = optional(number, 300)
}
)
external_applications = optional(
map(
object(
{
name = string
environement = string
port = optional(number, 61443)
}
)
),{}
)
external_domain = optional(string, "app.cloud.gov")
internal_domain = optional(string, "apps.internal")
name_pattern = string
organization = optional(string, "gsa-tts-usagov")
passwords = optional(
list(
object(
{
length = optional(number, 32)
}
)
), []
)
project = string
secrets = optional(
map(
object(
{
encrypted = bool
key = string
}
)
), {}
)
services = optional(
map(
object(
{
applications = optional(list(string), [])
environement = optional(string, "dev")
service_key = optional(bool, true)
service_plan = optional(string, "basic")
service_type = optional(string, "s3")
tags = optional(list(string), [])
}
)
), {}
)
space = string
})
| n/a | yes | +| [github\_organization](#input\_github\_organization) | The organization to use with GitHub. | `string` | `"GSA"` | no | +| [github\_token](#input\_github\_token) | The token used authenticate with GitHub. | `string` | n/a | yes | +| [repository](#input\_repository) | The GitHub respository. | `string` | n/a | yes | +| [secrets](#input\_secrets) | Secrets to create in the respository. | `map(string)` | `{}` | no | + +## Outputs + +No outputs. + \ No newline at end of file diff --git a/terraform/modules/github/main.tf b/terraform/modules/github/main.tf new file mode 100644 index 00000000..d26c8242 --- /dev/null +++ b/terraform/modules/github/main.tf @@ -0,0 +1,18 @@ +data "github_repository" "this" { + full_name = var.repository +} + +resource "github_actions_secret" "this" { + for_each = { for key, value in try(var.env.secrets, []) : key => value } + repository = data.github_repository.this.name + secret_name = each.key + plaintext_value = !try(each.value.encrypted, false) ? try(var.secrets[each.value.key], null) : null + encrypted_value = try(each.value.encrypted, false) ? 
try(var.secrets[each.value.key], null) : null +} + +resource "github_actions_variable" "this" { + for_each = { for key, value in try(var.variables, []) : key => value } + repository = data.github_repository.this.name + variable_name = each.key + value = each.value +} diff --git a/terraform/modules/github/provider.tf b/terraform/modules/github/provider.tf new file mode 100644 index 00000000..a4f1bf27 --- /dev/null +++ b/terraform/modules/github/provider.tf @@ -0,0 +1,15 @@ +terraform { + required_providers { + github = { + source = "integrations/github" + version = "~> 6.0" + } + } + required_version = "> 1.7" +} + +# Configure the GitHub Provider +provider "github" { + owner = var.github_organization + token = var.github_token +} \ No newline at end of file diff --git a/terraform/modules/github/variables.tf b/terraform/modules/github/variables.tf new file mode 100644 index 00000000..b4fd4c50 --- /dev/null +++ b/terraform/modules/github/variables.tf @@ -0,0 +1,132 @@ +variable "env" { + description = "The settings object for this environment." 
+ type = object({ + api_url = optional(string, "https://api.fr.cloud.gov") + apps = optional( + map( + object({ + allow_egress = optional(bool, true) + buildpacks = list(string) + command = optional(string, "entrypoint.sh") + disk_quota = optional(number, 1024) + enable_ssh = optional(bool, false) + environment = optional(map(string), {}) + health_check_timeout = optional(number, 180) + health_check_type = optional(string, "port") + instances = optional(number, 1) + labels = optional(map(string), {}) + memory = optional(number, 96) + network_policies = optional(map(number),{}) + port = optional(number, 80) + public_route = optional(bool, false) + source = optional(string, null) + templates = list(map(string)) + }) + ), {} + ) + bootstrap_workspace = optional(string, "bootstrap") + defaults = object( + { + disk_quota = optional(number, 2048) + enable_ssh = optional(bool, true) + health_check_timeout = optional(number, 60) + health_check_type = optional(string, "port") + instances = optional(number, 1) + memory = optional(number, 64) + port = optional(number, 8080) + stack = optional(string, "cflinuxfs4") + stopped = optional(bool, false) + strategy = optional(string, "none") + timeout = optional(number, 300) + } + ) + external_applications = optional( + map( + object( + { + name = string + environement = string + port = optional(number, 61443) + } + ) + ),{} + ) + external_domain = optional(string, "app.cloud.gov") + internal_domain = optional(string, "apps.internal") + name_pattern = string + organization = optional(string, "gsa-tts-usagov") + passwords = optional( + map( + object( + { + experation_days = optional(number, 0) + length = number + lower = optional(bool, false) + min_lower = optional(number, 0) + min_numeric = optional(number, 0) + min_special = optional(number, 0) + min_upper = optional(number, 0) + numeric = optional(bool, true) + override_special = optional(string, "!@#$%&*()-_=+[]{}<>:?") + special = optional(bool, true) + upper = optional(bool, 
true) + } + ) + ), {} + ) + project = string + secrets = optional( + map( + object( + { + encrypted = bool + key = string + } + ) + ), {} + ) + services = optional( + map( + object( + { + applications = optional(list(string), []) + environement = optional(string, "dev") + service_key = optional(bool, true) + service_plan = optional(string, "basic") + service_type = optional(string, "s3") + tags = optional(list(string), []) + } + ) + ), {} + ) + space = string + }) +} + +variable "github_organization" { + description = "The organization to use with GitHub." + type = string + default = "GSA" +} +variable "github_token" { + description = "The token used authenticate with GitHub." + type = string + sensitive = true +} + +variable "repository" { + description = "The GitHub respository." + type = string +} + +variable "secrets" { + default = {} + description = "Secrets to create in the respository." + type = map(string) +} + +variable "variables" { + default = {} + description = "Variables to create in the respository." + type = map(string) +} diff --git a/terraform/modules/random/.terraform-docs.yaml b/terraform/modules/random/.terraform-docs.yaml new file mode 100755 index 00000000..b480122f --- /dev/null +++ b/terraform/modules/random/.terraform-docs.yaml @@ -0,0 +1,2 @@ +header-from: .terraform-docs/header.md +footer-from: .terraform-docs/footer.md diff --git a/terraform/modules/random/.terraform-docs/footer.md b/terraform/modules/random/.terraform-docs/footer.md new file mode 100755 index 00000000..794502f2 --- /dev/null +++ b/terraform/modules/random/.terraform-docs/footer.md @@ -0,0 +1,55 @@ +## Example + +```terraform +module "random" { + source = "./modules/random" + + names = ["dev", "stage", "prod"] + passwords = local.env.passwords +} +``` + + +## Usage + +### locals.tf + +Passwords to be generated are set in `local.env.passwords`. + +```terraform +locals { + env = { + ... + workspace_name = { + ... 
+ passwords = { + password1 = { + length = 16 + special = false + } + } + } + } +} +``` + +If the attribute `per_workspace` is set for `true`, then `multiple` resources will be created. It will prefix each resource name with each workspace name. It is useful to set this in the `bootstrap` "environment", allowing the passwords to be added as pipeline variables for each environment. + +```terraform +locals { + env = { + ... + bootstrap = { + ... + passwords = { + password2 = { + length = 32 + per_workspace = true + } + } + } + } +} +``` + +If the `per_workspace` value isn't set or is `false`, only `single` resource will be created. diff --git a/terraform/modules/random/.terraform-docs/header.md b/terraform/modules/random/.terraform-docs/header.md new file mode 100755 index 00000000..330c132f --- /dev/null +++ b/terraform/modules/random/.terraform-docs/header.md @@ -0,0 +1,5 @@ +# Random Module + +## Introduction + +This module generates random credentials and hashes that can be used in various applications. \ No newline at end of file diff --git a/terraform/modules/random/README.md b/terraform/modules/random/README.md new file mode 100644 index 00000000..2f99e8a5 --- /dev/null +++ b/terraform/modules/random/README.md @@ -0,0 +1,102 @@ + +# Random Module + +## Introduction + +This module generates random credentials and hashes that can be used in various applications. + +## Requirements + +No requirements. + +## Providers + +| Name | Version | +|------|---------| +| [random](#provider\_random) | n/a | +| [time](#provider\_time) | n/a | + +## Modules + +No modules. 
+ +## Resources + +| Name | Type | +|------|------| +| [random_password.multiple](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/password) | resource | +| [random_password.single](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/password) | resource | +| [time_rotating.multiple](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/rotating) | resource | +| [time_rotating.single](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/rotating) | resource | +| [time_static.multiple](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | +| [time_static.single](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/static) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [names](#input\_names) | List of unique names for the multiple resources. | `list(string)` | `[]` | no | +| [passwords](#input\_passwords) | A map of objects with password settings. |
map(
object(
{
experation_days = optional(number, 0)
length = number
lower = optional(bool, false)
min_lower = optional(number, 0)
min_numeric = optional(number, 0)
min_special = optional(number, 0)
min_upper = optional(number, 0)
numeric = optional(bool, true)
override_special = optional(string, "!@#$%&*()-_=+[]{}<>:?")
special = optional(bool, true)
upper = optional(bool, true)
}
)
)
| n/a | yes | +| [per\_workspace](#input\_per\_workspace) | Generate a password for each workspace. | `bool` | `false` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [results](#output\_results) | A map(string) with the following attributes: result, md5, sha1sha256, and sha512. | + +## Example + +```terraform +module "random" { + source = "./modules/random" + + names = ["dev", "stage", "prod"] + passwords = local.env.passwords +} +``` + +## Usage + +### locals.tf + +Passwords to be generated are set in `local.env.passwords`. + +```terraform +locals { + env = { + ... + workspace_name = { + ... + passwords = { + password1 = { + length = 16 + special = false + } + } + } + } +} +``` + +If the attribute `per_workspace` is set for `true`, then `multiple` resources will be created. It will prefix each resource name with each workspace name. It is useful to set this in the `bootstrap` "environment", allowing the passwords to be added as pipeline variables for each environment. + +```terraform +locals { + env = { + ... + bootstrap = { + ... + passwords = { + password2 = { + length = 32 + per_workspace = true + } + } + } + } +} +``` + +If the `per_workspace` value isn't set or is `false`, only `single` resource will be created. + \ No newline at end of file diff --git a/terraform/modules/random/main.tf b/terraform/modules/random/main.tf new file mode 100755 index 00000000..0c63e241 --- /dev/null +++ b/terraform/modules/random/main.tf @@ -0,0 +1,85 @@ +locals { + passwords = merge( + flatten([ + for name in var.names : [ + for key, value in var.passwords : { + "${name}_${key}" = value + } if try(value.per_workspace, false) + ] + ]) + ...) 
+} + +resource "time_rotating" "single" { + for_each = { + for key, value in var.passwords : key => value + if try(value.experation_days, 0) > 0 + } + rotation_days = each.value.experation_days +} + +resource "time_static" "single" { + for_each = { + for key, value in var.passwords : key => value + if !var.per_workspace && try(value.experation_days, 0) == 0 + } +} + +resource "random_password" "single" { + for_each = { + for key, value in var.passwords : key => value + if !var.per_workspace + } + + length = try(each.value.length, 16) + lower = try(each.value.lower, true) + min_lower = try(each.value.min_lower, 0) + min_numeric = try(each.value.min_numeric, 0) + min_special = try(each.value.min_special, 0) + numeric = try(each.value.numeric, true) + override_special = try(each.value.override_special, "!#$%&*()-_=+[]{}<>:?") + special = try(each.value.special, false) + upper = try(each.value.upper, true) + + + keepers = { + id = try(time_rotating.single[each.key].id, null) != null ? time_rotating.single[each.key].id : time_static.single[each.key].id + } +} + +resource "time_rotating" "multiple" { + for_each = { + for key, value in local.passwords : key => value + if var.per_workspace && try(value.experation_days, 0) > 0 + } + rotation_days = each.value.experation_days +} + +resource "time_static" "multiple" { + for_each = { + for key, value in local.passwords : key => value + if var.per_workspace && try(value.experation_days, 0) == 0 + } +} + +resource "random_password" "multiple" { + for_each = { + for key, value in local.passwords : key => value + if var.per_workspace + } + + length = try(each.value.length, 16) + lower = try(each.value.lower, true) + min_lower = try(each.value.min_lower, 0) + min_numeric = try(each.value.min_numeric, 0) + min_special = try(each.value.min_special, 0) + numeric = try(each.value.numeric, true) + override_special = try(each.value.override_special, "!#$%&*()-_=+[]{}<>:?") + special = try(each.value.special, false) + upper = try(each.value.upper, true) + +
keepers = { + id = try(time_rotating.multiple[each.key].id, null) != null ? time_rotating.multiple[each.key].id : time_static.multiple[each.key].id + } +} + diff --git a/terraform/modules/random/outputs.tf b/terraform/modules/random/outputs.tf new file mode 100755 index 00000000..9752a4fc --- /dev/null +++ b/terraform/modules/random/outputs.tf @@ -0,0 +1,31 @@ +output "results" { + description = "A map(string) with the following attributes: result, md5, sha1sha256, and sha512." + value = merge( + merge( + flatten([ + for key, value in var.passwords : { + "${key}" = { + md5 = md5(random_password.single[key].result) + result = random_password.single[key].result + sha1 = sha1(random_password.single[key].result) + sha256 = sha256(random_password.single[key].result) + sha512 = sha512(random_password.single[key].result) + } + } if !var.per_workspace + ]) + ...), + merge( + flatten([ + for key, value in local.passwords : { + "${key}" = { + md5 = md5(random_password.multiple[key].result) + result = random_password.multiple[key].result + sha1 = sha1(random_password.multiple[key].result) + sha256 = sha256(random_password.multiple[key].result) + sha512 = sha512(random_password.multiple[key].result) + } + } if var.per_workspace + ]) + ...) + ) +} \ No newline at end of file diff --git a/terraform/modules/random/variables.tf b/terraform/modules/random/variables.tf new file mode 100755 index 00000000..337e6a84 --- /dev/null +++ b/terraform/modules/random/variables.tf @@ -0,0 +1,32 @@ +variable "names" { + type = list(string) + description = "List of unique names for the multiple resources." + default = [] +} + +variable "passwords" { + description = "A map of objects with password settings." 
+ type = map( + object( + { + experation_days = optional(number, 0) + length = number + lower = optional(bool, false) + min_lower = optional(number, 0) + min_numeric = optional(number, 0) + min_special = optional(number, 0) + min_upper = optional(number, 0) + numeric = optional(bool, true) + override_special = optional(string, "!@#$%&*()-_=+[]{}<>:?") + special = optional(bool, true) + upper = optional(bool, true) + } + ) + ) +} + +variable "per_workspace" { + type = bool + description = "Generate a password for each workspace." + default = false +} \ No newline at end of file diff --git a/terraform/modules/service/.terraform-docs.yaml b/terraform/modules/service/.terraform-docs.yaml new file mode 100755 index 00000000..b480122f --- /dev/null +++ b/terraform/modules/service/.terraform-docs.yaml @@ -0,0 +1,2 @@ +header-from: .terraform-docs/header.md +footer-from: .terraform-docs/footer.md diff --git a/terraform/modules/service/.terraform-docs/footer.md b/terraform/modules/service/.terraform-docs/footer.md new file mode 100755 index 00000000..c5a416d6 --- /dev/null +++ b/terraform/modules/service/.terraform-docs/footer.md @@ -0,0 +1,42 @@ +## Examples + +### Basic +```terraform +module "services" { + source = "./modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env +} +``` + +### Advanced + +This advanced example will first generate service instances, such as RDS, along with other defined services, except for the `user defined` services. `User defined` services are useful for providing variables at runtime to applications. The issue is that until a service, such as RDS is deployed, there isn't a username and password created for that instance. + +The first step is to initialize any services that are not `user defined`, by setting `skip_user_provided_services` to `true`.
+ +```terraform +module "services" { + source = "./modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + + skip_user_provided_services = true +} +``` + +After the services are generated, another module block can be defined, which will pass a merged `map(string)` called `secrets`, that has the various information that is to be added to the `user defined` service. Setting the `skip_service_instances` to `true` will prevent the module from trying to redeploy any non `user defined` service. + +```terraform +module "secrets" { + source = "./modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + + secrets = local.secrets + skip_service_instances = true +} +``` \ No newline at end of file diff --git a/terraform/modules/service/.terraform-docs/header.md b/terraform/modules/service/.terraform-docs/header.md new file mode 100755 index 00000000..b8adab08 --- /dev/null +++ b/terraform/modules/service/.terraform-docs/header.md @@ -0,0 +1 @@ +# CloudFoundry Service Module diff --git a/terraform/modules/service/README.md b/terraform/modules/service/README.md new file mode 100644 index 00000000..a212a6c4 --- /dev/null +++ b/terraform/modules/service/README.md @@ -0,0 +1,87 @@ + +# CloudFoundry Service Module + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | > 1.7 | +| [cloudfoundry](#requirement\_cloudfoundry) | ~> 0.5 | + +## Providers + +| Name | Version | +|------|---------| +| [cloudfoundry](#provider\_cloudfoundry) | ~> 0.5 | + +## Modules + +No modules.
+ +## Resources + +| Name | Type | +|------|------| +| [cloudfoundry_service_instance.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/service_instance) | resource | +| [cloudfoundry_service_key.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/service_key) | resource | +| [cloudfoundry_user_provided_service.this](https://registry.terraform.io/providers/cloudfoundry-community/cloudfoundry/latest/docs/resources/user_provided_service) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [cloudfoundry](#input\_cloudfoundry) | Cloudfoundry settings. |
object(
{
domain_external = object(
{
domain = string
id = string
internal = bool
name = string
org = string
sub_domain = string
}
)
domain_internal = object(
{
domain = string
id = string
internal = bool
name = string
org = string
sub_domain = string
}
)
external_applications = map(string)
organization = object(
{
annotations = map(string)
id = string
labels = map(string)
name = string
}
)
services = map(
object(
{
id = string
name = string
service_broker_guid = string
service_broker_name = string
service_plans = map(string)
space = string
}
)
)
space = object(
{
annotations = map(string)
id = string
labels = map(string)
name = string
org = string
org_name = string
quota = string
}
)
}
)
| n/a | yes | +| [env](#input\_env) | The settings object for this environment. |
object({
api_url = optional(string, "https://api.fr.cloud.gov")
apps = optional(
map(
object({
allow_egress = optional(bool, true)
buildpacks = list(string)
command = optional(string, "entrypoint.sh")
disk_quota = optional(number, 1024)
enable_ssh = optional(bool, false)
environment = optional(map(string), {})
health_check_timeout = optional(number, 180)
health_check_type = optional(string, "port")
instances = optional(number, 1)
labels = optional(map(string), {})
memory = optional(number, 96)
network_policies = optional(map(number),{})
port = optional(number, 80)
public_route = optional(bool, false)
source = optional(string, null)
templates = list(map(string))
})
), {}
)
bootstrap_workspace = optional(string, "bootstrap")
defaults = object(
{
disk_quota = optional(number, 2048)
enable_ssh = optional(bool, true)
health_check_timeout = optional(number, 60)
health_check_type = optional(string, "port")
instances = optional(number, 1)
memory = optional(number, 64)
port = optional(number, 8080)
stack = optional(string, "cflinuxfs4")
stopped = optional(bool, false)
strategy = optional(string, "none")
timeout = optional(number, 300)
}
)
external_applications = optional(
map(
object(
{
environement = string
port = optional(number, 61443)
}
)
),{}
)
external_domain = optional(string, "app.cloud.gov")
internal_domain = optional(string, "apps.internal")
name_pattern = string
organization = optional(string, "gsa-tts-usagov")
passwords = optional(
list(
object(
{
length = optional(number, 32)
}
)
), []
)
project = string
secrets = optional(
map(
object(
{
encrypted = bool
key = string
}
)
), {}
)
services = optional(
map(
object(
{
applications = optional(list(string), [])
environement = optional(string, "dev")
service_key = optional(bool, true)
service_plan = optional(string, "basic")
service_type = optional(string, "s3")
tags = optional(list(string), [])
}
)
), {}
)
space = string
})
| n/a | yes | +| [secrets](#input\_secrets) | Sensitive strings to be added to the apps environmental variables. | `map` | `{}` | no | +| [skip\_service\_instances](#input\_skip\_service\_instances) | Allows the skipping of service instances. Useful to inject service secrets into a user provided secret. | `bool` | `false` | no | +| [skip\_user\_provided\_services](#input\_skip\_user\_provided\_services) | Allows the skipping of user provided services. Useful to inject service secrets into a user provided secret. | `bool` | `false` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [results](#output\_results) | n/a | + +## Examples + +### Basic +```terraform +module "services" { + source = "./modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env +} +``` + +### Advanced + +This advanced example will first generate service instances, such as RDS, along with other defined services, except for the `user defined` services. `User defined` services are useful for providing variables at runtime to applications. The issue is that until a service, such as RDS is deployed, there isn't a username and password created for that instance. + +The first step is to initialize any services that are not `user defined`, by setting `skip_user_provided_services` to `true`. + +```terraform +module "services" { + source = "./modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + + skip_user_provided_services = true +} +``` + +After the services are generated, another module block can be defined, which will pass a merged `map(string)` called `secrets`, that has the various information that is to be added to the `user defined` service. Setting the `skip_service_instances` to `true` will prevent the module from trying to redeploy any non `user defined` service.
+ +```terraform +module "secrets" { + source = "./modules/service" + + cloudfoundry = local.cloudfoundry + env = local.env + + secrets = local.secrets + skip_service_instances = true +} +``` + \ No newline at end of file diff --git a/terraform/modules/service/main.tf b/terraform/modules/service/main.tf new file mode 100755 index 00000000..ea18b7c8 --- /dev/null +++ b/terraform/modules/service/main.tf @@ -0,0 +1,61 @@ +locals { + credentials = merge( + flatten([ + for key, value in try(var.env.services,{}) : { + "${key}" = { + applications = value.applications + service_type = value.service_type + tags = value.tags + credentials = merge( + [ + for name in try(value.credentials, {}) : { + name = try(var.secrets[name], null) + } + ] + ...) + } + } if !var.skip_user_provided_services && + value.service_type == "user-provided" + ]) + ...) +} + + +resource "cloudfoundry_service_key" "this" { + for_each = { + for key, value in try(var.env.services, {}) : key => value + if !var.skip_service_instances && + value.service_type != "user-provided" && + try(value.service_key, false) + } + + name = format("%s-%s-%s", format(var.env.name_pattern, each.key), each.key, "svckey") + service_instance = cloudfoundry_service_instance.this[each.key].id +} + +resource "cloudfoundry_service_instance" "this" { + for_each = { + for key, value in try(var.env.services, {}) : key => value + if !var.skip_service_instances && + value.service_type != "user-provided" + } + + name = format(var.env.name_pattern, each.key) + json_params = try(each.value.json_params, null) + replace_on_params_change = try(each.value.replace_on_service_plan_change, false) + replace_on_service_plan_change = try(each.value.replace_on_service_plan_change, false) + space = var.cloudfoundry.space.id + service_plan = var.cloudfoundry.services[each.key].service_plans[each.value.service_plan] + tags = try(each.value.tags, []) +} + +resource "cloudfoundry_user_provided_service" "this" { + for_each = { + for key, value in 
local.credentials : key => value + } + + name = format(var.env.name_pattern, each.key) + space = var.cloudfoundry.space.id + credentials_json = jsonencode(try(each.value.credentials, {})) + tags = try(each.value.tags, []) +} diff --git a/terraform/modules/service/output.tf b/terraform/modules/service/output.tf new file mode 100755 index 00000000..8bb00463 --- /dev/null +++ b/terraform/modules/service/output.tf @@ -0,0 +1,7 @@ +output "results" { + value = { + instance = try(cloudfoundry_service_instance.this, null) + user_provided = try(cloudfoundry_user_provided_service.this, null) + service_key = try(cloudfoundry_service_key.this, {}) + } +} diff --git a/terraform/modules/service/providers.tf b/terraform/modules/service/providers.tf new file mode 100644 index 00000000..d106004d --- /dev/null +++ b/terraform/modules/service/providers.tf @@ -0,0 +1,9 @@ +terraform { + required_providers { + cloudfoundry = { + source = "cloudfoundry-community/cloudfoundry" + version = "~> 0.5" + } + } + required_version = "> 1.7" +} diff --git a/terraform/modules/service/variables.tf b/terraform/modules/service/variables.tf new file mode 100755 index 00000000..e4e64d73 --- /dev/null +++ b/terraform/modules/service/variables.tf @@ -0,0 +1,183 @@ +variable "cloudfoundry" { + description = "Cloudfoundry settings." 
+ type = object( + { + domain_external = object( + { + domain = string + id = string + internal = bool + name = string + org = string + sub_domain = string + } + ) + domain_internal = object( + { + domain = string + id = string + internal = bool + name = string + org = string + sub_domain = string + } + ) + external_applications = map(string) + organization = object( + { + annotations = map(string) + id = string + labels = map(string) + name = string + } + ) + services = map( + object( + { + id = string + name = string + service_broker_guid = string + service_broker_name = string + service_plans = map(string) + space = string + } + ) + ) + space = object( + { + annotations = map(string) + id = string + labels = map(string) + name = string + org = string + org_name = string + quota = string + } + ) + } + ) +} + +variable "env" { + description = "The settings object for this environment." + type = object({ + api_url = optional(string, "https://api.fr.cloud.gov") + apps = optional( + map( + object({ + allow_egress = optional(bool, true) + buildpacks = list(string) + command = optional(string, "entrypoint.sh") + disk_quota = optional(number, 1024) + enable_ssh = optional(bool, false) + environment = optional(map(string), {}) + health_check_timeout = optional(number, 180) + health_check_type = optional(string, "port") + instances = optional(number, 1) + labels = optional(map(string), {}) + memory = optional(number, 96) + network_policies = optional(map(number),{}) + port = optional(number, 80) + public_route = optional(bool, false) + source = optional(string, null) + stopped = optional(bool, false) + templates = optional(list(map(string)), []) + }) + ), {} + ) + bootstrap_workspace = optional(string, "bootstrap") + defaults = object( + { + disk_quota = optional(number, 2048) + enable_ssh = optional(bool, true) + health_check_timeout = optional(number, 60) + health_check_type = optional(string, "port") + instances = optional(number, 1) + memory = optional(number, 64) + 
port = optional(number, 8080) + stack = optional(string, "cflinuxfs4") + stopped = optional(bool, false) + strategy = optional(string, "none") + timeout = optional(number, 300) + } + ) + external_applications = optional( + map( + object( + { + environement = string + port = optional(number, 61443) + } + ) + ),{} + ) + external_domain = optional(string, "app.cloud.gov") + internal_domain = optional(string, "apps.internal") + name_pattern = string + organization = optional(string, "gsa-tts-usagov") + passwords = optional( + map( + object( + { + experation_days = optional(number, 0) + length = number + lower = optional(bool, false) + min_lower = optional(number, 0) + min_numeric = optional(number, 0) + min_special = optional(number, 0) + min_upper = optional(number, 0) + numeric = optional(bool, true) + override_special = optional(string, "!@#$%&*()-_=+[]{}<>:?") + special = optional(bool, true) + upper = optional(bool, true) + } + ) + ), {} + ) + project = string + secrets = optional( + map( + object( + { + encrypted = bool + key = string + } + ) + ), {} + ) + services = optional( + map( + object( + { + applications = optional(list(string), []) + environement = optional(string, "dev") + service_key = optional(bool, true) + service_plan = optional(string, "basic") + service_type = optional(string, "s3") + tags = optional(list(string), []) + } + ) + ), {} + ) + space = string + }) +} + +variable "skip_service_instances" { + description = "Allows the skipping of service instances. Useful to inject service secrets into a user provided secret." + type = bool + default = false +} + +variable "skip_user_provided_services" { + description = "Allows the skipping of user provided services. Useful to inject service secrets into a user provided secret." + type = bool + default = false +} + +variable "secrets" { + default = {} + description = "Sensitive strings to be added to the apps environmental variables." + type = map + sensitive = true +} \ No newline at end of file