diff --git a/.default.docker.env b/.default.docker.env new file mode 100644 index 0000000..8f5d0ac --- /dev/null +++ b/.default.docker.env @@ -0,0 +1,6 @@ +MAVEN_BUILDER_VERSION=3-openjdk-17-slim +SONARQUBE_VERSION=10.6.0-community +POSTGRES_VERSION=12 +SONAR_LOG_LEVEL_WEB=INFO +DOCKER_BUILDKIT=1 +COMPOSE_DOCKER_CLI_BUILD=1 diff --git a/.gitattributes b/.gitattributes index 12b960c..1a621d8 100644 --- a/.gitattributes +++ b/.gitattributes @@ -4,4 +4,4 @@ # Ensure BAT files will always be checked out with CRLFs (regardless of the # OS they were checked out on). -*.bat text eol=crlf \ No newline at end of file +*.bat text eol=crlf diff --git a/.github/workflows/bash_tests.yml b/.github/workflows/bash_tests.yml new file mode 100644 index 0000000..9610e11 --- /dev/null +++ b/.github/workflows/bash_tests.yml @@ -0,0 +1,49 @@ +name: Bash Tests + +on: + pull_request: + push: + branches: + - main + tags: + - '*' + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + # Setup Python + - uses: actions/setup-python@v2 + with: + python-version: '3.12' + + # Cache the installation of Poetry itself + - name: cache poetry install + uses: actions/cache@v2 + with: + path: ~/.local + key: poetry-1.7.1-0 + + # Install Poetry + - uses: snok/install-poetry@v1 + with: + version: 1.8.3 + virtualenvs-create: true + virtualenvs-in-project: true + + # Cache dependencies + - name: cache deps + id: cache-deps + uses: actions/cache@v2 + with: + path: .venv + key: pydeps-${{ hashFiles('**/poetry.lock') }} + + # Install dependencies + - run: poetry install --no-interaction --no-root + if: steps.cache-deps.outputs.cache-hit != 'true' + + # Run tests + - run: poetry run pytest tests/test_*.py diff --git a/.github/workflows/build_container.yml b/.github/workflows/build_container.yml index eda94a3..235ffa4 100644 --- a/.github/workflows/build_container.yml +++ b/.github/workflows/build_container.yml @@ -71,6 +71,7 @@ jobs: - name: Publish image id: push uses: 
docker/build-push-action@v4 + if: github.event_name == 'pull_request' && github.event.action == 'closed' with: push: true tags: ${{ steps.meta.outputs.tags }} diff --git a/.gitignore b/.gitignore index 67cc592..7fcea49 100644 --- a/.gitignore +++ b/.gitignore @@ -1,13 +1,18 @@ +# Ignore IDE files +*.iml +.idea + # Ignore all files and folders starting with ".", except a few exceptions .* !.gitignore !.gitattributes !.github/ +!.default.docker.env # Ignore generated files target bin dependency-reduced-pom.xml - -# Ignore IDE files -*.iml +release.properties +pom.xml.* +tests/__pycache__ diff --git a/Dockerfile b/Dockerfile index 421eacd..579dcf7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,21 @@ -ARG MAVEN_BUILDER=3-openjdk-17-slim -ARG SONARQUBE_VERSION=10.6.0-community +# syntax=docker/dockerfile:1 -FROM maven:${MAVEN_BUILDER} AS builder +ARG MAVEN_BUILDER_VERSION +ARG SONARQUBE_VERSION -COPY . /usr/src/ecocode +FROM maven:${MAVEN_BUILDER_VERSION} AS builder -WORKDIR /usr/src/ecocode -COPY src src/ -COPY pom.xml tool_build.sh ./ +ARG ECOCODE_SRC_PATH=/usr/src/ecocode -RUN ./tool_build.sh +COPY . ${ECOCODE_SRC_PATH} + +WORKDIR ${ECOCODE_SRC_PATH} +RUN ${ECOCODE_SRC_PATH}/toolbox.sh build FROM sonarqube:${SONARQUBE_VERSION} -COPY --from=builder /usr/src/ecocode/target/ecocode-*.jar /opt/sonarqube/extensions/plugins/ + +ARG ECOCODE_SRC_PATH=/usr/src/ecocode +ARG SONARQUBE_PLUGINS_PATH=/opt/sonarqube/extensions/plugins/ + +COPY --from=builder ${ECOCODE_SRC_PATH}/target/ecocode-*.jar ${SONARQUBE_PLUGINS_PATH} USER sonarqube diff --git a/docker-compose.yml b/docker-compose.yml index e84e389..2ff0805 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,8 +1,13 @@ +--- name: sonarqube_ecocode_java - services: sonar: - build: . + build: + context: . 
+ dockerfile: Dockerfile + args: + - MAVEN_BUILDER_VERSION=${MAVEN_BUILDER_VERSION} + - SONARQUBE_VERSION=${SONARQUBE_VERSION} container_name: sonar_ecocode_java ports: - ":9000" @@ -16,18 +21,13 @@ services: SONAR_JDBC_PASSWORD: sonar SONAR_JDBC_URL: jdbc:postgresql://db:5432/sonarqube SONAR_ES_BOOTSTRAP_CHECKS_DISABLE: 'true' - env_file: - - path: ./.default.docker.env - required: true - - path: ./.override.docker.env - required: false volumes: - "extensions:/opt/sonarqube/extensions" - "logs:/opt/sonarqube/logs" - "data:/opt/sonarqube/data" db: - image: postgres:12 + image: postgres:${POSTGRES_VERSION} container_name: postgresql_ecocode_java networks: - sonarnet diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..974d065 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,137 @@ +# toolbox.sh + +## Overview + +This toolbox enables you to install the SonarQube dev environment. + +## Index + +* [build](#build) +* [compile](#compile) +* [docker_env_source](#dockerenvsource) +* [docker_build](#dockerbuild) +* [init](#init) +* [start](#start) +* [stop](#stop) +* [clean](#clean) +* [display_logs](#displaylogs) +* [release](#release) +* [release_push](#releasepush) +* [display_help](#displayhelp) + +### build + +Compile and package source code with maven. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered when building source code. +* **2**: If the ecoCode plugin in target directory cannot be found. + +### compile + +Compile source code with maven. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered when compiling the source code. + +### docker_env_source + +Export environment variables from .default.docker.env file. + +#### Exit codes + +* **0**: If successful. +* **1**: If the environment file cannot be found. + +### docker_build + +Build Docker services. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered retrieving environment variables. 
+* **2**: If an error has been encountered when building services. + +### init + +Building the ecoCode plugin and creating containers. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered when building project code in the target folder. +* **2**: If an error was encountered retrieving environment variables. +* **3**: If an error was encountered during container creating. + +### start + +Starting Docker containers. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered retrieving environment variables. +* **2**: If the ecoCode plugin is not present in the target folder. +* **3**: If an error was encountered during container startup. + +### stop + +Stopping Docker containers. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered retrieving environment variables. +* **2**: If an error was encountered during container shutdown. + +### clean + +Stop and remove containers, networks and volumes. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered retrieving environment variables. +* **2**: If an error was encountered during deletion. + +### display_logs + +Display Docker container logs. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error was encountered retrieving environment variables. + +### release + +Use maven plugin release to prepare locally next release and next SNAPSHOT. + +#### Exit codes + +* **0**: If successful. +* **1**: If an error is encountered when prepare the release. +* **2**: If an error is encountered when cleaning files. + +### release_push + +Create a push and a new branch with commits previously prepared. + +#### Exit codes + +* **0**: If successful. +* **1**: If the last commit tag does not match the last git tag. + +### display_help + +Display help. + +#### Exit codes + +* **0**: If successful. 
+ diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..ce44941 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,224 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "psutil" +version = "6.0.0" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, + {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, + {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = 
"psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, + {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "pytest" +version = "8.3.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"}, + {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-helpers-namespace" +version = "2021.12.29" +description = "Pytest Helpers Namespace Plugin" +optional = false +python-versions = ">=3.5.6" +files = [ + {file = "pytest-helpers-namespace-2021.12.29.tar.gz", hash = 
"sha256:792038247e0021beb966a7ea6e3a70ff5fcfba77eb72c6ec8fd6287af871c35b"}, + {file = "pytest_helpers_namespace-2021.12.29-py3-none-any.whl", hash = "sha256:d5c0262642998437a73d85cb6ae0db57d574facc551c4a4695e92ec50469eb98"}, +] + +[package.dependencies] +pytest = ">=6.0.0" + +[package.extras] +changelog = ["towncrier (==21.9.0rc1)"] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-prompt", "sphinxcontrib-spelling", "sphinxcontrib-towncrier (>=0.2.0a0)", "towncrier (==21.3.0)"] +lint = ["black", "pyenchant", "pylint (==2.12.2)", "reorder-python-imports"] + +[[package]] +name = "pytest-shell-utilities" +version = "1.9.0" +description = "Pytest plugin to simplify running shell commands against the system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-shell-utilities-1.9.0.tar.gz", hash = "sha256:e9ae85f8c64dd2b95e35ed1f110a90e25d498cb1af946be7cf1b7f069c089499"}, + {file = "pytest_shell_utilities-1.9.0-py3-none-any.whl", hash = "sha256:6c05c90cc8bc04fe4e3cbe342acfff5559f88816d1d20c80e7713895cc79687c"}, +] + +[package.dependencies] +attrs = ">=22.1.0" +psutil = ">=5.0.0" +pytest = ">=7.4.0" +pytest-helpers-namespace = "*" +pytest-skip-markers = "*" + +[package.extras] +changelog = ["towncrier (==21.9.0rc1)"] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-prompt", "sphinxcontrib-spelling", "sphinxcontrib-towncrier (>=0.2.1a0)"] +lint = ["black", "flake8 (>=4.0.1)", "flake8-docstrings", "flake8-mypy-fork", "flake8-typing-imports", "pyenchant", "pylint (==2.12.2)", "reorder-python-imports"] +tests = ["pytest-skip-markers", "pytest-subtests"] + +[[package]] +name = "pytest-skip-markers" +version = "1.5.1" +description = "Pytest Salt Plugin" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-skip-markers-1.5.1.tar.gz", hash = "sha256:b208b6b804a320691f9e3a0d0721345a8710b9791c4324499294b6a8e4b0e82b"}, + {file = "pytest_skip_markers-1.5.1-py3-none-any.whl", hash = 
"sha256:9e3f5ede6e1e6d6dc65e83136e9335abe7d5c4f2f55b94b4fb83d3da5b8d8f0c"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +distro = "*" +pytest = ">=7.1.0" +pywin32 = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +changelog = ["towncrier (==21.9.0rc1)"] +docker = ["docker"] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-prompt", "sphinxcontrib-spelling", "sphinxcontrib-towncrier (>=0.2.1a0)"] +lint = ["black", "flake8 (>=4.0.1)", "flake8-docstrings", "flake8-mypy-fork", "flake8-typing-imports", "pyenchant", "pylint (==2.12.2)", "reorder-python-imports"] +salt = ["salt (>=3001)"] +tests = ["cryptography", "docker", "pyfakefs", "pyfakefs (==4.4.0)", "pytest-subtests"] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = 
"sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "52d1ac66e8fdd8d71e8a7eeda6e466f1dc9fa619c49b1d03fed13a074de09714" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..83a9c5b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,21 @@ +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "ecoCode-java" +version = "1.0.0" +description = "" +authors = ["ecoCode"] +license = "GPL-3.0" +readme = "README.md" +repository = "https://github.com/green-code-initiative/ecoCode-java" +packages = [{include = "tests/*.py"}] + +[tool.poetry.dependencies] +python = "^3.12" + +[tool.poetry.group.test] +[tool.poetry.group.test.dependencies] +pytest = "^8.2.2" +pytest-shell-utilities = "^1.9.0" diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..41ed379 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +filterwarnings = + ignore:'pipes' is deprecated:DeprecationWarning diff --git a/tests/test_toolbox.py b/tests/test_toolbox.py new file mode 100644 index 0000000..ca16183 --- /dev/null +++ b/tests/test_toolbox.py @@ -0,0 +1,215 @@ +import inspect +import os +import pytest + + +current_dir: str = 
os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) +if os.environ['HOME'] == "/app": + current_dir = "/app/tests" + +project_path: str = os.path.abspath(f"{current_dir}/..") +script: str = os.path.abspath(f"{current_dir}/../toolbox.sh") + + +def test_function_empty(shell): + ret = shell.run(script, "--test") + assert ret.stderr.rstrip() == "No function to execute" + assert ret.returncode == 1 + + +def test_function_not_exist(shell): + ret = shell.run(script, "test_function", "--test") + assert ret.stderr.rstrip() == "Function with name test_function does not exist" + assert ret.returncode == 2 + + +@pytest.mark.parametrize("color_key, color_value", [ + ("R","\x1b[0;31m"), + ("G", "\x1b[0;32m"), + ("B", "\x1b[0;34m"), + ("Y", "\x1b[0;33m"), + ("W", "\x1b[0;37m"), + ("N", "\x1b[0;0m") +]) +def test_colors(shell, color_key, color_value): + ret = shell.run(script, "colors", color_key, "--test") + assert ret.stdout.rstrip() == color_value + assert ret.returncode == 0 + + +def test_info(shell): + ret = shell.run(script, "info", "msg_info", "--test") + assert ret.stdout.rstrip() == "msg_info" + assert ret.returncode == 0 + + +def test_debug(shell): + ret = shell.run(script, "debug", "msg_debug", "--test") + assert ret.stdout.rstrip() == "msg_debug" + assert ret.returncode == 0 + + +def test_error(shell): + ret = shell.run(script, "error", "msg_error", "--test") + assert ret.stderr.rstrip() == "msg_error" + assert ret.returncode == 0 + + +def test_build(shell): + ret = shell.run(script, "build", "--test") + assert len(ret.stdout.splitlines()) == 2 + assert ret.stdout.splitlines()[0] == "Building source code in the target folder" + assert ret.stdout.splitlines()[1] == "mvn clean package -Dmaven.clean.failOnError=false -DskipTests" + assert ret.returncode == 0 + + +def test_compile(shell): + ret = shell.run(script, "compile", "--test") + assert len(ret.stdout.splitlines()) == 2 + assert ret.stdout.splitlines()[0] == "Compile source code" + assert 
ret.stdout.splitlines()[1] == "mvn clean compile" + assert ret.returncode == 0 + + +def test_docker_env_source_not_exist(shell): + ret = shell.run(script, "docker_env_source", "--test", "--fixture=1") + assert len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 1 + + +def test_docker_env_source(shell): + ret = shell.run(script, "docker_env_source", "--test") + assert len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == f"source {project_path}/.default.docker.env" + assert ret.returncode == 0 + + +def test_docker_build_env_source_not_exist(shell): + ret = shell.run(script, "docker_build", "--test", "--fixture=1") + assert len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 1 + + +def test_docker_build(shell): + ret = shell.run(script, "docker_build", "--test") + assert len(ret.stdout.splitlines()) == 3 + assert ret.stdout.splitlines()[0] == f"source {project_path}/.default.docker.env" + assert ret.stdout.splitlines()[1] == "Build Docker services" + assert ret.stdout.splitlines()[2] == f"docker compose -f {project_path}/docker-compose.yml build" + assert ret.returncode == 0 + + +def test_init_env_source_not_exist(shell): + ret = shell.run(script, "init", "--test", "--fixture=1") + assert len(ret.stdout.splitlines()) == 3 + assert ret.stdout.splitlines()[0] == "Building source code in the target folder" + assert ret.stdout.splitlines()[1] == "mvn clean package -Dmaven.clean.failOnError=false -DskipTests" + assert ret.stdout.splitlines()[2] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 2 + + +def test_init(shell): + ret = shell.run(script, "init", "--test") + assert len(ret.stdout.splitlines()) == 5 + assert 
ret.stdout.splitlines()[0] == "Building source code in the target folder" + assert ret.stdout.splitlines()[1] == "mvn clean package -Dmaven.clean.failOnError=false -DskipTests" + assert ret.stdout.splitlines()[2] == f"source {project_path}/.default.docker.env" + assert ret.stdout.splitlines()[3] == "Creating and starting Docker containers" + assert ret.stdout.splitlines()[4] == f"docker compose -f {project_path}/docker-compose.yml up --build -d" + assert ret.returncode == 0 + + +def test_start_env_source_not_exist(shell): + ret = shell.run(script, "start", "--test", "--fixture=1") + assert len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 1 + + +def test_start(shell): + ret = shell.run(script, "start", "--test") + assert len(ret.stdout.splitlines()) == 3 + assert ret.stdout.splitlines()[0] == f"source {project_path}/.default.docker.env" + assert ret.stdout.splitlines()[1] == "Starting Docker containers" + assert ret.stdout.splitlines()[2] == f"docker compose -f {project_path}/docker-compose.yml start" + assert ret.returncode == 0 + + +def test_stop_env_source_not_exist(shell): + ret = shell.run(script, "stop", "--test", "--fixture=1") + assert len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 1 + + +def test_stop(shell): + ret = shell.run(script, "stop", "--test") + assert len(ret.stdout.splitlines()) == 3 + assert ret.stdout.splitlines()[0] == f"source {project_path}/.default.docker.env" + assert ret.stdout.splitlines()[1] == "Stopping Docker containers" + assert ret.stdout.splitlines()[2] == f"docker compose -f {project_path}/docker-compose.yml stop" + assert ret.returncode == 0 + + +def test_clean_env_source_not_exist(shell): + ret = shell.run(script, "clean", "--test", "--fixture=1") + assert 
len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 1 + + +def test_clean(shell): + ret = shell.run(script, "clean", "--test") + assert len(ret.stdout.splitlines()) == 3 + assert ret.stdout.splitlines()[0] == f"source {project_path}/.default.docker.env" + assert ret.stdout.splitlines()[1] == "Remove Docker containers, networks and volumes" + assert ret.stdout.splitlines()[2] == f"docker compose -f {project_path}/docker-compose.yml down --volumes" + assert ret.returncode == 0 + + +def test_display_logs_env_source_not_exist(shell): + ret = shell.run(script, "display_logs", "--test", "--fixture=1") + assert len(ret.stdout.splitlines()) == 1 + assert ret.stdout.splitlines()[0] == "source test_docker_env" + assert ret.stderr.rstrip() == "Cannot find test_docker_env" + assert ret.returncode == 1 + + +def test_display_logs(shell): + ret = shell.run(script, "display_logs", "--test") + assert len(ret.stdout.splitlines()) == 3 + assert ret.stdout.splitlines()[0] == f"source {project_path}/.default.docker.env" + assert ret.stdout.splitlines()[1] == "Display Docker container logs" + assert ret.stdout.splitlines()[2] == f"docker compose -f {project_path}/docker-compose.yml logs -f" + assert ret.returncode == 0 + + +def test_release(shell): + ret = shell.run(script, "release", "--test") + assert len(ret.stdout.splitlines()) == 4 + assert ret.stdout.splitlines()[0] == "Creation of 2 commits with release and next SNAPSHOT" + assert ret.stdout.splitlines()[1] == "mvn release:prepare -B -ff -DpushChanges=false -DtagNameFormat=@{project.version}" + assert ret.stdout.splitlines()[2] == "Clean temporary files" + assert ret.stdout.splitlines()[3] == "mvn release:clean" + assert ret.returncode == 0 + + +def test_release_push(shell): + ret = shell.run(script, "release_push", "--test") + assert len(ret.stdout.splitlines()) == 1 + assert 
ret.stdout.splitlines()[0] == "Create a push and a new branch with commits previously prepared" + assert ret.returncode == 0 + + +def test_display_help(shell): + ret = shell.run(script, "display_help", "--test") + assert len(ret.stdout.splitlines()) == 18 + assert ret.returncode == 0 diff --git a/toolbox.Dockerfile b/toolbox.Dockerfile new file mode 100644 index 0000000..ef18a3a --- /dev/null +++ b/toolbox.Dockerfile @@ -0,0 +1,55 @@ +FROM maven:3.9.6-eclipse-temurin-21-alpine AS maven + +FROM python:3.12-alpine3.20 AS builder + +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=1 \ + POETRY_VIRTUALENVS_CREATE=1 \ + POETRY_CACHE_DIR=/tmp/poetry_cache \ + PYTHONFAULTHANDLER=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONHASHSEED=random \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + POETRY_VERSION=1.8.3 + +RUN apk add --update --no-cache gcc libc-dev musl-dev linux-headers python3-dev +RUN pip install "poetry==$POETRY_VERSION" +WORKDIR /app +COPY pyproject.toml poetry.lock ./ +RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --no-root --no-ansi + +FROM python:3.12-alpine3.20 AS runtime + +ENV VIRTUAL_ENV=/app/.venv \ + PATH="/app/.venv/bin:$PATH" + +# Installing prerequisites +RUN apk add --update --no-cache bash curl shellcheck gawk git make docker docker-cli-compose openrc \ + && rm -rf /var/cache/apk/* +RUN rc-update add docker boot + +# Install shdoc +RUN git clone --recursive https://github.com/reconquest/shdoc /tmp/shdoc +RUN make install -C /tmp/shdoc + +# Install java and maven +COPY --from=maven /opt/java/openjdk /opt/java/openjdk +COPY --from=maven /usr/share/maven /usr/share/maven +RUN ln -s /usr/share/maven/bin/mvn /usr/bin/mvn + +COPY --from=maven /usr/bin/mvn /usr/bin/mvn +ENV PATH="/opt/java/openjdk/bin:$PATH" +ENV MAVEN_HOME="/usr/share/maven" +ENV JAVA_HOME="/opt/java/openjdk" + +# Create user +RUN addgroup -g 1000 app \ + && adduser --home /app -G app -u 1000 app -D +USER app +WORKDIR /app + +# Copy the Python 
virtual environment +COPY --chown=app:app --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +CMD ["/bin/bash"] diff --git a/toolbox.sh b/toolbox.sh new file mode 100755 index 0000000..855f58e --- /dev/null +++ b/toolbox.sh @@ -0,0 +1,365 @@ +#!/usr/bin/env bash +# @name toolbox.sh +# @description +# This toolbox enables you to install the SonarQube dev environment. + +# Global variables +CURRENT_PATH="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" +ECOCODE_DC_FILE="$CURRENT_PATH/docker-compose.yml" +ECOCODE_DOCKER_ENV="$CURRENT_PATH/.default.docker.env" +ECOCODE_JAVA_PLUGIN_VERSION=$(< "$CURRENT_PATH/pom.xml" grep "<version>"|head -n1|sed 's/<\(\/\)*version>//g'|xargs) +ECOCODE_JAVA_PLUGIN_JAR="$CURRENT_PATH/target/ecocode-java-plugin-$ECOCODE_JAVA_PLUGIN_VERSION.jar" + +# Shell coloring +function colors() { + case $1 in + 'R') echo -e '\033[0;31m' ;; # RED + 'G') echo -e '\033[0;32m' ;; # GREEN + 'B') echo -e '\033[0;34m' ;; # BLUE + 'Y') echo -e '\033[0;33m' ;; # YELLOW + 'W') echo -e '\033[0;37m' ;; # WHITE + 'N') echo -e '\033[0;0m' ;; # NOCOLOR + esac +} + +function info() { + if [[ $TEST -gt 0 ]]; then + echo "$*" + else + echo -e "$(colors 'W')$*$(colors 'N')" + fi + return 0 +} + +function debug() { + if [[ $((VERBOSE+TEST)) -gt 0 ]]; then + if [[ $TEST -gt 0 ]]; then + echo "$*" + else + echo -e "$(colors 'B')$*$(colors 'N')" + fi + fi + return 0 +} + +function error() { + if [[ $TEST -gt 0 ]]; then + >&2 echo -e "$*" + else + >&2 echo -e "$(colors 'R')$*$(colors 'N')" + fi + return 0 +} + +# @description Compile and package source code with maven. +# @exitcode 0 If successful. +# @exitcode 1 If an error was encountered when building source code. +# @exitcode 2 If the ecoCode plugin in target directory cannot be found. +function build() { + info "Building source code in the target folder" + if ! 
[[ -f $ECOCODE_JAVA_PLUGIN_JAR ]] || [[ $FORCE -gt 0 ]] || [[ $TEST -gt 0 ]]; then
+        debug "mvn clean package -Dmaven.clean.failOnError=false -DskipTests" ; [[ $TEST -gt 0 ]] && return 0
+        if ! mvn clean package -Dmaven.clean.failOnError=false -DskipTests; then
+            return 1
+        fi
+    fi
+    # Check that the plugin is present in the target folder
+    if ! [[ -f $ECOCODE_JAVA_PLUGIN_JAR ]]; then
+        error "Cannot find ecoCode plugin in target directory" && return 2
+    fi
+    return 0
+}
+
+# @description Compile source code with maven.
+# @exitcode 0 If successful.
+# @exitcode 1 If an error was encountered when compiling the source code.
+function compile() {
+    info "Compile source code"
+    debug "mvn clean compile" ; [[ $TEST -gt 0 ]] && return 0
+    if ! mvn clean compile; then
+        return 1
+    fi
+    return 0
+}
+
+# @description Export environment variables from .default.docker.env file.
+# @exitcode 0 If successful.
+# @exitcode 1 If the environment file cannot be found.
+function docker_env_source() {
+    debug "source $ECOCODE_DOCKER_ENV"
+    # To export variables
+    set -a
+    # shellcheck source=.default.docker.env
+    ! source "$ECOCODE_DOCKER_ENV" &>/dev/null && error "Cannot find $ECOCODE_DOCKER_ENV" && return 1
+    set +a
+    return 0
+}
+
+# @description Build Docker services.
+# @exitcode 0 If successful.
+# @exitcode 1 If an error was encountered retrieving environment variables.
+# @exitcode 2 If an error has been encountered when building services.
+function docker_build() {
+    ! docker_env_source && return 1
+    [[ $FORCE -gt 0 ]] && rm -rf "$CURRENT_PATH/target"/*
+    info "Build Docker services"
+    debug "docker compose -f $ECOCODE_DC_FILE build" ; [[ $TEST -gt 0 ]] && return 0
+    ! docker compose -f "$ECOCODE_DC_FILE" build && return 2
+    return 0
+}
+
+# @description Building the ecoCode plugin and creating containers.
+# @exitcode 0 If successful.
+# @exitcode 1 If an error was encountered when building project code in the target folder.
+# @exitcode 2 If an error was encountered retrieving environment variables. +# @exitcode 3 If an error was encountered during container creating. +function init() { + ! build && return 1 + ! docker_env_source && return 2 + info "Creating and starting Docker containers" + debug "docker compose -f $ECOCODE_DC_FILE up --build -d" ; [[ $TEST -gt 0 ]] && return 0 + ! docker compose -f "$ECOCODE_DC_FILE" up --build -d && return 3 + return 0 +} + +# @description Starting Docker containers. +# @exitcode 0 If successful. +# @exitcode 1 If an error was encountered retrieving environment variables. +# @exitcode 2 If the ecoCode plugin is not present in the target folder. +# @exitcode 3 If an error was encountered during container startup. +function start() { + ! docker_env_source && return 1 + # Check that the plugin is present in the target folder + if [[ $TEST -eq 0 ]] && ! [[ -f $ECOCODE_JAVA_PLUGIN_JAR ]]; then + error "Cannot find ecoCode plugin in target directory" && return 2 + fi + info "Starting Docker containers" + debug "docker compose -f $ECOCODE_DC_FILE start" ; [[ $TEST -gt 0 ]] && return 0 + ! TOKEN=$ECOCODE_TOKEN docker compose -f "$ECOCODE_DC_FILE" start && return 3 + return 0 +} + +# @description Stopping Docker containers. +# @exitcode 0 If successful. +# @exitcode 1 If an error was encountered retrieving environment variables. +# @exitcode 2 If an error was encountered during container shutdown. +function stop() { + ! docker_env_source && return 1 + info "Stopping Docker containers" + debug "docker compose -f $ECOCODE_DC_FILE stop" ; [[ $TEST -gt 0 ]] && return 0 + ! docker compose -f "$ECOCODE_DC_FILE" stop && return 2 + return 0 +} + +# @description Stop and remove containers, networks and volumes. +# @exitcode 0 If successful. +# @exitcode 1 If an error was encountered retrieving environment variables. +# @exitcode 2 If an error was encountered during deletion. +function clean() { + ! 
docker_env_source && return 1 + info "Remove Docker containers, networks and volumes" + debug "docker compose -f $ECOCODE_DC_FILE down --volumes" ; [[ $TEST -gt 0 ]] && return 0 + ! docker compose -f "$ECOCODE_DC_FILE" down --volumes && return 2 + [[ $FORCE -gt 0 ]] && rm -rf "$CURRENT_PATH/target" + return 0 +} + +# @description Display Docker container logs. +# @exitcode 0 If successful. +# @exitcode 1 If an error was encountered retrieving environment variables. +function display_logs() { + ! docker_env_source && return 1 + info "Display Docker container logs" + debug "docker compose -f $ECOCODE_DC_FILE logs -f" ; [[ $TEST -gt 0 ]] && return 0 + docker compose -f "$ECOCODE_DC_FILE" logs -f + return 0 +} + +# @description Use maven plugin release to prepare locally next release and next SNAPSHOT. +# @exitcode 0 If successful. +# @exitcode 1 If an error is encountered when prepare the release. +# @exitcode 2 If an error is encountered when cleaning files. +function release() { + info "Creation of 2 commits with release and next SNAPSHOT" + debug "mvn release:prepare -B -ff -DpushChanges=false -DtagNameFormat=@{project.version}" + if [[ $TEST -eq 0 ]]; then + if ! mvn release:prepare -B -ff -DpushChanges=false -DtagNameFormat=@{project.version}; then + return 1 + fi + fi + info "Clean temporary files" + debug "mvn release:clean" ; [[ $TEST -gt 0 ]] && return 0 + sleep 2 + if ! mvn release:clean; then + return 2 + fi + return 0 +} + +# @description Create a push and a new branch with commits previously prepared. +# @exitcode 0 If successful. +# @exitcode 1 If the last commit tag does not match the last git tag. 
+function release_push() {
+    info "Create a push and a new branch with commits previously prepared"
+    [[ $TEST -gt 0 ]] && return 0
+    local last_tag_prepare="" last_tag="" branch_name=""
+    # Check that the release has been properly prepared
+    last_tag_prepare=$(git log -2 --pretty=%B|grep "prepare release"|awk '{print $NF}')
+    # Retrieve last tag
+    last_tag=$(git tag --sort=-version:refname | head -n 1)
+    # Check that the tag is correct
+    if ! [[ "$last_tag_prepare" = "$last_tag" ]]; then
+        error "The last commit tag does not match the last git tag"
+        return 1
+    fi
+    # Checkout released tag and creation of branch to push (because of main protection)
+    branch_name="release_${last_tag}"
+    git checkout -b "${branch_name}"
+    # push branch associated to new tag release
+    git push --set-upstream origin "${branch_name}"
+    return 0
+}
+
+# @description Display help.
+# @exitcode 0 If successful.
+function display_help() {
+    local output=""
+    output="
+$(colors 'Y')Usage$(colors 'W') $(basename "$0") [OPTIONS] COMMAND
+$(colors 'Y')Commands:$(colors 'N')
+$(colors 'G')init$(colors 'W') Initialize and creating containers
+$(colors 'G')start$(colors 'W') Starting Docker containers
+$(colors 'G')stop$(colors 'W') Stopping Docker containers
+$(colors 'G')clean$(colors 'W') Stop and remove containers, networks and volumes
+$(colors 'G')build$(colors 'W') Build the ecoCode plugin
+$(colors 'G')compile$(colors 'W') Compile the ecoCode plugin
+$(colors 'G')build-docker$(colors 'W') Build Docker services
+$(colors 'G')release$(colors 'W') Create a new release
+$(colors 'G')release-push$(colors 'W') Push the new release
+$(colors 'Y')Options:$(colors 'N')
+$(colors 'G')--token=$(colors 'W') Creating containers with previously created token
+$(colors 'G')-v, --verbose$(colors 'W') Make the command more talkative
+$(colors 'G')-l, --logs$(colors 'W') Display Docker container logs
+$(colors 'G')-f, --force$(colors 'W') To delete the target folder or recompile the source code
+$(colors
'G')-h, --help$(colors 'W') Display help + " + echo -e "$output\n"|sed '1d; $d' + return 0 +} + +# Check options passed as script parameters. +function check_opts() { + read -ra opts <<< "$@" + for opt in "${opts[@]}"; do + case "$opt" in + init) INIT=1 ; ARGS+=("$opt") ;; + start) START=1 ; ARGS+=("$opt") ;; + stop) STOP=1 ; ARGS+=("$opt") ;; + clean) CLEAN=1 ; ARGS+=("$opt") ;; + release) RELEASE=1 ; ARGS+=("$opt") ;; + release-push) RELEASE_PUSH=1 ; ARGS+=("$opt") ;; + build) BUILD=1 ; ARGS+=("$opt") ;; + compile) COMPILE=1 ; ARGS+=("$opt") ;; + build-docker) BUILD_DOCKER=1 ; ARGS+=("$opt") ;; + --token=*) ECOCODE_TOKEN=$(echo "$opt"|awk -F= '{print $2}') ;; + --verbose|-v) VERBOSE=1 ;; + --logs|-l) DISPLAY_LOGS=1 ;; + --force|-f) FORCE=1 ;; + --test) TEST=1 ;; + --fixture=*) FIXTURE=$(echo "$opt"|awk -F= '{print $2}') ;; + --help) HELP=1 ;; + *) ARGS+=("$opt") ;; + esac + done + # Help is displayed if no option is passed as script parameter + if [[ $((HELP+INIT+START+STOP+CLEAN+RELEASE+RELEASE_PUSH+BUILD+COMPILE+BUILD_DOCKER+DISPLAY_LOGS)) -eq 0 ]]; then + HELP=1 + fi + return 0 +} + +# Used by unit tests to execute a function. +function execute_unit_test() { + if [[ -z "${ARGS[0]}" ]]; then + error "No function to execute" && return 1 + fi + # If a function is passed as the first argument, we check that it exists + if ! [[ $(type -t "${ARGS[0]}") == function ]]; then + error "Function with name ${ARGS[0]} does not exist" && return 2 + fi + # Initialize fixtures + [[ $FIXTURE -eq 1 ]] && ECOCODE_DOCKER_ENV="test_docker_env" + # execute function + "${ARGS[@]}" + return $? +} + +# Execute tasks based on script parameters or user actions. +function execute_tasks() { + # Display help + if [[ $HELP -gt 0 ]]; then + ! display_help && return 1 + return 0 + fi + # Building the ecoCode plugin and creating Docker containers + if [[ $INIT -gt 0 ]]; then + ! init && return 2 + fi + # Starting Docker containers + if [[ $START -gt 0 ]]; then + ! 
start && return 3 + fi + # Stopping Docker containers + if [[ $STOP -gt 0 ]]; then + ! stop && return 4 + fi + # Stop and remove containers, networks and volumes + if [[ $CLEAN -gt 0 ]]; then + ! clean && return 5 + fi + # Build the ecoCode plugin + if [[ $BUILD -gt 0 ]]; then + ! build && return 6 + fi + # Compile the ecoCode plugin + if [[ $COMPILE -gt 0 ]]; then + ! compile && return 7 + fi + # Build Docker services + if [[ $BUILD_DOCKER -gt 0 ]]; then + ! docker_build && return 8 + fi + # Use maven plugin to create a new release + if [[ $RELEASE -gt 0 ]]; then + ! release && return 9 + fi + # create an push an new branch with commits previously prepared + if [[ $RELEASE_PUSH -gt 0 ]]; then + ! release_push && return 10 + fi + # Display Docker container logs + if [[ $DISPLAY_LOGS -gt 0 ]]; then + ! display_logs && return 11 + fi + return 0 +} + +# Main function. +function main() { + ARGS=() + HELP=0 VERBOSE=0 FORCE=0 TEST=0 FIXTURE=0 + INIT=0 START=0 STOP=0 CLEAN=0 + RELEASE=0 BUILD=0 COMPILE=0 BUILD_DOCKER=0 DISPLAY_LOGS=0 + # Check options passed as script parameters and execute tasks + ! check_opts "$@" && return 1 + # Used by unit tests to execute a function + if [[ $TEST -gt 0 ]]; then + execute_unit_test + return $? + fi + # Execute one or more tasks according to script parameters + ! execute_tasks && return $? 
+ return 0 +} + +main "$@" diff --git a/tool_build.sh b/tools_backup/tool_build.sh similarity index 100% rename from tool_build.sh rename to tools_backup/tool_build.sh diff --git a/tool_compile.sh b/tools_backup/tool_compile.sh similarity index 100% rename from tool_compile.sh rename to tools_backup/tool_compile.sh diff --git a/tool_docker-clean.sh b/tools_backup/tool_docker-clean.sh similarity index 100% rename from tool_docker-clean.sh rename to tools_backup/tool_docker-clean.sh diff --git a/tool_docker-init.sh b/tools_backup/tool_docker-init.sh similarity index 100% rename from tool_docker-init.sh rename to tools_backup/tool_docker-init.sh diff --git a/tool_docker-logs.sh b/tools_backup/tool_docker-logs.sh similarity index 100% rename from tool_docker-logs.sh rename to tools_backup/tool_docker-logs.sh diff --git a/tool_release_1_prepare.sh b/tools_backup/tool_release_1_prepare.sh similarity index 100% rename from tool_release_1_prepare.sh rename to tools_backup/tool_release_1_prepare.sh diff --git a/tool_release_2_branch.sh b/tools_backup/tool_release_2_branch.sh similarity index 100% rename from tool_release_2_branch.sh rename to tools_backup/tool_release_2_branch.sh diff --git a/tool_start.sh b/tools_backup/tool_start.sh similarity index 100% rename from tool_start.sh rename to tools_backup/tool_start.sh diff --git a/tool_start_withtoken.sh b/tools_backup/tool_start_withtoken.sh similarity index 100% rename from tool_start_withtoken.sh rename to tools_backup/tool_start_withtoken.sh diff --git a/tool_stop.sh b/tools_backup/tool_stop.sh similarity index 100% rename from tool_stop.sh rename to tools_backup/tool_stop.sh diff --git a/utils.sh b/utils.sh new file mode 100755 index 0000000..bdd3c13 --- /dev/null +++ b/utils.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash + +DOCKER_IMAGE="toolbox:1.0.0" + +function check_env_docker() { + ! 
[[ -x "$(command -v docker)" ]] && echo "Please install docker" && return 1 + return 0 +} + +function docker_image_remove() { + docker image rm "$DOCKER_IMAGE" 2> /dev/null + return 0 +} + +function docker_build() { + DOCKER_BUILDKIT=1 docker build -f toolbox.Dockerfile --target=runtime -t="$DOCKER_IMAGE" . + return 0 +} + +function docker_run() { + docker run --rm -it \ + --name ecocode_java_utils \ + -u $(id -u):$(getent group docker | cut -d: -f3) \ + -p :8000 \ + -v "//var/run/docker.sock:/var/run/docker.sock" \ + -v "$HOME/.m2:/app/.m2" \ + -v "$(pwd)/toolbox.sh:/app/toolbox.sh" \ + -v "$(pwd)/utils_bash.sh:/app/utils_bash.sh" \ + -v "$(pwd)/.default.docker.env:/app/.default.docker.env" \ + -v "$(pwd)/docker-compose.yml:/app/docker-compose.yml" \ + -v "$(pwd)/Dockerfile:/app/Dockerfile" \ + -v "$(pwd)/pom.xml:/app/pom.xml" \ + -v "$(pwd)/src:/app/src" \ + -v "$(pwd)/target:/app/target:rw" \ + -v "$(pwd)/pytest.ini:/app/pytest.ini" \ + -v "$(pwd)/mkdocs.yml:/app/mkdocs.yml" \ + -v "$(pwd)/docs:/app/docs" \ + -v "$(pwd)/tests:/app/tests" \ + $DOCKER_IMAGE /bin/bash + return 0 +} + +function main() { + FORCE=0 + [[ "$1" = "--force" ]] && FORCE=1 + ! check_env_docker && return 1 + if [[ $FORCE -gt 0 ]]; then + ! docker_image_remove && return 2 + fi + ! docker_build && return 3 + ! 
docker_run && return 4 + return 0 +} + +main "$@" diff --git a/utils_bash.sh b/utils_bash.sh new file mode 100755 index 0000000..ab9dc15 --- /dev/null +++ b/utils_bash.sh @@ -0,0 +1,139 @@ +#!/usr/bin/env bash +# @name utils_bash.sh +# @description +# This utility script enables you to perform the following actions: +# +# * Run unit tests with **pytest** +# * Linter the code with the **shellcheck** utility +# * Generating the API documentation with the **shdoc** utility +# * Generating a site from markdown files with **mkdocs** + +# Global variables +CURRENT_PATH="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" +DOC_PATH="$CURRENT_PATH/docs" + +# Shell coloring +declare -A COLORS=( + [RED]='\033[0;31m' + [GREEN]='\033[0;32m' + [YELLOW]='\033[0;33m' + [BLUE]='\033[0;34m' + [WHITE]='\033[0;37m' + [NOCOLOR]='\033[0;0m' +) + +# Display an information message. +function info() { + echo -e "${COLORS[WHITE]}$*${COLORS[NOCOLOR]}" + return 0 +} + +# Display an debug message. +function debug() { + [[ $VERBOSE -gt 0 ]] && echo -e "${COLORS[BLUE]}$*${COLORS[NOCOLOR]}" + return 0 +} + +# Display an error message. +function error() { + >&2 echo -e "${COLORS[RED]}$*${COLORS[NOCOLOR]}" + return 0 +} + +# @description Run unit tests. +# @exitcode 0 If successful. +function unit_tests() { + info "Run unit tests" + pytest tests/test_*.py + return 0 +} + +# @description Linter the application's bash code. +# @exitcode 0 If successful. +function lint() { + info "Linting bash code" + shellcheck -e SC1083 -x toolbox.sh + return 0 +} + +# @description Generate API documentation in markdown format. +# @exitcode 0 If successful. +function generate_doc() { + info "Generating the toolbox API documentation" + shdoc < "$CURRENT_PATH/toolbox.sh" > "$DOC_PATH/index.md" + return 0 +} + +# @description Display help. +# @exitcode 0 If successful. 
+function display_help() { + local output="" + output=" +${COLORS[YELLOW]}Usage${COLORS[WHITE]} $(basename "$0") [OPTIONS] COMMAND +${COLORS[YELLOW]}Commands:${COLORS[NOCOLOR]} +${COLORS[GREEN]}test${COLORS[WHITE]} Run unit tests +${COLORS[GREEN]}lint${COLORS[WHITE]} Linter the application's bash code +${COLORS[GREEN]}doc${COLORS[WHITE]} Generate API documentation +${COLORS[YELLOW]}Options:${COLORS[NOCOLOR]} +${COLORS[GREEN]}-h, --help${COLORS[WHITE]} Display help +${COLORS[GREEN]}-v, --verbose${COLORS[WHITE]} Make the command more talkative + " + echo -e "$output\n"|sed '1d; $d' + return 0 +} + +# Check options passed as script parameters. +function check_opts() { + read -ra opts <<< "$@" + for opt in "${opts[@]}"; do + case "$opt" in + test) UNIT_TEST=1 ;; + lint) LINT=1 ;; + doc) GENERATE_DOC=1 ;; + --verbose) VERBOSE=1 ;; + --help) HELP=1 ;; + *) ARGS+=("$opt") ;; + esac + done + # Help is displayed if no option is passed as script parameter + if [[ $((HELP+UNIT_TEST+LINT+GENERATE_DOC)) -eq 0 ]]; then + HELP=1 + fi + return 0 +} + +# Execute tasks based on script parameters or user actions. +function execute_tasks() { + # Display help + if [[ $HELP -gt 0 ]]; then + ! display_help && return 1 + return 0 + fi + # Run unit tests + if [[ $UNIT_TEST -gt 0 ]]; then + ! unit_tests && return 2 + fi + # Linter the application's bash code + if [[ $LINT -gt 0 ]]; then + ! lint && return 3 + fi + # Generate API documentation in markdown format + if [[ $GENERATE_DOC -gt 0 ]]; then + ! generate_doc && return 4 + fi + return 0 +} + +# Main function. +function main() { + ARGS=() + HELP=0 VERBOSE=0 + UNIT_TEST=0 LINT=0 GENERATE_DOC=0 + # Check options passed as script parameters and execute tasks + ! check_opts "$@" && return 1 + # Execute one or more tasks according to script parameters + ! execute_tasks && return 2 + return 0 +} + +main "$@"