From 0fbbf82980ac23a93a4d2b91cb891f6be1cb6fc5 Mon Sep 17 00:00:00 2001 From: SylivanKenobi Date: Fri, 13 Oct 2023 14:38:13 +0200 Subject: [PATCH 1/9] remove all methods who are deprecated or use bad plugins --- vars/addDeployLinks.groovy | 7 -- vars/addDeployLinks.txt | 21 ----- vars/check.groovy | 7 -- vars/check.txt | 17 ---- vars/cleanBuilds.groovy | 37 --------- vars/cleanBuilds.txt | 22 ----- vars/deployChangedComponents.groovy | 119 ---------------------------- vars/deployChangedComponents.txt | 36 --------- vars/openshiftUtils.groovy | 75 ------------------ vars/openshiftUtils.txt | 87 -------------------- vars/replaceFromVault.groovy | 10 --- vars/replaceFromVault.txt | 32 -------- vars/trackComponentVersions.groovy | 91 --------------------- vars/trackComponentVersions.txt | 47 ----------- vars/withGitCrypt.groovy | 9 --- vars/withGitCrypt.txt | 38 --------- 16 files changed, 655 deletions(-) delete mode 100644 vars/addDeployLinks.groovy delete mode 100644 vars/addDeployLinks.txt delete mode 100644 vars/check.groovy delete mode 100644 vars/check.txt delete mode 100644 vars/cleanBuilds.groovy delete mode 100644 vars/cleanBuilds.txt delete mode 100644 vars/deployChangedComponents.groovy delete mode 100644 vars/deployChangedComponents.txt delete mode 100644 vars/openshiftUtils.groovy delete mode 100644 vars/openshiftUtils.txt delete mode 100644 vars/replaceFromVault.groovy delete mode 100644 vars/replaceFromVault.txt delete mode 100644 vars/trackComponentVersions.groovy delete mode 100644 vars/trackComponentVersions.txt delete mode 100644 vars/withGitCrypt.groovy delete mode 100644 vars/withGitCrypt.txt diff --git a/vars/addDeployLinks.groovy b/vars/addDeployLinks.groovy deleted file mode 100644 index 3220181..0000000 --- a/vars/addDeployLinks.groovy +++ /dev/null @@ -1,7 +0,0 @@ -import com.puzzleitc.jenkins.command.AddDeployLinksCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext - -def call(Map params = [:]) { - AddDeployLinksCommand command = new AddDeployLinksCommand(new JenkinsPipelineContext(this, params)) - command.execute() -} diff --git a/vars/addDeployLinks.txt b/vars/addDeployLinks.txt deleted file mode 100644 index 24bd124..0000000 --- a/vars/addDeployLinks.txt +++ /dev/null @@ -1,21 +0,0 @@ - -

- Adds a deployment link to a specific build.
- This can be useful if a deploy job should be triggered when a build was successful. -

-

- parameters: -

-

-

- examples: -

- addDeployLinks deployJob: 'integration-pipeline-deploy' -

-

- - \ No newline at end of file diff --git a/vars/check.groovy b/vars/check.groovy deleted file mode 100644 index bcaaf14..0000000 --- a/vars/check.groovy +++ /dev/null @@ -1,7 +0,0 @@ -// Deprecated: Will be implemented as a step. -def mandatoryParameter(parameterName) { - if (!params.containsKey(parameterName)) { - currentBuild.result = 'ABORTED' - error('missing parameter: ' + parameterName) - } -} diff --git a/vars/check.txt b/vars/check.txt deleted file mode 100644 index 3badd5c..0000000 --- a/vars/check.txt +++ /dev/null @@ -1,17 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries') -

-
-
check.mandatoryParameter()
-
-

- Deprecated: Will be implemented as a step. -

-

- Checks if a parameter with the given name is available. If not, the build will be aborted with an error message. -
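A minimal call sketch (the parameter name here is only an illustration):

    check.mandatoryParameter('deployTarget')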

-
-
- - diff --git a/vars/cleanBuilds.groovy b/vars/cleanBuilds.groovy deleted file mode 100644 index e91e4a4..0000000 --- a/vars/cleanBuilds.groovy +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env groovy - -import com.jenkinsci.plugins.badge.action.BadgeAction - -// the function implements how many build per environment should be kept -// parameter is maxKeepBuilds -def call(Map args) { - def maxNumberToKeepBuilds = args?.maxKeepBuilds ?: 10; - - def environmentBuildCount = [:] - Jenkins.instance.getItemByFullName(args.job) - .getBuilds() - .findAll { it.isKeepLog() } - .each { build -> - deployedEnvironment = [] - build.getActions(BadgeAction.class).each { - deployedEnvironment << it.id - environmentBuildCount[it.id] = environmentBuildCount.get(it.id, 0) + 1 - } - - // each Build that should be kept will be stored in keepBuild map - def keepBuild = [] - deployedEnvironment.each { - if (environmentBuildCount[it] <= maxNumberToKeepBuilds) { - keepBuild << it - } - } - - // print out reason of/not keeping the build - if (keepBuild) { - echo "Keeping build ${build} because of the following promotions: ${keepBuild.join(' ')}" - } else { - echo "Deleting build ${build}" - build.delete() - } - } -} diff --git a/vars/cleanBuilds.txt b/vars/cleanBuilds.txt deleted file mode 100644 index 0a6cf1b..0000000 --- a/vars/cleanBuilds.txt +++ /dev/null @@ -1,22 +0,0 @@ - -

- The cleanBuilds function offers housekeeping for the integration pipeline.
- Every build of the integration pipeline which was deployed on an environment is automatically kept. - To prevent an excessive number of kept builds, this variable cleans up these jobs. It distinguishes between deployments for each environment. -

-

- parameters: -

-

-

- examples: -

- cleanBuilds job: 'project-a/integration-pipeline'
- cleanBuilds job: 'project-a/integration-pipeline', maxKeepBuilds: 10 -

-

- - \ No newline at end of file diff --git a/vars/deployChangedComponents.groovy b/vars/deployChangedComponents.groovy deleted file mode 100644 index c4e8e60..0000000 --- a/vars/deployChangedComponents.groovy +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env groovy - -import hudson.model.Run -import com.jenkinsci.plugins.badge.action.AbstractAction -import com.jenkinsci.plugins.badge.action.BadgeAction -import com.jenkinsci.plugins.badge.action.BadgeSummaryAction - -Map readComponentVersionsFromArtifact(String job, Object selector, String file) { - try { - copyArtifacts filter: file, projectName: job, selector: selector - def versions = readYaml(file: file) - if (versions instanceof Map) { - return versions - } else { - return [:] - } - // when no artifact available (e.g. its the first run) - // return an empty map instead of throwing an error - } catch (Exception e) { - return [:] - } -} - -void writeComponentVersionsToArtifact(String dataFile, Map componentVersions) { - // always remove the old file first - sh "rm -f ${dataFile}" - writeYaml file: dataFile, data: componentVersions - archiveArtifacts dataFile -} - -// always remove the old badge action before appling anything new -void removeBadgeAction(Run currentBuild, Class actionClass, String id) { - def badgeAction = currentBuild.getActions(actionClass).find { it.id == id } - if (badgeAction != null) { - currentBuild.removeAction(badgeAction) - } -} - -void setBadgeInBuildHistory(Run currentBuild, String icon, String altText, String id, String link) { - removeBadgeAction(currentBuild, BadgeAction.class, id) - - // as the badge-plugin does not support the adding of badges - // to other builds, we have to use the class directly - def badgeAction = BadgeAction.createBadge(icon, altText, link) - badgeAction.setId(id) - currentBuild.addAction(badgeAction) -} - -void setBadgeAndLinkInSummary(Run currentBuild, String icon, String id, String altText) { - removeBadgeAction(currentBuild, BadgeSummaryAction.class, id) - - // as the badge-plugin does not support the adding of badges - // to other builds, we have to use the class directly - def badgeSummaryAction = new BadgeSummaryAction(icon) - badgeSummaryAction.setId(id) - badgeSummaryAction.appendText(altText) - currentBuild.addAction(badgeSummaryAction) -} - -// Depending on the Badgelocation the Icon is either "large" or "small" -String getDeployIcon(String targetEnv, boolean isLarge = false) { - size = "16x16" - if (isLarge) { - size = "32x32" - } - if (targetEnv == "test") { - return '/plugin/promoted-builds/icons/' + size + '/star-silver.png' - } else if (targetEnv == "prod") { - return '/plugin/promoted-builds/icons/' + size + '/star-gold.png' - } else if (targetEnv == "int") { - return '/plugin/promoted-builds/icons/' + size + '/star-purple.png' - } - return '/plugin/promoted-builds/icons/' + size + '/star-orange.png' -} - -void addDeployedBadges() { - def built = Jenkins.instance.getItemByFullName(built_name).getBuild(built_number) - def deploy = currentBuild.rawBuild - - setBadgeInBuildHistory(deploy, getDeployIcon(target_env), "Deployed ${built_name} #${built_number} to ${target_env}", target_env, "/${built.getUrl()}") - setBadgeAndLinkInSummary(deploy, getDeployIcon(target_env, true), target_env, "Deployed ${built_name} #${built_number} to ${target_env}") - - setBadgeInBuildHistory(built, getDeployIcon(target_env), "Deployed to ${target_env} by ${env.JOB_NAME} #${env.BUILD_NUMBER}", target_env, "/${currentBuild.rawBuild.getUrl()}") - setBadgeAndLinkInSummary(built, 
getDeployIcon(target_env, true), target_env, "Deployed to ${target_env} by ${env.JOB_NAME} #${env.BUILD_NUMBER}") - // save is required to persist badges on other builds than the current - built.save() - - built.keepLog(true) -} - -def call() { - final DEPLOYED_VERSIONS_FILE = 'deployed-versions.yaml' - final COMPONENT_VERSIONS_FILE = 'component-versions.yaml' - final EXECUTION_JOB_FILE = 'rundeck-jobs.yaml' - - def newComponentVersions = readComponentVersionsFromArtifact(built_name, specific(built_number), COMPONENT_VERSIONS_FILE) - def deployedVersions = readComponentVersionsFromArtifact(env.JOB_NAME, lastSuccessful(), DEPLOYED_VERSIONS_FILE) - - def currentComponentVersions = deployedVersions[target_env] - deployedVersions[target_env] = newComponentVersions - - executionJobs = readYaml(file: EXECUTION_JOB_FILE) - - newComponentVersions.each { k, v -> - - if (!currentComponentVersions) { - echo "Installing component ${k} on environment ${target_env} with version ${v.version}." - echo "Executing rundeck job ${executionJobs[k].jobId} with env ${target_env} and version ${v.version}." // TODO: Replace with rundeck call - } else if (v.version == currentComponentVersions[k].version) { - echo "Component ${k} version ${v.version} already deployed, skipping." - } else { - echo "Updating component ${k} on environment ${target_env} from version ${currentComponentVersions[k].version} to ${v.version}." - echo "Executing rundeck job ${executionJobs[k].jobId} with env ${target_env} and version ${v.version}." // TODO: Replace with rundeck call - } - } - - writeComponentVersionsToArtifact DEPLOYED_VERSIONS_FILE, deployedVersions - addDeployedBadges() -} diff --git a/vars/deployChangedComponents.txt b/vars/deployChangedComponents.txt deleted file mode 100644 index 54c517a..0000000 --- a/vars/deployChangedComponents.txt +++ /dev/null @@ -1,36 +0,0 @@ - -

- This function can be used to deploy a component (or a set of components) from a specific integration pipeline to a distinct environment.
- It provides multiple features: -

- The integration pipeline has to provide a valid component-versions.yaml file.
- deployChangedComponents() has to be called in a parameterized job as it depends on parameters to be set. -

-

- parameters for the containing job: -

-

-

-

- Attention:
- There is no implementation of the actual deployment.
- The function only prints placeholder messages describing the steps an actual deployment would perform. -
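The implementation above reads its execution-job mapping from rundeck-jobs.yaml; a hypothetical entry (component name and job id are placeholders) might look like:

    my-component:
      jobId: "b2f6c3d4-1234-5678-9abc-def012345678"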
-

- - \ No newline at end of file diff --git a/vars/openshiftUtils.groovy b/vars/openshiftUtils.groovy deleted file mode 100644 index fae1d81..0000000 --- a/vars/openshiftUtils.groovy +++ /dev/null @@ -1,75 +0,0 @@ -// Deprecated: Use the openshiftApply() step. - -// -// OpenShift resource update by applying a template. -// -// method parameters: -// ocpUrl -> url of the OpenShift server -// ocpProject -> project-name/namespace of the OpenShift project -// templateFile -> OpenShift template -// credentialsId -> credentials for the OpenShift login -// namespace -> true adds NAMESPACE_NAME param -// -def void applyTemplate(String ocpUrl, String ocpProject, String templateFile, String credentialsId, boolean namespace) { - - echo "-- start resource update by template --" - echo "OpenShift server URL: $ocpUrl" - echo "OpenShift project: $ocpProject" - echo "resource file: $templateFile" - - withCredentials([[$class : 'StringBinding', - credentialsId : "${credentialsId}", - variable : 'openshift_token']]) { - withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin", "ocpUrl=${ocpUrl}"]) { - sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token" - sh "oc project $ocpProject" - sh "oc project" - sh "oc whoami" - - // apply template - if (namespace) { - sh "oc process -f $templateFile -p NAMESPACE_NAME=\$(oc project -q) | oc apply -f -" - } else { - sh "oc process -f $templateFile | oc apply -f -" - } - } - } -} - -// -// OpenShift resource update by applying a template with environment file. -// -// method parameters: -// ocpUrl -> url of the OpenShift server -// ocpProject -> project-name/namespace of the OpenShift project -// templateFile -> OpenShift template -// credentialsId -> credentials for the OpenShift login -// envFile -> environment file -// namespace -> true adds NAMESPACE_NAME param -// -def void applyTemplateWithEnvFile(String ocpUrl, String ocpProject, String templateFile, String credentialsId, String envFile, boolean namespace) { - - echo "-- start resource update by template with environment file --" - echo "OpenShift server URL: $ocpUrl" - echo "OpenShift project: $ocpProject" - echo "resource file: $templateFile" - echo "environment file: $envFile" - - withCredentials([[$class : 'StringBinding', - credentialsId : "${credentialsId}", - variable : 'openshift_token']]) { - withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin", "ocpUrl=${ocpUrl}"]) { - sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token" - sh "oc project $ocpProject" - sh "oc project" - sh "oc whoami" - - // apply template - if (namespace) { - sh "oc process -f $templateFile -p NAMESPACE_NAME=\$(oc project -q) --param-file $envFile | oc apply -f -" - } else { - sh "oc process -f $templateFile --param-file $envFile | oc apply -f -" - } - } - } -} diff --git a/vars/openshiftUtils.txt b/vars/openshiftUtils.txt deleted file mode 100644 index bfda8fc..0000000 --- a/vars/openshiftUtils.txt +++ /dev/null @@ -1,87 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries') -

-
-
- openshiftUtils.applyTemplate(ocpUrl:String, ocpProject:String, templateFile:String, credentialsId:String, namespace:Boolean):void -
-
-

- Deprecated: Use the openshiftApply() step. -

-

- OpenShift resource update by applying a template. -
- Example call: -
- openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", false) -

-
    -
  • - ocpUrl - URL of the OpenShift server API endpoint. -
  • -
  • - ocpProject - Project/namespace name of the OpenShift project in which to apply the template. -
  • -
  • - templateFile - Path to the OpenShift template file. -
  • -
  • - credentialsId - The credentialsId of an OpenShift Auth token stored in the Jenkins credential - manager with which to authenticate. Type of Credential is Secret text. -
  • -
  • - namespace - Boolean parameter. True adds a parameter with the name NAMESPACE_NAME - and as value the name of the actual OpenShift project. -
  • -
-
-
- openshiftUtils.applyTemplateWithEnvFile(ocpUrl:String, ocpProject:String, templateFile:String, credentialsId:String, envFile:String, namespace:Boolean):void -
-
-

- Deprecated: Use the openshiftApply() step. -

-

- OpenShift resource update by applying a template with parameters from an environment file. -

-

- The environment file contains keys and values separated by a colon. -
- Example: -
- KEY: 'value' -

-

- Example call: -
- openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", "test.yaml", false) -

-
    -
  • - ocpUrl - URL of the OpenShift server API endpoint. -
  • -
  • - ocpProject - Project/namespace name of the OpenShift project in which to apply the template. -
  • -
  • - templateFile - Path to the OpenShift template file. -
  • -
  • - credentialsId - The credentialsId of an OpenShift Auth token stored in the Jenkins credential - manager with which to authenticate. Type of Credential is Secret text. -
  • -
  • - envFile - Path to the environment file holding all parameters with their values. -
  • -
  • - namespace - Boolean parameter. True adds a parameter with the name NAMESPACE_NAME - and as value the name of the actual OpenShift project. -
  • -
-
-
- - diff --git a/vars/replaceFromVault.groovy b/vars/replaceFromVault.groovy deleted file mode 100644 index f0ab575..0000000 --- a/vars/replaceFromVault.groovy +++ /dev/null @@ -1,10 +0,0 @@ -import com.puzzleitc.jenkins.command.ReplaceFromVaultCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext - -import static com.puzzleitc.jenkins.util.Args.parseArgs - -def call(Map namedArgs = [:], Object... positionalArgs) { - def args = parseArgs(namedArgs, positionalArgs, ['text']) - ReplaceFromVaultCommand command = new ReplaceFromVaultCommand(new JenkinsPipelineContext(this, args)) - return command.execute() -} \ No newline at end of file diff --git a/vars/replaceFromVault.txt b/vars/replaceFromVault.txt deleted file mode 100644 index 0680a3e..0000000 --- a/vars/replaceFromVault.txt +++ /dev/null @@ -1,32 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries') -

-
-
- replaceFromVault(text:String):String -
-
-

- Substitutes variables in a provided string with secrets retrieved from HashiCorp Vault. The string - will be scanned for the pattern {{ vault.get("<VAULT_PATH>", "<VAULT_KEY>") }}, - where VAULT_PATH defines the fully qualified path of the secret in Vault and - VAULT_KEY the key to look up in the specified secret. Example pattern: - {{ vault.get("/kv/spaces/outerspace/wekan-secret-test", "mongodb_user") }}. -
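For illustration, a hypothetical substitution (path, key and secret value are placeholders):

    input:  password: {{ vault.get("/kv/spaces/example/app", "db_password") }}
    output: password: s3cr3t-value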

-
    -
  • - text (required) - The string which contains a predefined pattern that should be substituted by - secrets from Hashicorp Vault. -
  • -
-

- Example: -

-

-replaceFromVault(text: kustomize(path: 'openshift/postgresql/overlays/dev'))
-        
-
-
- - diff --git a/vars/trackComponentVersions.groovy b/vars/trackComponentVersions.groovy deleted file mode 100644 index 9f5071d..0000000 --- a/vars/trackComponentVersions.groovy +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env groovy -import hudson.model.Cause -import org.jenkinsci.plugins.pipeline.maven.publishers.MavenReport - -Map readComponentVersionsFromArtifact(String job, Object selector, String file) { - try { - copyArtifacts filter: file, projectName: job, selector: selector - def versions = readYaml(file: file) - if (versions instanceof Map) { - return versions - } else { - return [:] - } - // when no artifact available (e.g. its the first run) - // return an empty map instead of throwing an error - } catch (Exception e) { - return [:] - } -} - -void writeComponentVersionsToArtifact(String dataFile, Map componentInfos) { - // always remove the old file first - sh "rm -f ${dataFile}" - writeYaml file: dataFile, data: componentInfos - archiveArtifacts dataFile -} - -def getUpstreamCauses() { - def result = [] - - def buildCauses = currentBuild.rawBuild.getCauses() - buildCauses.each { - if (it instanceof Cause.UpstreamCause) { - def upstreamCause = it as Cause.UpstreamCause - result << [job: upstreamCause.upstreamProject, build: upstreamCause.upstreamBuild.toString()] - } - } - - return result -} - -def getComponentInformations(String componentVersion, Boolean externalComponent = false) { - def infos = [version: componentVersion + '-' + env.BUILD_NUMBER, job: env.JOB_NAME, buildNumber: env.BUILD_NUMBER as int, buildUrl: env.BUILD_URL] - if(externalComponent) { - infos = [version: componentVersion] - } - - // does the current build contain a maven build? - def mavenReport = currentBuild.rawBuild.getActions(MavenReport.class) - if (mavenReport) { - def mavenArtifacts = [] - mavenReport[0].getDeployedArtifacts().each { - mavenArtifacts << it.url - } - infos['artifacts'] = mavenArtifacts - } - - return infos -} - -def call(Map args) { - final COMPONENT_VERSIONS_FILE = 'component-versions.yaml' - - lock(resource: 'trackComponentVersions', inversePrecedence: false) { - def componentInfos = readComponentVersionsFromArtifact(env.JOB_NAME, lastSuccessful(), COMPONENT_VERSIONS_FILE) - - // If a build is taking a long time multiple other build requests can - // occour so there may be multiple build causes: http://javadoc.jenkins-ci.org/hudson/model/Run.html#getCauses-- - getUpstreamCauses().each { - def upstreamVersions = readComponentVersionsFromArtifact(it.job, specific(it.build), COMPONENT_VERSIONS_FILE) - componentInfos.putAll(upstreamVersions) - } - // check if pomFile location is passed - if (args?.pomFile) { - def mavenCoordinates = readMavenPom(file: args.pomFile) - componentInfos[mavenCoordinates.artifactId] = getComponentInformations(mavenCoordinates.version) - // is version yaml passed? 
- } else if (args?.versionFile) { - releaseVersion = readYaml(file: args.versionFile) - if (args?.containsExternalComponents) { - releaseVersion.each { k, v -> componentInfos[k] = getComponentInformations(v, args.containsExternalComponents) } - } else { - releaseVersion.each { k, v -> componentInfos[k] = getComponentInformations(v) } - } - } else { - error(getClass().getName() + ': Either pomFile or versionFile must be set!') - } - - writeComponentVersionsToArtifact(COMPONENT_VERSIONS_FILE, componentInfos) - } -} diff --git a/vars/trackComponentVersions.txt b/vars/trackComponentVersions.txt deleted file mode 100644 index a188178..0000000 --- a/vars/trackComponentVersions.txt +++ /dev/null @@ -1,47 +0,0 @@ - -

- Tracks the version of a component and its upstreams.
- For upstream jobs the information about the component versions is retrieved from a file called component-versions.yaml.
- The structure of this file: -

-

- - [componentname]:
-   version: [componentversion]
-   job: [name of job]
-   buildNumber: [number of last successful build]
-   buildUrl: [URL for last successful build]
-   artifacts: [contains list of artifacts]
-
-
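For illustration, a concrete entry for a Maven-built component could look like this (all values are placeholders):

    my-service:
      version: "1.4.0-57"
      job: "project-a/build-my-service"
      buildNumber: 57
      buildUrl: "https://jenkins.example.com/job/project-a/job/build-my-service/57/"
      artifacts:
        - "https://repo.example.com/releases/my-service-1.4.0.jar"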

-

- Stores this information in the file component-versions.yaml.
- If trackComponentVersions is called from multiple upstream projects, it will aggregate the information about every component in the component-versions.yaml. -

-

- parameters: -

- If the file cannot be found, the function returns an empty map. -

-

- return: -

- The result file must exist in the repository but can be empty. -

-

-

- examples: -

- trackComponentVersions versionFile: 'project-a/version.yml'
- trackComponentVersions pomFile: 'project-a/pom.xml' -

-

- - \ No newline at end of file diff --git a/vars/withGitCrypt.groovy b/vars/withGitCrypt.groovy deleted file mode 100644 index d6d17a9..0000000 --- a/vars/withGitCrypt.groovy +++ /dev/null @@ -1,9 +0,0 @@ -import com.puzzleitc.jenkins.command.GitCryptCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext -import static com.puzzleitc.jenkins.util.Args.parseArgs - -def call(Map namedArgs = [:], Object... positionalArgs) { - def args = parseArgs(namedArgs, positionalArgs, ['credentialsId', 'body']) - GitCryptCommand command = new GitCryptCommand(new JenkinsPipelineContext(this, args)) - command.execute() -} diff --git a/vars/withGitCrypt.txt b/vars/withGitCrypt.txt deleted file mode 100644 index c3c09c7..0000000 --- a/vars/withGitCrypt.txt +++ /dev/null @@ -1,38 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries') -

-
-
- withGitCrypt(credentialsId:String):Object {…} -
-
-

- Unlocks the Git repository in the current directory with git-crypt for the duration of the scope of the step - and locks it again afterwards. The repository is also relocked when an error occurs in the scope of the step. - The return value is the value returned by (or the value of the last statement within) the scope of the step. - If credentialsId is null git-crypt is not called but the passed block/closure is still executed. This can - be used to run git-crypt conditionally. -

-
    -
  • - credentialsId - The credentials id of a git-crypt keyfile stored in the Jenkins credential - manager. -
  • -
-

- Example: -

-

-withGitCrypt(credentialsId: 'my-git-crypt-keyfile') {
-    // work with decrypted files
-}
-// files are encrypted again
-        
-

- For further information see git-crypt documentation. -

-
-
- - From 780eafcb06b58aa27fbd0f0c61bcd1424c5d1bb5 Mon Sep 17 00:00:00 2001 From: SylivanKenobi Date: Tue, 17 Oct 2023 10:32:31 +0200 Subject: [PATCH 2/9] remove unused tests --- README.md | 42 --------------- test/groovy/vars/CheckPipelineTest.groovy | 51 ------------------- test/groovy/vars/CheckTest.groovy | 56 -------------------- test/groovy/vars/OpenshiftUtilsSpec.groovy | 59 ---------------------- 4 files changed, 208 deletions(-) delete mode 100644 test/groovy/vars/CheckPipelineTest.groovy delete mode 100644 test/groovy/vars/CheckTest.groovy delete mode 100644 test/groovy/vars/OpenshiftUtilsSpec.groovy diff --git a/README.md b/README.md index 862162f..f50971b 100644 --- a/README.md +++ b/README.md @@ -8,31 +8,6 @@ Documentation: ## global variables -### addDeployLinks - -Adds a link(to another job) for a specific build from the build history of a jenkins job. -See it's [documentation](vars/addDeployLinks.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/addDeployLinks.groovy](vars/addDeployLinks.groovy) - -### cleanBuilds - -Cleans up the build history of the jenkins job. It can differentiate deployments to specific environments. -See it's [documentation](vars/cleanBuilds.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/cleanBuilds.groovy](vars/cleanBuilds.groovy) - -### deployChangedComponents - -This function can be used as a template for deploying a component on a specific environment. -There is no actual implementation of a deployment but a placeholder with an echo output. -See it's [documentation](vars/deployChangedComponents.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/deployChangedComponents.groovy](vars/deployChangedComponents.groovy) - ### executable Ensures that the given executable is available on the current Jenkins agent, installing it with a Jenkins tool installer @@ -63,23 +38,6 @@ Requests OpenShift to start build from the specified build config and waits for Runs the OWASP dependency-check tool. -### replaceFromVault - -Substitutes variables in a provided String with secrets retrieved from HashiCorp Vault. - -### trackComponentVersions - -This function tracks information about the current deployment state of a component on a specific environment. -See it's [documentation](vars/trackComponentVersions.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/trackComponentVersions.groovy](vars/trackComponentVersions.groovy) - -### withGitCrypt - -Unlocks the Git repository in the current directory with `git-crypt` for the duration of the scope of the - step and locks it again afterwards. 
- ## Groovy source files ### DockerHub diff --git a/test/groovy/vars/CheckPipelineTest.groovy b/test/groovy/vars/CheckPipelineTest.groovy deleted file mode 100644 index 5d2a75b..0000000 --- a/test/groovy/vars/CheckPipelineTest.groovy +++ /dev/null @@ -1,51 +0,0 @@ -package groovy.pipeline - -import com.lesfurets.jenkins.unit.declarative.DeclarativePipelineTest -import org.junit.Before -import org.junit.Test - -import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString -import static com.lesfurets.jenkins.unit.global.lib.LibraryConfiguration.library -import static com.lesfurets.jenkins.unit.global.lib.ProjectSource.projectSource -import static org.assertj.core.api.Assertions.assertThat - -class CheckPipelineTest extends DeclarativePipelineTest { - - @Before - @Override - void setUp() throws Exception { - super.setUp() - helper.libLoader.preloadLibraryClasses = false - def library = library().name('jenkins-shared-library') - .defaultVersion('master') - .allowOverride(true) - .implicit(true) - .targetPath('') - .retriever(projectSource()) - .build() - helper.registerSharedLibrary(library) - } - - @Test - void itShouldAbortForMissingMandatoryParameter() { - // when - runScript('test/resources/vars/check/mandatoryParameterMissing.groovy') - - // then - assertThat(helper.callStack.findAll { call -> - call.methodName == "error" - }.any { call -> - callArgsToString(call).contains("missing parameter: hallo") - }).isTrue() - printCallStack() - } - - @Test - void itShouldNotAbortForAvailableMandatoryParameter() { - // when - runScript('test/resources/vars/check/mandatoryParameter.groovy') - - // then - assertJobStatusSuccess() - } -} diff --git a/test/groovy/vars/CheckTest.groovy b/test/groovy/vars/CheckTest.groovy deleted file mode 100644 index 952e52b..0000000 --- a/test/groovy/vars/CheckTest.groovy +++ /dev/null @@ -1,56 +0,0 @@ -package groovy.vars - -import static org.assertj.core.api.Assertions.* -import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString - -import com.lesfurets.jenkins.unit.BasePipelineTest -import org.junit.Before -import org.junit.Test - -/** - * Tests for vars/check.groovy - */ -class CheckTest extends BasePipelineTest { - - private Script check - - @Override - @Before - void setUp() { - super.setUp() - check = loadScript('vars/check.groovy') - } - - @Test - void itShouldAbortForMissingMandatoryParameter() { - // given - // Override error behaviour as it is not correctly implemented by the test framework - helper.registerAllowedMethod('error', [String.class]) - check.binding.setVariable('params', [:]) - - // when - check.mandatoryParameter('hallo') - - // then - assertJobStatusAborted() - assertThat(helper.callStack.findAll { call -> - call.methodName == 'error' - }.any { call -> - callArgsToString(call).contains('missing parameter: hallo') - }).isTrue() - } - - @Test - void itShouldNotAbortForAvailableMandatoryParameter() { - // given - check.binding.setVariable('currentBuild', [result: 'null']) - check.binding.setVariable('params', [hallo: 'test']) - - // when - check.mandatoryParameter('hallo') - - // then - assertThat(check.binding.getVariable('currentBuild').result as String).isEqualTo('null') - assertThat(helper.callStack).filteredOn({ it.methodName == 'error' }).isEmpty() - } -} diff --git a/test/groovy/vars/OpenshiftUtilsSpec.groovy b/test/groovy/vars/OpenshiftUtilsSpec.groovy deleted file mode 100644 index 6dc3032..0000000 --- a/test/groovy/vars/OpenshiftUtilsSpec.groovy +++ /dev/null @@ -1,59 +0,0 @@ -package groovy.vars - -import 
com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification - -class OpenshiftUtilsSpec extends JenkinsPipelineSpecification { - - def openshiftUtils = loadPipelineScriptForTest('vars/openshiftUtils.groovy') - - def 'Applys the template without namespace'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", false) - then: - 1 * getPipelineMock('echo').call('OpenShift server URL: https://console.appuio.ch/') - 1 * getPipelineMock('echo').call('OpenShift project: my-appuio-project') - 1 * getPipelineMock('echo').call('resource file: template.yaml') - 1 * getPipelineMock('withCredentials').call(_) >> { _arguments -> - def credentialsId = 'APPUiO_login_token' - credentialsId == _arguments[0][0].credentialsId[0] - } - 1 * getPipelineMock('tool').call(_) >> '/home/jenkins' - 1 * getPipelineMock('withEnv').call(_) >> { _arguments -> -// TODO: pwd mock - def env = ["KUBECONFIG=null/test/path/.kube", "PATH+OC_HOME=/home/jenkins/bin", "ocpUrl=https://console.appuio.ch/"] - env == _arguments[0][0] - } - 1 * getPipelineMock('sh').call('oc login https://console.appuio.ch/ --insecure-skip-tls-verify=true --token=Mock Generator for [openshift_token]') - 1 * getPipelineMock('sh').call('oc process -f template.yaml | oc apply -f -') - } - - def 'Applys the template with namespace'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", true) - then: - 1 * getPipelineMock('sh').call('oc process -f template.yaml -p NAMESPACE_NAME=$(oc project -q) | oc apply -f -') - } - - def 'Applys the template with envFile and without namespace'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", 'envFile.env', false) - then: - 1 * getPipelineMock('echo').call('environment file: envFile.env') - 1 * getPipelineMock('sh').call('oc process -f template.yaml --param-file envFile.env | oc apply -f -') - } - - def 'Applys the template with namespace and envFile'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", 'envFile.env', true) - then: - 1 * getPipelineMock('sh').call('oc process -f template.yaml -p NAMESPACE_NAME=$(oc project -q) --param-file envFile.env | oc apply -f -') - } -} From ab8a3b546e88f1bca425cff3a0ec3351823cdd6a Mon Sep 17 00:00:00 2001 From: Lukas Koller Date: Fri, 27 Oct 2023 10:17:12 +0200 Subject: [PATCH 3/9] add rocketSend function --- README.md | 4 ++++ vars/rocketSend.groovy | 22 ++++++++++++++++++++++ vars/rocketSend.txt | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 60 insertions(+) create mode 100644 vars/rocketSend.groovy create mode 100644 vars/rocketSend.txt diff --git a/README.md b/README.md index f50971b..8ad75a9 100644 --- a/README.md +++ b/README.md @@ -38,6 +38,10 @@ Requests OpenShift to start build from the specified build config and waits for Runs the OWASP dependency-check tool. +### rocketSend + +Sends message to the specified WebHook. 
+ ## Groovy source files ### DockerHub diff --git a/vars/rocketSend.groovy b/vars/rocketSend.groovy new file mode 100644 index 0000000..ec24ba4 --- /dev/null +++ b/vars/rocketSend.groovy @@ -0,0 +1,22 @@ +def void call(String webHook, String message, String avatar = null, Boolean rawMessage = true) { + Map data = [:] as Map + data['text'] = message + data['rawMessage'] = rawMessage + + if (avatar != null) { + data['avatar'] = avatar + } + + def curlCommand = 'curl ' + + ' -X POST "' + webHook + '" ' + + ' -H "Content-Type: application/json" ' + + ' --data \'' + groovy.json.JsonOutput.toJson(data) + '\' ' + + def returnCode = sh(script: curlCommand, returnStatus: true) + + if (returnCode != 0) { + error("RocketChat notification failed!") + } else { + echo "RocketChat notification sent successfully" + } +} \ No newline at end of file diff --git a/vars/rocketSend.txt b/vars/rocketSend.txt new file mode 100644 index 0000000..77cb054 --- /dev/null +++ b/vars/rocketSend.txt @@ -0,0 +1,34 @@ + +

+ Part of @Library('jenkins-pipeline-shared-libraries') +

+
+
+ rocketSend(webHook:String, message:String, avatar:String=null, rawMessage:Boolean=true):void +
+
+

- Publishes a message to chat.puzzle.ch. The channel destination is disabled for the integration; therefore you need to create the Webhook first. -
+ Example call: +
+ rocketSend("https://chat.puzzle.ch/hooks/../..", "Build Successful", "https://chat.puzzle.ch/emoji-custom/success.png", true) +
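As a sketch, the same step could be called from a declarative pipeline's post section (webhook URL and message are placeholders):

    post {
        failure {
            rocketSend('https://chat.puzzle.ch/hooks/<id>/<token>', "${env.JOB_NAME} #${env.BUILD_NUMBER} failed")
        }
    }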

+
    +
  • - webHook - WebHook URL -
  • +
  • + message - Content of the Rocket Chat message +
  • +
  • + avatar - Avatar image used in Rocket Chat +
  • +
  • - rawMessage - Whether the message should be sent as raw text -
  • +
+
+
+ + From b0dc83f6283742b6a23db6bfaebf376150c03fbc Mon Sep 17 00:00:00 2001 From: Sylvain Gilgen Date: Fri, 3 Nov 2023 15:31:20 +0100 Subject: [PATCH 4/9] remove unused classes --- .../puzzleitc/jenkins/MavenCoordinates.groovy | 63 ------------------- src/com/puzzleitc/jenkins/Util.groovy | 60 ------------------ .../command/AddDeployLinksCommand.groovy | 27 -------- .../jenkins/command/GitCryptCommand.groovy | 34 ---------- .../command/ReplaceFromVaultCommand.groovy | 43 ------------- 5 files changed, 227 deletions(-) delete mode 100644 src/com/puzzleitc/jenkins/MavenCoordinates.groovy delete mode 100644 src/com/puzzleitc/jenkins/Util.groovy delete mode 100644 src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy delete mode 100644 src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy delete mode 100644 src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy diff --git a/src/com/puzzleitc/jenkins/MavenCoordinates.groovy b/src/com/puzzleitc/jenkins/MavenCoordinates.groovy deleted file mode 100644 index e060a5d..0000000 --- a/src/com/puzzleitc/jenkins/MavenCoordinates.groovy +++ /dev/null @@ -1,63 +0,0 @@ -package com.puzzleitc.jenkins - -class MavenCoordinates implements Serializable { - String groupId - String artifactId - String version - String packaging - String classifier - - @com.cloudbees.groovy.cps.NonCPS - String toString() { - return "${groupId}:${artifactId}:${version}:${packaging}${classifier?.trim() ? ":${classifier}" : "" }" - } - - @com.cloudbees.groovy.cps.NonCPS - int hashCode() { - final int prime = 31 - int result = 1 - result = prime * result + ((artifactId == null) ? 0 : artifactId.hashCode()) - result = prime * result + ((classifier == null) ? 0 : classifier.hashCode()) - result = prime * result + ((groupId == null) ? 0 : groupId.hashCode()) - result = prime * result + ((packaging == null) ? 0 : packaging.hashCode()) - result = prime * result + ((version == null) ? 
0 : version.hashCode()) - return result - } - - @com.cloudbees.groovy.cps.NonCPS - boolean equals(Object obj) { - if (this == obj) - return true - if (obj == null) - return false - if (getClass() != obj.getClass()) - return false - MavenCoordinates other = (MavenCoordinates) obj - if (artifactId == null) { - if (other.artifactId != null) - return false - } else if (!artifactId.equals(other.artifactId)) - return false - if (classifier == null) { - if (other.classifier != null) - return false - } else if (!classifier.equals(other.classifier)) - return false - if (groupId == null) { - if (other.groupId != null) - return false - } else if (!groupId.equals(other.groupId)) - return false - if (packaging == null) { - if (other.packaging != null) - return false - } else if (!packaging.equals(other.packaging)) - return false - if (version == null) { - if (other.version != null) - return false - } else if (!version.equals(other.version)) - return false - return true - } -} diff --git a/src/com/puzzleitc/jenkins/Util.groovy b/src/com/puzzleitc/jenkins/Util.groovy deleted file mode 100644 index 240a083..0000000 --- a/src/com/puzzleitc/jenkins/Util.groovy +++ /dev/null @@ -1,60 +0,0 @@ -package com.puzzleitc.jenkins - -// This class is deprecated, use src/com/puzzleitc/jenkins/util/Args.groovy instead - -@Deprecated -class Util { - static Map parseArgs(namedArgs, positionalArgs, List requiredParams, Map optionalParams = [:]) { - - // null can be incorrectly assigned to namedArgs or positionalArgs if there are no other named or positional arguments, fix it - if (namedArgs == null) { - namedArgs = [:] - positionalArgs = ([null] as Object[]) + positionalArgs - } - if (positionalArgs == null) { - positionalArgs = ([null] as Object[]) - } - - // If the last argument is a closure it always goes into the last parameter - def positionalArgsCount = positionalArgs?.length - if (positionalArgsCount && positionalArgs[-1] instanceof Closure) { - println("closure") - def lastKey = optionalParams.size() ? 
optionalParams.keySet().last() : requiredParams[-1] - namedArgs[lastKey] = positionalArgs[-1] - positionalArgsCount-- - } - - int i = 0 - for (def item: requiredParams) { - if (namedArgs.containsKey(item)) { - if (i < positionalArgsCount) { - throw new IllegalArgumentException("Multiple values for argument '${item}'") - } - } else { - if (i < positionalArgsCount) { - namedArgs[item] = positionalArgs[i] - } else { - throw new IllegalArgumentException("Missing argument '${item}'") - } - } - i++ - } - - for (def item: optionalParams) { - if (namedArgs.containsKey(item.key)) { - if (i < positionalArgsCount) { - throw new IllegalArgumentException("Multiple values for argument '${item.key}'") - } - } else { - if (i < positionalArgsCount) { - namedArgs[item.key] = positionalArgs[i] - } else { - namedArgs[item.key] = item.value - } - } - i++ - } - - return namedArgs - } -} diff --git a/src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy b/src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy deleted file mode 100644 index c0623f8..0000000 --- a/src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy +++ /dev/null @@ -1,27 +0,0 @@ -package com.puzzleitc.jenkins.command - -import com.puzzleitc.jenkins.command.context.PipelineContext -import com.puzzleitc.jenkins.command.context.StepParams - -class AddDeployLinksCommand { - - private final PipelineContext ctx - - AddDeployLinksCommand(PipelineContext ctx) { - this.ctx = ctx - } - - void execute() { - ctx.info('-- AddDeployLinks --') - def deployJob = ctx.stepParams.getOptional('deployJob') - if (deployJob == null) { - error(ctx.getClass().getName() + ': No deploymentJob found. Must be specified!') - } - ctx.echo("deployJob: " + deployJob) - def deploymentJob = Jenkins.instance.getItemByFullName(deployJob) - if (deploymentJob == null) { - error(ctx.getClass().getName() + ": can't find job '${deploymentJob}'!" 
) - } - ctx.addHtmlBadge html:"Deploy " - } -} \ No newline at end of file diff --git a/src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy b/src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy deleted file mode 100644 index c28068c..0000000 --- a/src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy +++ /dev/null @@ -1,34 +0,0 @@ -package com.puzzleitc.jenkins.command - -import com.puzzleitc.jenkins.command.context.PipelineContext - -class GitCryptCommand { - - private final PipelineContext ctx - - GitCryptCommand(PipelineContext ctx) { - this.ctx = ctx - } - - void execute() { - def credentialsId = ctx.stepParams.getRequired('credentialsId') as String - def body = ctx.stepParams.getRequired('body') as Closure - def gitCryptPath = ctx.executable('git-crypt', 'gitcrypt') - def unlocked = false - try { - if (credentialsId) { - ctx.info('-- git-crypt unlock --') - ctx.withCredentials([ctx.file(credentialsId: credentialsId, variable: 'GIT_CRYPT_KEYFILE')]) { - ctx.sh script: "${gitCryptPath}/git-crypt unlock \${GIT_CRYPT_KEYFILE}" - unlocked = true - } - } - body() - } finally { - if (unlocked) { - ctx.info('-- git-crypt lock --') - ctx.sh script: "${gitCryptPath}/git-crypt lock" - } - } - } -} diff --git a/src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy b/src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy deleted file mode 100644 index 9bf4905..0000000 --- a/src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy +++ /dev/null @@ -1,43 +0,0 @@ -package com.puzzleitc.jenkins.command - -import com.puzzleitc.jenkins.command.context.PipelineContext - -class ReplaceFromVaultCommand { - - private final PipelineContext ctx - - ReplaceFromVaultCommand(PipelineContext ctx) { - this.ctx = ctx - } - - Object execute() { - ctx.info('-- replaceFromVault --') - def text = ctx.stepParams.getRequired('text') as String - def result = text - while (parseVaultLookup(result).size() > 0) { - def match = parseVaultLookup(result).get(0) - if (match.path) { - def replacedValue = ctx.lookupValueFromVault(match.path, match.key) - result = result.substring(0, match.start) + replacedValue + result.substring(match.end, result.length()) - } - } - return result - } - - private static List parseVaultLookup(String lookup) { - def matcher = lookup =~ /(?m)\{\{\s*vault\.get\(\s*"([^"]+)",\s*"([^"]+)"\s*\)\s*\}\}/ - def result = [] - while (matcher.find()) { - result.add(new VaultMatch(path: matcher.group(1), key: matcher.group(2), start: matcher.start(), end: matcher.end())) - } - return result - } - - private static class VaultMatch { - String path - String key - int start - int end - } - -} From 4127b4eb696793e30345eaa5de9c9b1b27042e83 Mon Sep 17 00:00:00 2001 From: Reto Galante Date: Wed, 10 Jul 2024 14:02:31 +0200 Subject: [PATCH 5/9] Adds java 21 support (cherry picked from commit e78426b7f4ead5a2c89ce70d7ec331ae178540ca) --- .github/workflows/gradle.yml | 8 ++--- build.gradle | 39 +++++++++++++----------- gradle/wrapper/gradle-wrapper.properties | 2 +- jenkins/docker-compose.yaml | 4 +-- test/groovy/util/Result.groovy | 2 +- test/groovy/vars/KustomizeTest.groovy | 4 +-- 6 files changed, 31 insertions(+), 28 deletions(-) diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index b8259cf..79b6849 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -11,11 +11,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up JDK 8 - uses: actions/setup-java@v2 + - uses: actions/checkout@v4 + - name: 
Set up JDK 21 + uses: actions/setup-java@v4 with: - java-version: '8' + java-version: '21' distribution: 'adopt' - name: Grant execute permission for gradlew run: chmod +x gradlew diff --git a/build.gradle b/build.gradle index de6cf0c..996e99d 100644 --- a/build.gradle +++ b/build.gradle @@ -16,9 +16,11 @@ sourceSets { } } -sourceCompatibility = 1.8 +java { + sourceCompatibility = JavaVersion.VERSION_17 +} -project.buildDir = 'target' +project.layout.buildDirectory = 'target' repositories { mavenCentral() @@ -31,31 +33,32 @@ dependencies { // mandatory dependencies for using Spock // note: please use same Groovy version as on Jenkins - implementation "org.codehaus.groovy:groovy-all:2.4.12" + implementation "org.codehaus.groovy:groovy-all:2.4.21" implementation "com.cloudbees:groovy-cps:1.31@jar", withoutIcu - implementation "org.jenkins-ci.main:jenkins-core:2.225", withoutIcu - implementation "org.jenkins-ci.plugins:badge:1.8@jar" + implementation "org.jenkins-ci.main:jenkins-core:2.452", withoutIcu + implementation "org.jenkins-ci.plugins:badge:1.13@jar" implementation "org.jenkins-ci.plugins:pipeline-maven:3.9.3@jar" // dependency for MavenReport implementation "org.jenkins-ci.plugins:credentials-binding:1.27.1@jar" - implementation "org.slf4j:jcl-over-slf4j:1.7.25" - testImplementation "org.slf4j:log4j-over-slf4j:1.7.25" - testImplementation "org.slf4j:slf4j-api:1.7.25" + implementation "org.slf4j:jcl-over-slf4j:2.0.13" + testImplementation "org.slf4j:log4j-over-slf4j:2.0.13" + testImplementation "org.slf4j:slf4j-api:2.0.12" - testImplementation "ch.qos.logback:logback-core:1.2.3" - testImplementation "ch.qos.logback:logback-classic:1.2.3" - testImplementation "com.google.guava:guava:20.0" + testImplementation "ch.qos.logback:logback-core:1.5.6" + testImplementation "ch.qos.logback:logback-classic:1.5.6" + testImplementation "com.google.guava:guava:33.2.1-jre" - testImplementation "org.spockframework:spock-core:1.3-groovy-2.4@jar" + testImplementation "org.codehaus.groovy:groovy-all:3.0.21" + testImplementation "org.spockframework:spock-core:2.3-groovy-3.0@jar" // Jenkins Pipeline Unit + JUnit 4 testImplementation "com.lesfurets:jenkins-pipeline-unit:1.13" - testImplementation "org.assertj:assertj-core:3.22.0" + testImplementation "org.assertj:assertj-core:3.26.0" // Jenkins related - testImplementation "com.homeaway.devtools.jenkins:jenkins-spock:2.1.4" - testImplementation "javax.servlet:javax.servlet-api:3.1.0" - testImplementation "org.jenkins-ci.main:jenkins-core:2.225", withoutIcu + testImplementation "com.homeaway.devtools.jenkins:jenkins-spock:2.1.5" + testImplementation "javax.servlet:javax.servlet-api:4.0.1" + testImplementation "org.jenkins-ci.main:jenkins-core:2.452", withoutIcu testImplementation "org.jenkins-ci.plugins.workflow:workflow-api:2.40@jar" testImplementation "org.jenkins-ci.plugins.workflow:workflow-step-api:2.22@jar" testImplementation "org.jenkins-ci.plugins.workflow:workflow-cps:2.78@jar" @@ -72,9 +75,9 @@ dependencies { // this is needed for spock to find all the source code in the var directory task copyGlobalLibVars (type: Copy) { - from "$rootDir/vars" + from layout.projectDirectory.dir("vars") include '**/*.groovy' - into "$buildDir/classes/vars" + into layout.buildDirectory.dir("classes/vars") } compileTestGroovy { diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 6c9a224..0d18421 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ 
distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.6-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/jenkins/docker-compose.yaml b/jenkins/docker-compose.yaml index 06d37b2..015d008 100644 --- a/jenkins/docker-compose.yaml +++ b/jenkins/docker-compose.yaml @@ -5,8 +5,8 @@ services: privileged: true user: root ports: - - 8081:8080 - - 50000:50000 + - "8081:8080" + - "50000:50000" container_name: jenkins volumes: - ~/jenkins:/var/jenkins_home diff --git a/test/groovy/util/Result.groovy b/test/groovy/util/Result.groovy index c7db8d9..382c294 100644 --- a/test/groovy/util/Result.groovy +++ b/test/groovy/util/Result.groovy @@ -7,7 +7,7 @@ class Result { def Result() {} - def int status() { + int status() { return status } } \ No newline at end of file diff --git a/test/groovy/vars/KustomizeTest.groovy b/test/groovy/vars/KustomizeTest.groovy index f6f68d1..c3f184d 100644 --- a/test/groovy/vars/KustomizeTest.groovy +++ b/test/groovy/vars/KustomizeTest.groovy @@ -1,7 +1,7 @@ package groovy.vars import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString -import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assertions.* import com.lesfurets.jenkins.unit.BasePipelineTest import org.junit.Before @@ -9,7 +9,7 @@ import org.junit.Test class KustomizeTest extends BasePipelineTest { - Script kustomize; + Script kustomize @Before @Override From d956080af3f69a5d1e04f03a97b9c046ee73dc6f Mon Sep 17 00:00:00 2001 From: SylivanKenobi Date: Fri, 13 Oct 2023 14:38:13 +0200 Subject: [PATCH 6/9] remove all methods who are deprecated or use bad plugins --- vars/addDeployLinks.groovy | 7 -- vars/addDeployLinks.txt | 21 ----- vars/check.groovy | 7 -- vars/check.txt | 17 ---- vars/cleanBuilds.groovy | 37 --------- vars/cleanBuilds.txt | 22 ----- vars/deployChangedComponents.groovy | 119 ---------------------------- vars/deployChangedComponents.txt | 36 --------- vars/openshiftUtils.groovy | 75 ------------------ vars/openshiftUtils.txt | 87 -------------------- vars/replaceFromVault.groovy | 10 --- vars/replaceFromVault.txt | 32 -------- vars/trackComponentVersions.groovy | 91 --------------------- vars/trackComponentVersions.txt | 47 ----------- vars/withGitCrypt.groovy | 9 --- vars/withGitCrypt.txt | 38 --------- 16 files changed, 655 deletions(-) delete mode 100644 vars/addDeployLinks.groovy delete mode 100644 vars/addDeployLinks.txt delete mode 100644 vars/check.groovy delete mode 100644 vars/check.txt delete mode 100644 vars/cleanBuilds.groovy delete mode 100644 vars/cleanBuilds.txt delete mode 100644 vars/deployChangedComponents.groovy delete mode 100644 vars/deployChangedComponents.txt delete mode 100644 vars/openshiftUtils.groovy delete mode 100644 vars/openshiftUtils.txt delete mode 100644 vars/replaceFromVault.groovy delete mode 100644 vars/replaceFromVault.txt delete mode 100644 vars/trackComponentVersions.groovy delete mode 100644 vars/trackComponentVersions.txt delete mode 100644 vars/withGitCrypt.groovy delete mode 100644 vars/withGitCrypt.txt diff --git a/vars/addDeployLinks.groovy b/vars/addDeployLinks.groovy deleted file mode 100644 index 3220181..0000000 --- a/vars/addDeployLinks.groovy +++ /dev/null @@ -1,7 +0,0 @@ -import com.puzzleitc.jenkins.command.AddDeployLinksCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext - -def call(Map params = [:]) 
{ - AddDeployLinksCommand command = new AddDeployLinksCommand(new JenkinsPipelineContext(this, params)) - command.execute() -} diff --git a/vars/addDeployLinks.txt b/vars/addDeployLinks.txt deleted file mode 100644 index 24bd124..0000000 --- a/vars/addDeployLinks.txt +++ /dev/null @@ -1,21 +0,0 @@ - -

- Adds a deployment link to a specific build.
- This can be useful if a deploy job should be triggered when a build was successful. -

-

- parameters: -

    -
  • - deployJob: Name of the job that should be triggered via the link -
  • -
-

-

- examples: -

- addDeployLinks deployJob: 'integration-pipeline-deploy' -

-

- - \ No newline at end of file diff --git a/vars/check.groovy b/vars/check.groovy deleted file mode 100644 index bcaaf14..0000000 --- a/vars/check.groovy +++ /dev/null @@ -1,7 +0,0 @@ -// Deprecated: Will be implemented as a step. -def mandatoryParameter(parameterName) { - if (!params.containsKey(parameterName)) { - currentBuild.result = 'ABORTED' - error('missing parameter: ' + parameterName) - } -} diff --git a/vars/check.txt b/vars/check.txt deleted file mode 100644 index 3badd5c..0000000 --- a/vars/check.txt +++ /dev/null @@ -1,17 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries') -

-
-
check.mandatoryParameter()
-
-

- Deprecated: Will be implemented as a step. -

-

- Checks if a parameter with the given name is available. If not, the build will be aborted with an error message. -

-
-
- - diff --git a/vars/cleanBuilds.groovy b/vars/cleanBuilds.groovy deleted file mode 100644 index 640c812..0000000 --- a/vars/cleanBuilds.groovy +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env groovy - -import com.jenkinsci.plugins.badge.action.BadgeAction - -// the function implements how many build per environment should be kept -// parameter is maxKeepBuilds -def call(Map args) { - def maxNumberToKeepBuilds = args?.maxKeepBuilds ?: 10 - - def environmentBuildCount = [:] - Jenkins.instance.getItemByFullName(args.job) - .getBuilds() - .findAll { it.isKeepLog() } - .each { build -> - deployedEnvironment = [] - build.getActions(BadgeAction.class).each { - deployedEnvironment << it.id - environmentBuildCount[it.id] = environmentBuildCount.get(it.id, 0) + 1 - } - - // each Build that should be kept will be stored in keepBuild map - def keepBuild = [] - deployedEnvironment.each { - if (environmentBuildCount[it] <= maxNumberToKeepBuilds) { - keepBuild << it - } - } - - // print out reason of/not keeping the build - if (keepBuild) { - echo "Keeping build ${build} because of the following promotions: ${keepBuild.join(' ')}" - } else { - echo "Deleting build ${build}" - build.delete() - } - } -} diff --git a/vars/cleanBuilds.txt b/vars/cleanBuilds.txt deleted file mode 100644 index 0a6cf1b..0000000 --- a/vars/cleanBuilds.txt +++ /dev/null @@ -1,22 +0,0 @@ - -

- The cleanBuilds function offers housekeeping for the integration pipeline.
- Every build of the integration pipeline which was deployed on an environment is automatically kept. - To prevent an excessive number of kept builds, this variable cleans up these jobs. It distinguishes between deployments for each environment. -

-

- parameters: -

    -
  • job (mandatory): identifies the job on which the housekeeping should be done
  • -
  • maxKeepBuilds (optional, default 10): defines how many deployed builds per environment should be kept (counting from the newest to maxKeepBuilds)
  • -
-

-

- examples: -

- cleanBuilds job: 'project-a/integration-pipeline'
- cleanBuilds job: 'project-a/integration-pipeline', maxKeepBuilds: 10 -

-

- - \ No newline at end of file diff --git a/vars/deployChangedComponents.groovy b/vars/deployChangedComponents.groovy deleted file mode 100644 index c4e8e60..0000000 --- a/vars/deployChangedComponents.groovy +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env groovy - -import hudson.model.Run -import com.jenkinsci.plugins.badge.action.AbstractAction -import com.jenkinsci.plugins.badge.action.BadgeAction -import com.jenkinsci.plugins.badge.action.BadgeSummaryAction - -Map readComponentVersionsFromArtifact(String job, Object selector, String file) { - try { - copyArtifacts filter: file, projectName: job, selector: selector - def versions = readYaml(file: file) - if (versions instanceof Map) { - return versions - } else { - return [:] - } - // when no artifact available (e.g. its the first run) - // return an empty map instead of throwing an error - } catch (Exception e) { - return [:] - } -} - -void writeComponentVersionsToArtifact(String dataFile, Map componentVersions) { - // always remove the old file first - sh "rm -f ${dataFile}" - writeYaml file: dataFile, data: componentVersions - archiveArtifacts dataFile -} - -// always remove the old badge action before appling anything new -void removeBadgeAction(Run currentBuild, Class actionClass, String id) { - def badgeAction = currentBuild.getActions(actionClass).find { it.id == id } - if (badgeAction != null) { - currentBuild.removeAction(badgeAction) - } -} - -void setBadgeInBuildHistory(Run currentBuild, String icon, String altText, String id, String link) { - removeBadgeAction(currentBuild, BadgeAction.class, id) - - // as the badge-plugin does not support the adding of badges - // to other builds, we have to use the class directly - def badgeAction = BadgeAction.createBadge(icon, altText, link) - badgeAction.setId(id) - currentBuild.addAction(badgeAction) -} - -void setBadgeAndLinkInSummary(Run currentBuild, String icon, String id, String altText) { - removeBadgeAction(currentBuild, BadgeSummaryAction.class, id) - - // as the badge-plugin does not support the adding of badges - // to other builds, we have to use the class directly - def badgeSummaryAction = new BadgeSummaryAction(icon) - badgeSummaryAction.setId(id) - badgeSummaryAction.appendText(altText) - currentBuild.addAction(badgeSummaryAction) -} - -// Depending on the Badgelocation the Icon is either "large" or "small" -String getDeployIcon(String targetEnv, boolean isLarge = false) { - size = "16x16" - if (isLarge) { - size = "32x32" - } - if (targetEnv == "test") { - return '/plugin/promoted-builds/icons/' + size + '/star-silver.png' - } else if (targetEnv == "prod") { - return '/plugin/promoted-builds/icons/' + size + '/star-gold.png' - } else if (targetEnv == "int") { - return '/plugin/promoted-builds/icons/' + size + '/star-purple.png' - } - return '/plugin/promoted-builds/icons/' + size + '/star-orange.png' -} - -void addDeployedBadges() { - def built = Jenkins.instance.getItemByFullName(built_name).getBuild(built_number) - def deploy = currentBuild.rawBuild - - setBadgeInBuildHistory(deploy, getDeployIcon(target_env), "Deployed ${built_name} #${built_number} to ${target_env}", target_env, "/${built.getUrl()}") - setBadgeAndLinkInSummary(deploy, getDeployIcon(target_env, true), target_env, "Deployed ${built_name} #${built_number} to ${target_env}") - - setBadgeInBuildHistory(built, getDeployIcon(target_env), "Deployed to ${target_env} by ${env.JOB_NAME} #${env.BUILD_NUMBER}", target_env, "/${currentBuild.rawBuild.getUrl()}") - setBadgeAndLinkInSummary(built, 
getDeployIcon(target_env, true), target_env, "Deployed to ${target_env} by ${env.JOB_NAME} #${env.BUILD_NUMBER}") - // save is required to persist badges on other builds than the current - built.save() - - built.keepLog(true) -} - -def call() { - final DEPLOYED_VERSIONS_FILE = 'deployed-versions.yaml' - final COMPONENT_VERSIONS_FILE = 'component-versions.yaml' - final EXECUTION_JOB_FILE = 'rundeck-jobs.yaml' - - def newComponentVersions = readComponentVersionsFromArtifact(built_name, specific(built_number), COMPONENT_VERSIONS_FILE) - def deployedVersions = readComponentVersionsFromArtifact(env.JOB_NAME, lastSuccessful(), DEPLOYED_VERSIONS_FILE) - - def currentComponentVersions = deployedVersions[target_env] - deployedVersions[target_env] = newComponentVersions - - executionJobs = readYaml(file: EXECUTION_JOB_FILE) - - newComponentVersions.each { k, v -> - - if (!currentComponentVersions) { - echo "Installing component ${k} on environment ${target_env} with version ${v.version}." - echo "Executing rundeck job ${executionJobs[k].jobId} with env ${target_env} and version ${v.version}." // TODO: Replace with rundeck call - } else if (v.version == currentComponentVersions[k].version) { - echo "Component ${k} version ${v.version} already deployed, skipping." - } else { - echo "Updating component ${k} on environment ${target_env} from version ${currentComponentVersions[k].version} to ${v.version}." - echo "Executing rundeck job ${executionJobs[k].jobId} with env ${target_env} and version ${v.version}." // TODO: Replace with rundeck call - } - } - - writeComponentVersionsToArtifact DEPLOYED_VERSIONS_FILE, deployedVersions - addDeployedBadges() -} diff --git a/vars/deployChangedComponents.txt b/vars/deployChangedComponents.txt deleted file mode 100644 index 54c517a..0000000 --- a/vars/deployChangedComponents.txt +++ /dev/null @@ -1,36 +0,0 @@ - -

- This function can be used to deploy a component (or a set of components) from a specific integration pipeline to a distinct environment.
- It provides multiple features:
-
-   • checks the versions of the components that were last deployed on a specific environment
-   • checks the versions of the components that should be deployed on that environment
-   • deploys the delta set of components on that environment
-   • adds a deployment badge to the deployment pipeline job to indicate that a deployment was done for that environment
-
- The integration pipeline has to provide a valid component-versions.yaml file.
- deployChangedComponents() has to be called in a parameterized job, as it depends on the following parameters being set.
-
- parameters for the containing job:
-
-   • built_name: integration pipeline that is used to create the component-versions.yaml
-   • built_number: build number of the integration pipeline
-   • target_env: environment against which the component-versions.yaml should be applied
-
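- example of a containing job (a minimal sketch; the job name and default values are illustrative, and the Copy Artifact and Badge plugins are assumed to be installed):
-
- properties([
-     parameters([
-         string(name: 'built_name', defaultValue: 'project-a/integration-pipeline'),
-         string(name: 'built_number'),
-         string(name: 'target_env', defaultValue: 'test')
-     ])
- ])
-
- node {
-     checkout scm   // the workspace must provide the rundeck-jobs.yaml read by the step
-     deployChangedComponents()
- }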

-
- Attention:
- There is no implementation of the actual deployment.
- The function only prints placeholder echo output where the actual deployment calls would go.
-
- - \ No newline at end of file diff --git a/vars/openshiftUtils.groovy b/vars/openshiftUtils.groovy deleted file mode 100644 index c231080..0000000 --- a/vars/openshiftUtils.groovy +++ /dev/null @@ -1,75 +0,0 @@ -// Deprecated: Use the openshiftApply() step. - -// -// OpenShift resource update by applying a template. -// -// method parameters: -// ocpUrl -> url of the OpenShift server -// ocpProject -> project-name/namespace of the OpenShift project -// templateFile -> OpenShift template -// credentialsId -> credentials for the OpenShift login -// namespace -> true adds NAMESPACE_NAME param -// -void applyTemplate(String ocpUrl, String ocpProject, String templateFile, String credentialsId, boolean namespace) { - - echo "-- start resource update by template --" - echo "OpenShift server URL: $ocpUrl" - echo "OpenShift project: $ocpProject" - echo "resource file: $templateFile" - - withCredentials([[$class : 'StringBinding', - credentialsId : "${credentialsId}", - variable : 'openshift_token']]) { - withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin", "ocpUrl=${ocpUrl}"]) { - sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token" - sh "oc project $ocpProject" - sh "oc project" - sh "oc whoami" - - // apply template - if (namespace) { - sh "oc process -f $templateFile -p NAMESPACE_NAME=\$(oc project -q) | oc apply -f -" - } else { - sh "oc process -f $templateFile | oc apply -f -" - } - } - } -} - -// -// OpenShift resource update by applying a template with environment file. -// -// method parameters: -// ocpUrl -> url of the OpenShift server -// ocpProject -> project-name/namespace of the OpenShift project -// templateFile -> OpenShift template -// credentialsId -> credentials for the OpenShift login -// envFile -> environment file -// namespace -> true adds NAMESPACE_NAME param -// -void applyTemplateWithEnvFile(String ocpUrl, String ocpProject, String templateFile, String credentialsId, String envFile, boolean namespace) { - - echo "-- start resource update by template with environment file --" - echo "OpenShift server URL: $ocpUrl" - echo "OpenShift project: $ocpProject" - echo "resource file: $templateFile" - echo "environment file: $envFile" - - withCredentials([[$class : 'StringBinding', - credentialsId : "${credentialsId}", - variable : 'openshift_token']]) { - withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin", "ocpUrl=${ocpUrl}"]) { - sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token" - sh "oc project $ocpProject" - sh "oc project" - sh "oc whoami" - - // apply template - if (namespace) { - sh "oc process -f $templateFile -p NAMESPACE_NAME=\$(oc project -q) --param-file $envFile | oc apply -f -" - } else { - sh "oc process -f $templateFile --param-file $envFile | oc apply -f -" - } - } - } -} diff --git a/vars/openshiftUtils.txt b/vars/openshiftUtils.txt deleted file mode 100644 index bfda8fc..0000000 --- a/vars/openshiftUtils.txt +++ /dev/null @@ -1,87 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- openshiftUtils.applyTemplate(ocpUrl:String, ocpProject:String, templateFile:String, credentialsId:String, namespace:Boolean):void
-
- Deprecated: Use the openshiftApply() step.
-
- OpenShift resource update by applying a template.
- Example call:
- openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", false)
-
-   • ocpUrl - URL of the OpenShift server API endpoint.
-   • ocpProject - Project/namespace name of the OpenShift project in which to apply the template.
-   • templateFile - Path to the OpenShift template file.
-   • credentialsId - The credentialsId of an OpenShift auth token stored in the Jenkins credential manager with which to authenticate. The credential type is Secret text.
-   • namespace - Boolean parameter. If true, a template parameter named NAMESPACE_NAME is added whose value is the name of the current OpenShift project.
-
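- Under the hood the step logs in with the token and pipes oc process into oc apply, roughly equivalent to the following sketch of the step's own shell calls (the namespace variant additionally passes -p NAMESPACE_NAME=$(oc project -q)):
-
- // ocpUrl, ocpProject, templateFile and credentialsId are the parameters described above
- withCredentials([string(credentialsId: credentialsId, variable: 'openshift_token')]) {
-     withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin"]) {
-         sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token"
-         sh "oc project $ocpProject"
-         sh "oc process -f $templateFile | oc apply -f -"
-     }
- }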
-
- openshiftUtils.applyTemplateWithEnvFile(ocpUrl:String, ocpProject:String, templateFile:String, credentialsId:String, envFile:String, namespace:Boolean):void
-
- Deprecated: Use the openshiftApply() step.
-
- OpenShift resource update by applying a template with parameters from an environment file.
-
- The environment file contains keys and values separated by a colon.
- Example:
- KEY: 'value'
-
- Example call:
- openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", "test.yaml", false)
-
-   • ocpUrl - URL of the OpenShift server API endpoint.
-   • ocpProject - Project/namespace name of the OpenShift project in which to apply the template.
-   • templateFile - Path to the OpenShift template file.
-   • credentialsId - The credentialsId of an OpenShift auth token stored in the Jenkins credential manager with which to authenticate. The credential type is Secret text.
-   • envFile - Path to the environment file holding all parameters with their values.
-   • namespace - Boolean parameter. If true, a template parameter named NAMESPACE_NAME is added whose value is the name of the current OpenShift project.
-
- - diff --git a/vars/replaceFromVault.groovy b/vars/replaceFromVault.groovy deleted file mode 100644 index f0ab575..0000000 --- a/vars/replaceFromVault.groovy +++ /dev/null @@ -1,10 +0,0 @@ -import com.puzzleitc.jenkins.command.ReplaceFromVaultCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext - -import static com.puzzleitc.jenkins.util.Args.parseArgs - -def call(Map namedArgs = [:], Object... positionalArgs) { - def args = parseArgs(namedArgs, positionalArgs, ['text']) - ReplaceFromVaultCommand command = new ReplaceFromVaultCommand(new JenkinsPipelineContext(this, args)) - return command.execute() -} \ No newline at end of file diff --git a/vars/replaceFromVault.txt b/vars/replaceFromVault.txt deleted file mode 100644 index 0680a3e..0000000 --- a/vars/replaceFromVault.txt +++ /dev/null @@ -1,32 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- replaceFromVault(text:String):String
-
- Substitutes variables in the provided string with secrets retrieved from HashiCorp Vault. The string
- will be scanned for the pattern {{ vault.get("<VAULT_PATH>", "<VAULT_KEY>") }},
- where VAULT_PATH defines the fully qualified path of the secret in Vault and
- VAULT_KEY the key to look up in the specified secret. Example pattern:
- {{ vault.get("/kv/spaces/outerspace/wekan-secret-test", "mongodb_user") }}.
-
-   • text (required) - The string containing the predefined pattern that should be substituted with secrets from HashiCorp Vault.
-
- Example:
-
- replaceFromVault(text: kustomize(path: 'openshift/postgresql/overlays/dev'))
-
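- A variant with a literal string (sketch; the manifest is illustrative, the Vault path and key are taken from the example pattern above):
-
- def manifest = '''
- apiVersion: v1
- kind: Secret
- metadata:
-   name: wekan
- stringData:
-   username: {{ vault.get("/kv/spaces/outerspace/wekan-secret-test", "mongodb_user") }}
- '''
- def resolved = replaceFromVault(text: manifest)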
- - diff --git a/vars/trackComponentVersions.groovy b/vars/trackComponentVersions.groovy deleted file mode 100644 index 9f5071d..0000000 --- a/vars/trackComponentVersions.groovy +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env groovy -import hudson.model.Cause -import org.jenkinsci.plugins.pipeline.maven.publishers.MavenReport - -Map readComponentVersionsFromArtifact(String job, Object selector, String file) { - try { - copyArtifacts filter: file, projectName: job, selector: selector - def versions = readYaml(file: file) - if (versions instanceof Map) { - return versions - } else { - return [:] - } - // when no artifact available (e.g. its the first run) - // return an empty map instead of throwing an error - } catch (Exception e) { - return [:] - } -} - -void writeComponentVersionsToArtifact(String dataFile, Map componentInfos) { - // always remove the old file first - sh "rm -f ${dataFile}" - writeYaml file: dataFile, data: componentInfos - archiveArtifacts dataFile -} - -def getUpstreamCauses() { - def result = [] - - def buildCauses = currentBuild.rawBuild.getCauses() - buildCauses.each { - if (it instanceof Cause.UpstreamCause) { - def upstreamCause = it as Cause.UpstreamCause - result << [job: upstreamCause.upstreamProject, build: upstreamCause.upstreamBuild.toString()] - } - } - - return result -} - -def getComponentInformations(String componentVersion, Boolean externalComponent = false) { - def infos = [version: componentVersion + '-' + env.BUILD_NUMBER, job: env.JOB_NAME, buildNumber: env.BUILD_NUMBER as int, buildUrl: env.BUILD_URL] - if(externalComponent) { - infos = [version: componentVersion] - } - - // does the current build contain a maven build? - def mavenReport = currentBuild.rawBuild.getActions(MavenReport.class) - if (mavenReport) { - def mavenArtifacts = [] - mavenReport[0].getDeployedArtifacts().each { - mavenArtifacts << it.url - } - infos['artifacts'] = mavenArtifacts - } - - return infos -} - -def call(Map args) { - final COMPONENT_VERSIONS_FILE = 'component-versions.yaml' - - lock(resource: 'trackComponentVersions', inversePrecedence: false) { - def componentInfos = readComponentVersionsFromArtifact(env.JOB_NAME, lastSuccessful(), COMPONENT_VERSIONS_FILE) - - // If a build is taking a long time multiple other build requests can - // occour so there may be multiple build causes: http://javadoc.jenkins-ci.org/hudson/model/Run.html#getCauses-- - getUpstreamCauses().each { - def upstreamVersions = readComponentVersionsFromArtifact(it.job, specific(it.build), COMPONENT_VERSIONS_FILE) - componentInfos.putAll(upstreamVersions) - } - // check if pomFile location is passed - if (args?.pomFile) { - def mavenCoordinates = readMavenPom(file: args.pomFile) - componentInfos[mavenCoordinates.artifactId] = getComponentInformations(mavenCoordinates.version) - // is version yaml passed? 
- } else if (args?.versionFile) { - releaseVersion = readYaml(file: args.versionFile) - if (args?.containsExternalComponents) { - releaseVersion.each { k, v -> componentInfos[k] = getComponentInformations(v, args.containsExternalComponents) } - } else { - releaseVersion.each { k, v -> componentInfos[k] = getComponentInformations(v) } - } - } else { - error(getClass().getName() + ': Either pomFile or versionFile must be set!') - } - - writeComponentVersionsToArtifact(COMPONENT_VERSIONS_FILE, componentInfos) - } -} diff --git a/vars/trackComponentVersions.txt b/vars/trackComponentVersions.txt deleted file mode 100644 index a188178..0000000 --- a/vars/trackComponentVersions.txt +++ /dev/null @@ -1,47 +0,0 @@ - -

- Tracks the version of a component and its upstreams.
- For upstream jobs the information about the component versions is retrieved from a file called component-versions.yaml.
- The structure of this file:
-
- [componentname]:
-   version: [componentversion]
-   job: [name of the job]
-   buildNumber: [number of the last successful build]
-   buildUrl: [URL of the last successful build]
-   artifacts: [list of artifacts]
-
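- A concrete entry might look like this (sketch; all values are illustrative):
-
- billing-service:
-   version: 1.4.2-17
-   job: project-a/billing-service
-   buildNumber: 17
-   buildUrl: https://jenkins.example.com/job/project-a/job/billing-service/17/
-   artifacts:
-     - https://repo.example.com/releases/billing-service-1.4.2.jar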

-
- Stores this information in the file component-versions.yaml.
- If trackComponentVersions is called from multiple upstream projects it will aggregate the information about every component in the component-versions.yaml.
-
- parameters:
-
-   • pomFile: Path (relative to the project base) to a pom file containing version information
-     • the version is retrieved from the pom's version tag
-     • the artifactId is retrieved from the pom's artifactId tag
-   • versionFile: Path (relative to the project base) to a yaml file containing version information
-   • containsExternalComponents (optional): Flag indicating whether the passed file contains external components
-
- If the file cannot be found the function returns an empty map.
-
- return:
-
- The result file must exist in the repository but can be empty.
-
- examples:
-
- trackComponentVersions versionFile: 'project-a/version.yml'
- trackComponentVersions pomFile: 'project-a/pom.xml'
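-
- A typical call site in an upstream build (sketch; assumes the repository has a pom.xml at its root and that withMaven from the Pipeline Maven plugin is available, so that deployed artifacts are picked up):
-
- node {
-     checkout scm
-     withMaven() {
-         sh 'mvn -B deploy'
-     }
-     trackComponentVersions pomFile: 'pom.xml'
- }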

-

- - \ No newline at end of file diff --git a/vars/withGitCrypt.groovy b/vars/withGitCrypt.groovy deleted file mode 100644 index d6d17a9..0000000 --- a/vars/withGitCrypt.groovy +++ /dev/null @@ -1,9 +0,0 @@ -import com.puzzleitc.jenkins.command.GitCryptCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext -import static com.puzzleitc.jenkins.util.Args.parseArgs - -def call(Map namedArgs = [:], Object... positionalArgs) { - def args = parseArgs(namedArgs, positionalArgs, ['credentialsId', 'body']) - GitCryptCommand command = new GitCryptCommand(new JenkinsPipelineContext(this, args)) - command.execute() -} diff --git a/vars/withGitCrypt.txt b/vars/withGitCrypt.txt deleted file mode 100644 index c3c09c7..0000000 --- a/vars/withGitCrypt.txt +++ /dev/null @@ -1,38 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- withGitCrypt(credentialsId:String):Object {…}
-
- Unlocks the Git repository in the current directory with git-crypt for the duration of the scope of the step
- and locks it again afterwards. The repository is also relocked when an error occurs in the scope of the step.
- The return value is the value returned by (or the value of the last statement within) the scope of the step.
- If credentialsId is null, git-crypt is not called but the passed block/closure is still executed. This can
- be used to run git-crypt conditionally.
-
-   • credentialsId - The credentials id of a git-crypt keyfile stored in the Jenkins credential manager.
-
- Example:
-
- withGitCrypt(credentialsId: 'my-git-crypt-keyfile') {
-     // work with decrypted files
- }
- // files are encrypted again
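-
- A conditional variant (sketch; the branch check is only an illustration of passing null to skip git-crypt):
-
- def cryptKeyId = env.BRANCH_NAME == 'master' ? 'my-git-crypt-keyfile' : null
- withGitCrypt(credentialsId: cryptKeyId) {
-     // the closure still runs; git-crypt is only invoked when cryptKeyId is non-null
- }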
-
- For further information see the git-crypt documentation.
-
- - From 149afbd28dff939e4fa18aa4d1e7f8e88bd7d362 Mon Sep 17 00:00:00 2001 From: SylivanKenobi Date: Tue, 17 Oct 2023 10:32:31 +0200 Subject: [PATCH 7/9] remove unused tests --- README.md | 42 --------------- test/groovy/vars/CheckPipelineTest.groovy | 51 ------------------- test/groovy/vars/CheckTest.groovy | 56 -------------------- test/groovy/vars/OpenshiftUtilsSpec.groovy | 59 ---------------------- 4 files changed, 208 deletions(-) delete mode 100644 test/groovy/vars/CheckPipelineTest.groovy delete mode 100644 test/groovy/vars/CheckTest.groovy delete mode 100644 test/groovy/vars/OpenshiftUtilsSpec.groovy diff --git a/README.md b/README.md index 862162f..f50971b 100644 --- a/README.md +++ b/README.md @@ -8,31 +8,6 @@ Documentation: ## global variables -### addDeployLinks - -Adds a link(to another job) for a specific build from the build history of a jenkins job. -See it's [documentation](vars/addDeployLinks.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/addDeployLinks.groovy](vars/addDeployLinks.groovy) - -### cleanBuilds - -Cleans up the build history of the jenkins job. It can differentiate deployments to specific environments. -See it's [documentation](vars/cleanBuilds.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/cleanBuilds.groovy](vars/cleanBuilds.groovy) - -### deployChangedComponents - -This function can be used as a template for deploying a component on a specific environment. -There is no actual implementation of a deployment but a placeholder with an echo output. -See it's [documentation](vars/deployChangedComponents.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/deployChangedComponents.groovy](vars/deployChangedComponents.groovy) - ### executable Ensures that the given executable is available on the current Jenkins agent, installing it with a Jenkins tool installer @@ -63,23 +38,6 @@ Requests OpenShift to start build from the specified build config and waits for Runs the OWASP dependency-check tool. -### replaceFromVault - -Substitutes variables in a provided String with secrets retrieved from HashiCorp Vault. - -### trackComponentVersions - -This function tracks information about the current deployment state of a component on a specific environment. -See it's [documentation](vars/trackComponentVersions.txt). -This function is a component of the [Integration Pipeline](doc/Integration-Pipeline.md). - -Source: [vars/trackComponentVersions.groovy](vars/trackComponentVersions.groovy) - -### withGitCrypt - -Unlocks the Git repository in the current directory with `git-crypt` for the duration of the scope of the - step and locks it again afterwards. 
- ## Groovy source files ### DockerHub diff --git a/test/groovy/vars/CheckPipelineTest.groovy b/test/groovy/vars/CheckPipelineTest.groovy deleted file mode 100644 index 5d2a75b..0000000 --- a/test/groovy/vars/CheckPipelineTest.groovy +++ /dev/null @@ -1,51 +0,0 @@ -package groovy.pipeline - -import com.lesfurets.jenkins.unit.declarative.DeclarativePipelineTest -import org.junit.Before -import org.junit.Test - -import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString -import static com.lesfurets.jenkins.unit.global.lib.LibraryConfiguration.library -import static com.lesfurets.jenkins.unit.global.lib.ProjectSource.projectSource -import static org.assertj.core.api.Assertions.assertThat - -class CheckPipelineTest extends DeclarativePipelineTest { - - @Before - @Override - void setUp() throws Exception { - super.setUp() - helper.libLoader.preloadLibraryClasses = false - def library = library().name('jenkins-shared-library') - .defaultVersion('master') - .allowOverride(true) - .implicit(true) - .targetPath('') - .retriever(projectSource()) - .build() - helper.registerSharedLibrary(library) - } - - @Test - void itShouldAbortForMissingMandatoryParameter() { - // when - runScript('test/resources/vars/check/mandatoryParameterMissing.groovy') - - // then - assertThat(helper.callStack.findAll { call -> - call.methodName == "error" - }.any { call -> - callArgsToString(call).contains("missing parameter: hallo") - }).isTrue() - printCallStack() - } - - @Test - void itShouldNotAbortForAvailableMandatoryParameter() { - // when - runScript('test/resources/vars/check/mandatoryParameter.groovy') - - // then - assertJobStatusSuccess() - } -} diff --git a/test/groovy/vars/CheckTest.groovy b/test/groovy/vars/CheckTest.groovy deleted file mode 100644 index 952e52b..0000000 --- a/test/groovy/vars/CheckTest.groovy +++ /dev/null @@ -1,56 +0,0 @@ -package groovy.vars - -import static org.assertj.core.api.Assertions.* -import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString - -import com.lesfurets.jenkins.unit.BasePipelineTest -import org.junit.Before -import org.junit.Test - -/** - * Tests for vars/check.groovy - */ -class CheckTest extends BasePipelineTest { - - private Script check - - @Override - @Before - void setUp() { - super.setUp() - check = loadScript('vars/check.groovy') - } - - @Test - void itShouldAbortForMissingMandatoryParameter() { - // given - // Override error behaviour as it is not correctly implemented by the test framework - helper.registerAllowedMethod('error', [String.class]) - check.binding.setVariable('params', [:]) - - // when - check.mandatoryParameter('hallo') - - // then - assertJobStatusAborted() - assertThat(helper.callStack.findAll { call -> - call.methodName == 'error' - }.any { call -> - callArgsToString(call).contains('missing parameter: hallo') - }).isTrue() - } - - @Test - void itShouldNotAbortForAvailableMandatoryParameter() { - // given - check.binding.setVariable('currentBuild', [result: 'null']) - check.binding.setVariable('params', [hallo: 'test']) - - // when - check.mandatoryParameter('hallo') - - // then - assertThat(check.binding.getVariable('currentBuild').result as String).isEqualTo('null') - assertThat(helper.callStack).filteredOn({ it.methodName == 'error' }).isEmpty() - } -} diff --git a/test/groovy/vars/OpenshiftUtilsSpec.groovy b/test/groovy/vars/OpenshiftUtilsSpec.groovy deleted file mode 100644 index 6dc3032..0000000 --- a/test/groovy/vars/OpenshiftUtilsSpec.groovy +++ /dev/null @@ -1,59 +0,0 @@ -package groovy.vars - -import 
com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification - -class OpenshiftUtilsSpec extends JenkinsPipelineSpecification { - - def openshiftUtils = loadPipelineScriptForTest('vars/openshiftUtils.groovy') - - def 'Applys the template without namespace'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", false) - then: - 1 * getPipelineMock('echo').call('OpenShift server URL: https://console.appuio.ch/') - 1 * getPipelineMock('echo').call('OpenShift project: my-appuio-project') - 1 * getPipelineMock('echo').call('resource file: template.yaml') - 1 * getPipelineMock('withCredentials').call(_) >> { _arguments -> - def credentialsId = 'APPUiO_login_token' - credentialsId == _arguments[0][0].credentialsId[0] - } - 1 * getPipelineMock('tool').call(_) >> '/home/jenkins' - 1 * getPipelineMock('withEnv').call(_) >> { _arguments -> -// TODO: pwd mock - def env = ["KUBECONFIG=null/test/path/.kube", "PATH+OC_HOME=/home/jenkins/bin", "ocpUrl=https://console.appuio.ch/"] - env == _arguments[0][0] - } - 1 * getPipelineMock('sh').call('oc login https://console.appuio.ch/ --insecure-skip-tls-verify=true --token=Mock Generator for [openshift_token]') - 1 * getPipelineMock('sh').call('oc process -f template.yaml | oc apply -f -') - } - - def 'Applys the template with namespace'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", true) - then: - 1 * getPipelineMock('sh').call('oc process -f template.yaml -p NAMESPACE_NAME=$(oc project -q) | oc apply -f -') - } - - def 'Applys the template with envFile and without namespace'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", 'envFile.env', false) - then: - 1 * getPipelineMock('echo').call('environment file: envFile.env') - 1 * getPipelineMock('sh').call('oc process -f template.yaml --param-file envFile.env | oc apply -f -') - } - - def 'Applys the template with namespace and envFile'() { - setup: - explicitlyMockPipelineVariable('openshift_token') - when: - openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", 'envFile.env', true) - then: - 1 * getPipelineMock('sh').call('oc process -f template.yaml -p NAMESPACE_NAME=$(oc project -q) --param-file envFile.env | oc apply -f -') - } -} From 305dbbf4ab759c094c401eca8af44e8a7d3ef3c1 Mon Sep 17 00:00:00 2001 From: Lukas Koller Date: Fri, 27 Oct 2023 10:17:12 +0200 Subject: [PATCH 8/9] add rocketSend function --- README.md | 4 ++++ vars/rocketSend.groovy | 22 ++++++++++++++++++++++ vars/rocketSend.txt | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 60 insertions(+) create mode 100644 vars/rocketSend.groovy create mode 100644 vars/rocketSend.txt diff --git a/README.md b/README.md index f50971b..8ad75a9 100644 --- a/README.md +++ b/README.md @@ -38,6 +38,10 @@ Requests OpenShift to start build from the specified build config and waits for Runs the OWASP dependency-check tool. +### rocketSend + +Sends message to the specified WebHook. 
+ ## Groovy source files ### DockerHub diff --git a/vars/rocketSend.groovy b/vars/rocketSend.groovy new file mode 100644 index 0000000..ec24ba4 --- /dev/null +++ b/vars/rocketSend.groovy @@ -0,0 +1,22 @@ +def void call(String webHook, String message, String avatar = null, Boolean rawMessage = true) { + Map data = [:] as Map + data['text'] = message + data['rawMessage'] = rawMessage + + if (avatar != null) { + data['avatar'] = avatar + } + + def curlCommand = 'curl ' + + ' -X POST "' + webHook + '" ' + + ' -H "Content-Type: application/json" ' + + ' --data \'' + groovy.json.JsonOutput.toJson(data) + '\' ' + + def returnCode = sh(script: curlCommand, returnStatus: true) + + if (returnCode != 0) { + error("RocketChat notification failed!") + } else { + echo "RocketChat notification sent successfully" + } +} \ No newline at end of file diff --git a/vars/rocketSend.txt b/vars/rocketSend.txt new file mode 100644 index 0000000..77cb054 --- /dev/null +++ b/vars/rocketSend.txt @@ -0,0 +1,34 @@ + +

+ Part of @Library('jenkins-pipeline-shared-libraries')
+
+ rocketSend(webHook:String, message:String, avatar:String=null, rawMessage:Boolean=true):void
+
+ Publishes a message to chat.puzzle.ch. The channel destination is disabled for the integration; therefore you need to create the WebHook first.
+ Example call:
+ rocketSend("https://chat.puzzle.ch/hooks/../..", "Build Successful", "https://chat.puzzle.ch/emoji-custom/success.png", true)
+
+   • webHook - WebHook URL
+   • message - Content of the Rocket.Chat message
+   • avatar - Avatar image used in Rocket.Chat
+   • rawMessage - Whether the message should be sent as raw text
+
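+ A typical use is a notification at the end of a scripted build (sketch; the WebHook URL placeholder is the one from the example call above):
+
+ node {
+     try {
+         // build steps ...
+     } catch (e) {
+         rocketSend('https://chat.puzzle.ch/hooks/../..', "Build failed: ${env.JOB_NAME} #${env.BUILD_NUMBER}")
+         throw e
+     }
+ }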
+ + From 860b7d3a67b1b8d71e8275a74353fa9c82e9e55b Mon Sep 17 00:00:00 2001 From: Sylvain Gilgen Date: Fri, 3 Nov 2023 15:31:20 +0100 Subject: [PATCH 9/9] remove unused classes --- .../puzzleitc/jenkins/MavenCoordinates.groovy | 63 ------------------- src/com/puzzleitc/jenkins/Util.groovy | 60 ------------------ .../command/AddDeployLinksCommand.groovy | 27 -------- .../jenkins/command/GitCryptCommand.groovy | 34 ---------- .../command/ReplaceFromVaultCommand.groovy | 43 ------------- 5 files changed, 227 deletions(-) delete mode 100644 src/com/puzzleitc/jenkins/MavenCoordinates.groovy delete mode 100644 src/com/puzzleitc/jenkins/Util.groovy delete mode 100644 src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy delete mode 100644 src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy delete mode 100644 src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy diff --git a/src/com/puzzleitc/jenkins/MavenCoordinates.groovy b/src/com/puzzleitc/jenkins/MavenCoordinates.groovy deleted file mode 100644 index e060a5d..0000000 --- a/src/com/puzzleitc/jenkins/MavenCoordinates.groovy +++ /dev/null @@ -1,63 +0,0 @@ -package com.puzzleitc.jenkins - -class MavenCoordinates implements Serializable { - String groupId - String artifactId - String version - String packaging - String classifier - - @com.cloudbees.groovy.cps.NonCPS - String toString() { - return "${groupId}:${artifactId}:${version}:${packaging}${classifier?.trim() ? ":${classifier}" : "" }" - } - - @com.cloudbees.groovy.cps.NonCPS - int hashCode() { - final int prime = 31 - int result = 1 - result = prime * result + ((artifactId == null) ? 0 : artifactId.hashCode()) - result = prime * result + ((classifier == null) ? 0 : classifier.hashCode()) - result = prime * result + ((groupId == null) ? 0 : groupId.hashCode()) - result = prime * result + ((packaging == null) ? 0 : packaging.hashCode()) - result = prime * result + ((version == null) ? 
0 : version.hashCode()) - return result - } - - @com.cloudbees.groovy.cps.NonCPS - boolean equals(Object obj) { - if (this == obj) - return true - if (obj == null) - return false - if (getClass() != obj.getClass()) - return false - MavenCoordinates other = (MavenCoordinates) obj - if (artifactId == null) { - if (other.artifactId != null) - return false - } else if (!artifactId.equals(other.artifactId)) - return false - if (classifier == null) { - if (other.classifier != null) - return false - } else if (!classifier.equals(other.classifier)) - return false - if (groupId == null) { - if (other.groupId != null) - return false - } else if (!groupId.equals(other.groupId)) - return false - if (packaging == null) { - if (other.packaging != null) - return false - } else if (!packaging.equals(other.packaging)) - return false - if (version == null) { - if (other.version != null) - return false - } else if (!version.equals(other.version)) - return false - return true - } -} diff --git a/src/com/puzzleitc/jenkins/Util.groovy b/src/com/puzzleitc/jenkins/Util.groovy deleted file mode 100644 index 240a083..0000000 --- a/src/com/puzzleitc/jenkins/Util.groovy +++ /dev/null @@ -1,60 +0,0 @@ -package com.puzzleitc.jenkins - -// This class is deprecated, use src/com/puzzleitc/jenkins/util/Args.groovy instead - -@Deprecated -class Util { - static Map parseArgs(namedArgs, positionalArgs, List requiredParams, Map optionalParams = [:]) { - - // null can be incorrectly assigned to namedArgs or positionalArgs if there are no other named or positional arguments, fix it - if (namedArgs == null) { - namedArgs = [:] - positionalArgs = ([null] as Object[]) + positionalArgs - } - if (positionalArgs == null) { - positionalArgs = ([null] as Object[]) - } - - // If the last argument is a closure it always goes into the last parameter - def positionalArgsCount = positionalArgs?.length - if (positionalArgsCount && positionalArgs[-1] instanceof Closure) { - println("closure") - def lastKey = optionalParams.size() ? 
optionalParams.keySet().last() : requiredParams[-1] - namedArgs[lastKey] = positionalArgs[-1] - positionalArgsCount-- - } - - int i = 0 - for (def item: requiredParams) { - if (namedArgs.containsKey(item)) { - if (i < positionalArgsCount) { - throw new IllegalArgumentException("Multiple values for argument '${item}'") - } - } else { - if (i < positionalArgsCount) { - namedArgs[item] = positionalArgs[i] - } else { - throw new IllegalArgumentException("Missing argument '${item}'") - } - } - i++ - } - - for (def item: optionalParams) { - if (namedArgs.containsKey(item.key)) { - if (i < positionalArgsCount) { - throw new IllegalArgumentException("Multiple values for argument '${item.key}'") - } - } else { - if (i < positionalArgsCount) { - namedArgs[item.key] = positionalArgs[i] - } else { - namedArgs[item.key] = item.value - } - } - i++ - } - - return namedArgs - } -} diff --git a/src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy b/src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy deleted file mode 100644 index c0623f8..0000000 --- a/src/com/puzzleitc/jenkins/command/AddDeployLinksCommand.groovy +++ /dev/null @@ -1,27 +0,0 @@ -package com.puzzleitc.jenkins.command - -import com.puzzleitc.jenkins.command.context.PipelineContext -import com.puzzleitc.jenkins.command.context.StepParams - -class AddDeployLinksCommand { - - private final PipelineContext ctx - - AddDeployLinksCommand(PipelineContext ctx) { - this.ctx = ctx - } - - void execute() { - ctx.info('-- AddDeployLinks --') - def deployJob = ctx.stepParams.getOptional('deployJob') - if (deployJob == null) { - error(ctx.getClass().getName() + ': No deploymentJob found. Must be specified!') - } - ctx.echo("deployJob: " + deployJob) - def deploymentJob = Jenkins.instance.getItemByFullName(deployJob) - if (deploymentJob == null) { - error(ctx.getClass().getName() + ": can't find job '${deploymentJob}'!" 
) - } - ctx.addHtmlBadge html:"Deploy " - } -} \ No newline at end of file diff --git a/src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy b/src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy deleted file mode 100644 index c28068c..0000000 --- a/src/com/puzzleitc/jenkins/command/GitCryptCommand.groovy +++ /dev/null @@ -1,34 +0,0 @@ -package com.puzzleitc.jenkins.command - -import com.puzzleitc.jenkins.command.context.PipelineContext - -class GitCryptCommand { - - private final PipelineContext ctx - - GitCryptCommand(PipelineContext ctx) { - this.ctx = ctx - } - - void execute() { - def credentialsId = ctx.stepParams.getRequired('credentialsId') as String - def body = ctx.stepParams.getRequired('body') as Closure - def gitCryptPath = ctx.executable('git-crypt', 'gitcrypt') - def unlocked = false - try { - if (credentialsId) { - ctx.info('-- git-crypt unlock --') - ctx.withCredentials([ctx.file(credentialsId: credentialsId, variable: 'GIT_CRYPT_KEYFILE')]) { - ctx.sh script: "${gitCryptPath}/git-crypt unlock \${GIT_CRYPT_KEYFILE}" - unlocked = true - } - } - body() - } finally { - if (unlocked) { - ctx.info('-- git-crypt lock --') - ctx.sh script: "${gitCryptPath}/git-crypt lock" - } - } - } -} diff --git a/src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy b/src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy deleted file mode 100644 index 9bf4905..0000000 --- a/src/com/puzzleitc/jenkins/command/ReplaceFromVaultCommand.groovy +++ /dev/null @@ -1,43 +0,0 @@ -package com.puzzleitc.jenkins.command - -import com.puzzleitc.jenkins.command.context.PipelineContext - -class ReplaceFromVaultCommand { - - private final PipelineContext ctx - - ReplaceFromVaultCommand(PipelineContext ctx) { - this.ctx = ctx - } - - Object execute() { - ctx.info('-- replaceFromVault --') - def text = ctx.stepParams.getRequired('text') as String - def result = text - while (parseVaultLookup(result).size() > 0) { - def match = parseVaultLookup(result).get(0) - if (match.path) { - def replacedValue = ctx.lookupValueFromVault(match.path, match.key) - result = result.substring(0, match.start) + replacedValue + result.substring(match.end, result.length()) - } - } - return result - } - - private static List parseVaultLookup(String lookup) { - def matcher = lookup =~ /(?m)\{\{\s*vault\.get\(\s*"([^"]+)",\s*"([^"]+)"\s*\)\s*\}\}/ - def result = [] - while (matcher.find()) { - result.add(new VaultMatch(path: matcher.group(1), key: matcher.group(2), start: matcher.start(), end: matcher.end())) - } - return result - } - - private static class VaultMatch { - String path - String key - int start - int end - } - -}