From 0fbbf82980ac23a93a4d2b91cb891f6be1cb6fc5 Mon Sep 17 00:00:00 2001 From: SylivanKenobi Date: Fri, 13 Oct 2023 14:38:13 +0200 Subject: [PATCH] remove all methods who are deprecated or use bad plugins --- vars/addDeployLinks.groovy | 7 -- vars/addDeployLinks.txt | 21 ----- vars/check.groovy | 7 -- vars/check.txt | 17 ---- vars/cleanBuilds.groovy | 37 --------- vars/cleanBuilds.txt | 22 ----- vars/deployChangedComponents.groovy | 119 ---------------------------- vars/deployChangedComponents.txt | 36 --------- vars/openshiftUtils.groovy | 75 ------------------ vars/openshiftUtils.txt | 87 -------------------- vars/replaceFromVault.groovy | 10 --- vars/replaceFromVault.txt | 32 -------- vars/trackComponentVersions.groovy | 91 --------------------- vars/trackComponentVersions.txt | 47 ----------- vars/withGitCrypt.groovy | 9 --- vars/withGitCrypt.txt | 38 --------- 16 files changed, 655 deletions(-) delete mode 100644 vars/addDeployLinks.groovy delete mode 100644 vars/addDeployLinks.txt delete mode 100644 vars/check.groovy delete mode 100644 vars/check.txt delete mode 100644 vars/cleanBuilds.groovy delete mode 100644 vars/cleanBuilds.txt delete mode 100644 vars/deployChangedComponents.groovy delete mode 100644 vars/deployChangedComponents.txt delete mode 100644 vars/openshiftUtils.groovy delete mode 100644 vars/openshiftUtils.txt delete mode 100644 vars/replaceFromVault.groovy delete mode 100644 vars/replaceFromVault.txt delete mode 100644 vars/trackComponentVersions.groovy delete mode 100644 vars/trackComponentVersions.txt delete mode 100644 vars/withGitCrypt.groovy delete mode 100644 vars/withGitCrypt.txt diff --git a/vars/addDeployLinks.groovy b/vars/addDeployLinks.groovy deleted file mode 100644 index 3220181..0000000 --- a/vars/addDeployLinks.groovy +++ /dev/null @@ -1,7 +0,0 @@ -import com.puzzleitc.jenkins.command.AddDeployLinksCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext - -def call(Map params = [:]) { - AddDeployLinksCommand command = new AddDeployLinksCommand(new JenkinsPipelineContext(this, params)) - command.execute() -} diff --git a/vars/addDeployLinks.txt b/vars/addDeployLinks.txt deleted file mode 100644 index 24bd124..0000000 --- a/vars/addDeployLinks.txt +++ /dev/null @@ -1,21 +0,0 @@ - -

- Adds a deployment link to a specific build.
- This can be useful if a deploy job should be triggered when a build was successful.
-
- parameters:
-
- examples:
-
- addDeployLinks deployJob: 'integration-pipeline-deploy'
-
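A minimal pipeline sketch showing how the step was typically invoked; the library name and the 'integration-pipeline-deploy' job come from the documentation above, the surrounding stages are illustrative:

    @Library('jenkins-pipeline-shared-libraries') _

    node {
        stage('Build') {
            // build and archive the artifacts of the integration pipeline
        }
        stage('Add deploy link') {
            // attaches a link to this build pointing at the downstream deploy job
            addDeployLinks deployJob: 'integration-pipeline-deploy'
        }
    }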

- - \ No newline at end of file diff --git a/vars/check.groovy b/vars/check.groovy deleted file mode 100644 index bcaaf14..0000000 --- a/vars/check.groovy +++ /dev/null @@ -1,7 +0,0 @@ -// Deprecated: Will be implemented as a step. -def mandatoryParameter(parameterName) { - if (!params.containsKey(parameterName)) { - currentBuild.result = 'ABORTED' - error('missing parameter: ' + parameterName) - } -} diff --git a/vars/check.txt b/vars/check.txt deleted file mode 100644 index 3badd5c..0000000 --- a/vars/check.txt +++ /dev/null @@ -1,17 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- check.mandatoryParameter(parameterName:String)
-
- Deprecated: Will be implemented as a step.
-
- Checks if a parameter with the given name is available. If not, the build will be aborted with an error message.
-
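A short usage sketch for the deprecated helper; the parameter name TARGET_ENV is illustrative:

    @Library('jenkins-pipeline-shared-libraries') _

    node {
        stage('Validate parameters') {
            // aborts the build with an error message if the job was started without TARGET_ENV
            check.mandatoryParameter('TARGET_ENV')
        }
        stage('Deploy') {
            echo "Deploying to ${params.TARGET_ENV}"
        }
    }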
- - diff --git a/vars/cleanBuilds.groovy b/vars/cleanBuilds.groovy deleted file mode 100644 index e91e4a4..0000000 --- a/vars/cleanBuilds.groovy +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env groovy - -import com.jenkinsci.plugins.badge.action.BadgeAction - -// the function implements how many build per environment should be kept -// parameter is maxKeepBuilds -def call(Map args) { - def maxNumberToKeepBuilds = args?.maxKeepBuilds ?: 10; - - def environmentBuildCount = [:] - Jenkins.instance.getItemByFullName(args.job) - .getBuilds() - .findAll { it.isKeepLog() } - .each { build -> - deployedEnvironment = [] - build.getActions(BadgeAction.class).each { - deployedEnvironment << it.id - environmentBuildCount[it.id] = environmentBuildCount.get(it.id, 0) + 1 - } - - // each Build that should be kept will be stored in keepBuild map - def keepBuild = [] - deployedEnvironment.each { - if (environmentBuildCount[it] <= maxNumberToKeepBuilds) { - keepBuild << it - } - } - - // print out reason of/not keeping the build - if (keepBuild) { - echo "Keeping build ${build} because of the following promotions: ${keepBuild.join(' ')}" - } else { - echo "Deleting build ${build}" - build.delete() - } - } -} diff --git a/vars/cleanBuilds.txt b/vars/cleanBuilds.txt deleted file mode 100644 index 0a6cf1b..0000000 --- a/vars/cleanBuilds.txt +++ /dev/null @@ -1,22 +0,0 @@ - -

- The cleanBuilds function offers housekeeping for the integration pipeline.
- Every build of the integration pipeline which was deployed to an environment is automatically kept.
- To prevent an excessive number of kept builds, this function cleans them up, distinguishing between the deployments of each environment.
-
- parameters:
-
- job - Full name of the job whose kept builds should be cleaned.
- maxKeepBuilds - Maximum number of kept builds per environment (default: 10).
-
- examples:
-
- cleanBuilds job: 'project-a/integration-pipeline'
- cleanBuilds job: 'project-a/integration-pipeline', maxKeepBuilds: 10
-
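A sketch of a periodic housekeeping job built around this step; the cron spec and job name are illustrative, and maxKeepBuilds defaults to 10 in the Groovy source above:

    @Library('jenkins-pipeline-shared-libraries') _

    // run the cleanup once a day
    properties([pipelineTriggers([cron('H 3 * * *')])])

    node {
        stage('Clean kept builds') {
            cleanBuilds job: 'project-a/integration-pipeline', maxKeepBuilds: 5
        }
    }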

- - \ No newline at end of file diff --git a/vars/deployChangedComponents.groovy b/vars/deployChangedComponents.groovy deleted file mode 100644 index c4e8e60..0000000 --- a/vars/deployChangedComponents.groovy +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env groovy - -import hudson.model.Run -import com.jenkinsci.plugins.badge.action.AbstractAction -import com.jenkinsci.plugins.badge.action.BadgeAction -import com.jenkinsci.plugins.badge.action.BadgeSummaryAction - -Map readComponentVersionsFromArtifact(String job, Object selector, String file) { - try { - copyArtifacts filter: file, projectName: job, selector: selector - def versions = readYaml(file: file) - if (versions instanceof Map) { - return versions - } else { - return [:] - } - // when no artifact available (e.g. its the first run) - // return an empty map instead of throwing an error - } catch (Exception e) { - return [:] - } -} - -void writeComponentVersionsToArtifact(String dataFile, Map componentVersions) { - // always remove the old file first - sh "rm -f ${dataFile}" - writeYaml file: dataFile, data: componentVersions - archiveArtifacts dataFile -} - -// always remove the old badge action before appling anything new -void removeBadgeAction(Run currentBuild, Class actionClass, String id) { - def badgeAction = currentBuild.getActions(actionClass).find { it.id == id } - if (badgeAction != null) { - currentBuild.removeAction(badgeAction) - } -} - -void setBadgeInBuildHistory(Run currentBuild, String icon, String altText, String id, String link) { - removeBadgeAction(currentBuild, BadgeAction.class, id) - - // as the badge-plugin does not support the adding of badges - // to other builds, we have to use the class directly - def badgeAction = BadgeAction.createBadge(icon, altText, link) - badgeAction.setId(id) - currentBuild.addAction(badgeAction) -} - -void setBadgeAndLinkInSummary(Run currentBuild, String icon, String id, String altText) { - removeBadgeAction(currentBuild, BadgeSummaryAction.class, id) - - // as the badge-plugin does not support the adding of badges - // to other builds, we have to use the class directly - def badgeSummaryAction = new BadgeSummaryAction(icon) - badgeSummaryAction.setId(id) - badgeSummaryAction.appendText(altText) - currentBuild.addAction(badgeSummaryAction) -} - -// Depending on the Badgelocation the Icon is either "large" or "small" -String getDeployIcon(String targetEnv, boolean isLarge = false) { - size = "16x16" - if (isLarge) { - size = "32x32" - } - if (targetEnv == "test") { - return '/plugin/promoted-builds/icons/' + size + '/star-silver.png' - } else if (targetEnv == "prod") { - return '/plugin/promoted-builds/icons/' + size + '/star-gold.png' - } else if (targetEnv == "int") { - return '/plugin/promoted-builds/icons/' + size + '/star-purple.png' - } - return '/plugin/promoted-builds/icons/' + size + '/star-orange.png' -} - -void addDeployedBadges() { - def built = Jenkins.instance.getItemByFullName(built_name).getBuild(built_number) - def deploy = currentBuild.rawBuild - - setBadgeInBuildHistory(deploy, getDeployIcon(target_env), "Deployed ${built_name} #${built_number} to ${target_env}", target_env, "/${built.getUrl()}") - setBadgeAndLinkInSummary(deploy, getDeployIcon(target_env, true), target_env, "Deployed ${built_name} #${built_number} to ${target_env}") - - setBadgeInBuildHistory(built, getDeployIcon(target_env), "Deployed to ${target_env} by ${env.JOB_NAME} #${env.BUILD_NUMBER}", target_env, "/${currentBuild.rawBuild.getUrl()}") - setBadgeAndLinkInSummary(built, 
getDeployIcon(target_env, true), target_env, "Deployed to ${target_env} by ${env.JOB_NAME} #${env.BUILD_NUMBER}") - // save is required to persist badges on other builds than the current - built.save() - - built.keepLog(true) -} - -def call() { - final DEPLOYED_VERSIONS_FILE = 'deployed-versions.yaml' - final COMPONENT_VERSIONS_FILE = 'component-versions.yaml' - final EXECUTION_JOB_FILE = 'rundeck-jobs.yaml' - - def newComponentVersions = readComponentVersionsFromArtifact(built_name, specific(built_number), COMPONENT_VERSIONS_FILE) - def deployedVersions = readComponentVersionsFromArtifact(env.JOB_NAME, lastSuccessful(), DEPLOYED_VERSIONS_FILE) - - def currentComponentVersions = deployedVersions[target_env] - deployedVersions[target_env] = newComponentVersions - - executionJobs = readYaml(file: EXECUTION_JOB_FILE) - - newComponentVersions.each { k, v -> - - if (!currentComponentVersions) { - echo "Installing component ${k} on environment ${target_env} with version ${v.version}." - echo "Executing rundeck job ${executionJobs[k].jobId} with env ${target_env} and version ${v.version}." // TODO: Replace with rundeck call - } else if (v.version == currentComponentVersions[k].version) { - echo "Component ${k} version ${v.version} already deployed, skipping." - } else { - echo "Updating component ${k} on environment ${target_env} from version ${currentComponentVersions[k].version} to ${v.version}." - echo "Executing rundeck job ${executionJobs[k].jobId} with env ${target_env} and version ${v.version}." // TODO: Replace with rundeck call - } - } - - writeComponentVersionsToArtifact DEPLOYED_VERSIONS_FILE, deployedVersions - addDeployedBadges() -} diff --git a/vars/deployChangedComponents.txt b/vars/deployChangedComponents.txt deleted file mode 100644 index 54c517a..0000000 --- a/vars/deployChangedComponents.txt +++ /dev/null @@ -1,36 +0,0 @@ - -

- This function can be used to deploy a component (or a set of components) from a specific integration pipeline to a distinct environment.
- It provides multiple features:
-
- The integration pipeline has to provide a valid component-versions.yaml file.
- deployChangedComponents() has to be called in a parameterized job as it depends on parameters to be set.
-
- parameters for the containing job:
-
- built_name - Full name of the integration pipeline job that produced the build to deploy.
- built_number - Number of the build whose components should be deployed.
- target_env - Environment to deploy to (e.g. test, int, prod).
-
- Attention:
- There is no implementation of the actual deployment.
- The function only prints the actions that an actual deployment would perform.
-
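A sketch of the containing parameterized deploy job; the parameter names match those read by the Groovy source above, the descriptions are illustrative:

    @Library('jenkins-pipeline-shared-libraries') _

    properties([parameters([
        string(name: 'built_name', description: 'Full name of the integration pipeline job'),
        string(name: 'built_number', description: 'Build number whose components should be deployed'),
        string(name: 'target_env', description: 'Target environment: test, int or prod')
    ])])

    node {
        stage('Deploy changed components') {
            deployChangedComponents()
        }
    }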

- - \ No newline at end of file diff --git a/vars/openshiftUtils.groovy b/vars/openshiftUtils.groovy deleted file mode 100644 index fae1d81..0000000 --- a/vars/openshiftUtils.groovy +++ /dev/null @@ -1,75 +0,0 @@ -// Deprecated: Use the openshiftApply() step. - -// -// OpenShift resource update by applying a template. -// -// method parameters: -// ocpUrl -> url of the OpenShift server -// ocpProject -> project-name/namespace of the OpenShift project -// templateFile -> OpenShift template -// credentialsId -> credentials for the OpenShift login -// namespace -> true adds NAMESPACE_NAME param -// -def void applyTemplate(String ocpUrl, String ocpProject, String templateFile, String credentialsId, boolean namespace) { - - echo "-- start resource update by template --" - echo "OpenShift server URL: $ocpUrl" - echo "OpenShift project: $ocpProject" - echo "resource file: $templateFile" - - withCredentials([[$class : 'StringBinding', - credentialsId : "${credentialsId}", - variable : 'openshift_token']]) { - withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin", "ocpUrl=${ocpUrl}"]) { - sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token" - sh "oc project $ocpProject" - sh "oc project" - sh "oc whoami" - - // apply template - if (namespace) { - sh "oc process -f $templateFile -p NAMESPACE_NAME=\$(oc project -q) | oc apply -f -" - } else { - sh "oc process -f $templateFile | oc apply -f -" - } - } - } -} - -// -// OpenShift resource update by applying a template with environment file. -// -// method parameters: -// ocpUrl -> url of the OpenShift server -// ocpProject -> project-name/namespace of the OpenShift project -// templateFile -> OpenShift template -// credentialsId -> credentials for the OpenShift login -// envFile -> environment file -// namespace -> true adds NAMESPACE_NAME param -// -def void applyTemplateWithEnvFile(String ocpUrl, String ocpProject, String templateFile, String credentialsId, String envFile, boolean namespace) { - - echo "-- start resource update by template with environment file --" - echo "OpenShift server URL: $ocpUrl" - echo "OpenShift project: $ocpProject" - echo "resource file: $templateFile" - echo "environment file: $envFile" - - withCredentials([[$class : 'StringBinding', - credentialsId : "${credentialsId}", - variable : 'openshift_token']]) { - withEnv(["KUBECONFIG=${pwd()}/.kube", "PATH+OC_HOME=${tool 'oc'}/bin", "ocpUrl=${ocpUrl}"]) { - sh "oc login $ocpUrl --insecure-skip-tls-verify=true --token=$openshift_token" - sh "oc project $ocpProject" - sh "oc project" - sh "oc whoami" - - // apply template - if (namespace) { - sh "oc process -f $templateFile -p NAMESPACE_NAME=\$(oc project -q) --param-file $envFile | oc apply -f -" - } else { - sh "oc process -f $templateFile --param-file $envFile | oc apply -f -" - } - } - } -} diff --git a/vars/openshiftUtils.txt b/vars/openshiftUtils.txt deleted file mode 100644 index bfda8fc..0000000 --- a/vars/openshiftUtils.txt +++ /dev/null @@ -1,87 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- openshiftUtils.applyTemplate(ocpUrl:String, ocpProject:String, templateFile:String, credentialsId:String, namespace:Boolean):void
-
- Deprecated: Use the openshiftApply() step.
-
- OpenShift resource update by applying a template.
- Example call:
- openshiftUtils.applyTemplate("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", false)
-
- • ocpUrl - URL of the OpenShift server API endpoint.
- • ocpProject - Project/namespace name of the OpenShift project to apply the template to.
- • templateFile - Path to the OpenShift template file.
- • credentialsId - The credentialsId of an OpenShift auth token stored in the Jenkins credential manager with which to authenticate. The credential type is Secret text.
- • namespace - Boolean parameter. True adds a parameter with the name NAMESPACE_NAME whose value is the name of the current OpenShift project.
-
- openshiftUtils.applyTemplateWithEnvFile(ocpUrl:String, ocpProject:String, templateFile:String, credentialsId:String, envFile:String, namespace:Boolean):void
-
- Deprecated: Use the openshiftApply() step.
-
- OpenShift resource update by applying a template with parameters from an environment file.
-
- The environment file contains keys and values separated by a colon.
- Example:
- KEY: 'value'
-
- Example call:
- openshiftUtils.applyTemplateWithEnvFile("https://console.appuio.ch/", "my-appuio-project", "template.yaml", "APPUiO_login_token", "test.yaml", false)
-
- • ocpUrl - URL of the OpenShift server API endpoint.
- • ocpProject - Project/namespace name of the OpenShift project to apply the template to.
- • templateFile - Path to the OpenShift template file.
- • credentialsId - The credentialsId of an OpenShift auth token stored in the Jenkins credential manager with which to authenticate. The credential type is Secret text.
- • envFile - Path to the environment file holding all parameters with their values.
- • namespace - Boolean parameter. True adds a parameter with the name NAMESPACE_NAME whose value is the name of the current OpenShift project.
-
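A sketch of the deprecated helper called from a full pipeline; server URL, project, file path and credential id are the placeholder values from the examples above, and the 'oc' tool must be configured on the Jenkins agent as the Groovy source shows:

    @Library('jenkins-pipeline-shared-libraries') _

    node {
        stage('Apply OpenShift template') {
            checkout scm
            // NAMESPACE_NAME is injected into the template because the last argument is true
            openshiftUtils.applyTemplate(
                'https://console.appuio.ch/',
                'my-appuio-project',
                'template.yaml',
                'APPUiO_login_token',
                true)
        }
    }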
- - diff --git a/vars/replaceFromVault.groovy b/vars/replaceFromVault.groovy deleted file mode 100644 index f0ab575..0000000 --- a/vars/replaceFromVault.groovy +++ /dev/null @@ -1,10 +0,0 @@ -import com.puzzleitc.jenkins.command.ReplaceFromVaultCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext - -import static com.puzzleitc.jenkins.util.Args.parseArgs - -def call(Map namedArgs = [:], Object... positionalArgs) { - def args = parseArgs(namedArgs, positionalArgs, ['text']) - ReplaceFromVaultCommand command = new ReplaceFromVaultCommand(new JenkinsPipelineContext(this, args)) - return command.execute() -} \ No newline at end of file diff --git a/vars/replaceFromVault.txt b/vars/replaceFromVault.txt deleted file mode 100644 index 0680a3e..0000000 --- a/vars/replaceFromVault.txt +++ /dev/null @@ -1,32 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- replaceFromVault(text:String):String
-
- Substitutes variables in a provided string with secrets retrieved from HashiCorp Vault. The string
- will be scanned for the pattern {{ vault.get("<VAULT_PATH>", "<VAULT_KEY>") }},
- where VAULT_PATH defines the fully qualified path of the secret in Vault and
- VAULT_KEY the key to look up in the specified secret. Example pattern:
- {{ vault.get("/kv/spaces/outerspace/wekan-secret-test", "mongodb_user") }}.
-
- • text (required) - The string which contains a predefined pattern that should be substituted by
-   secrets from HashiCorp Vault.
-
- Example:
-
- replaceFromVault(text: kustomize(path: 'openshift/postgresql/overlays/dev'))
-
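A sketch showing the step applied to a file read from the workspace; the file path is illustrative and the vault.get placeholder inside it is the one documented above:

    @Library('jenkins-pipeline-shared-libraries') _

    node {
        stage('Render secrets') {
            checkout scm
            // secret-template.yaml contains e.g.:
            //   password: {{ vault.get("/kv/spaces/outerspace/wekan-secret-test", "mongodb_user") }}
            def rendered = replaceFromVault(text: readFile('openshift/secret-template.yaml'))
            writeFile file: 'secret.yaml', text: rendered
        }
    }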
- - diff --git a/vars/trackComponentVersions.groovy b/vars/trackComponentVersions.groovy deleted file mode 100644 index 9f5071d..0000000 --- a/vars/trackComponentVersions.groovy +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env groovy -import hudson.model.Cause -import org.jenkinsci.plugins.pipeline.maven.publishers.MavenReport - -Map readComponentVersionsFromArtifact(String job, Object selector, String file) { - try { - copyArtifacts filter: file, projectName: job, selector: selector - def versions = readYaml(file: file) - if (versions instanceof Map) { - return versions - } else { - return [:] - } - // when no artifact available (e.g. its the first run) - // return an empty map instead of throwing an error - } catch (Exception e) { - return [:] - } -} - -void writeComponentVersionsToArtifact(String dataFile, Map componentInfos) { - // always remove the old file first - sh "rm -f ${dataFile}" - writeYaml file: dataFile, data: componentInfos - archiveArtifacts dataFile -} - -def getUpstreamCauses() { - def result = [] - - def buildCauses = currentBuild.rawBuild.getCauses() - buildCauses.each { - if (it instanceof Cause.UpstreamCause) { - def upstreamCause = it as Cause.UpstreamCause - result << [job: upstreamCause.upstreamProject, build: upstreamCause.upstreamBuild.toString()] - } - } - - return result -} - -def getComponentInformations(String componentVersion, Boolean externalComponent = false) { - def infos = [version: componentVersion + '-' + env.BUILD_NUMBER, job: env.JOB_NAME, buildNumber: env.BUILD_NUMBER as int, buildUrl: env.BUILD_URL] - if(externalComponent) { - infos = [version: componentVersion] - } - - // does the current build contain a maven build? - def mavenReport = currentBuild.rawBuild.getActions(MavenReport.class) - if (mavenReport) { - def mavenArtifacts = [] - mavenReport[0].getDeployedArtifacts().each { - mavenArtifacts << it.url - } - infos['artifacts'] = mavenArtifacts - } - - return infos -} - -def call(Map args) { - final COMPONENT_VERSIONS_FILE = 'component-versions.yaml' - - lock(resource: 'trackComponentVersions', inversePrecedence: false) { - def componentInfos = readComponentVersionsFromArtifact(env.JOB_NAME, lastSuccessful(), COMPONENT_VERSIONS_FILE) - - // If a build is taking a long time multiple other build requests can - // occour so there may be multiple build causes: http://javadoc.jenkins-ci.org/hudson/model/Run.html#getCauses-- - getUpstreamCauses().each { - def upstreamVersions = readComponentVersionsFromArtifact(it.job, specific(it.build), COMPONENT_VERSIONS_FILE) - componentInfos.putAll(upstreamVersions) - } - // check if pomFile location is passed - if (args?.pomFile) { - def mavenCoordinates = readMavenPom(file: args.pomFile) - componentInfos[mavenCoordinates.artifactId] = getComponentInformations(mavenCoordinates.version) - // is version yaml passed? 
- } else if (args?.versionFile) { - releaseVersion = readYaml(file: args.versionFile) - if (args?.containsExternalComponents) { - releaseVersion.each { k, v -> componentInfos[k] = getComponentInformations(v, args.containsExternalComponents) } - } else { - releaseVersion.each { k, v -> componentInfos[k] = getComponentInformations(v) } - } - } else { - error(getClass().getName() + ': Either pomFile or versionFile must be set!') - } - - writeComponentVersionsToArtifact(COMPONENT_VERSIONS_FILE, componentInfos) - } -} diff --git a/vars/trackComponentVersions.txt b/vars/trackComponentVersions.txt deleted file mode 100644 index a188178..0000000 --- a/vars/trackComponentVersions.txt +++ /dev/null @@ -1,47 +0,0 @@ - -

- Tracks the version of a component and its upstreams.
- For upstream jobs the information about the component versions is retrieved from a file called component-versions.yaml.
- The structure of this file:
-
- [componentname]:
-   version: [componentversion]
-   job: [name of job]
-   buildNumber: [number of last successful build]
-   buildUrl: [URL for last successful build]
-   artifacts: [list of artifacts]
-
- Stores this information in the file component-versions.yaml.
- If trackComponentVersions is called from multiple upstream projects it will aggregate the information about every component in component-versions.yaml.
-
- parameters:
-
- If the file cannot be found the function returns an empty map.
-
- return:
-
- The result file must exist in the repository but can be empty.
-
- examples:
-
- trackComponentVersions versionFile: 'project-a/version.yml'
- trackComponentVersions pomFile: 'project-a/pom.xml'
-
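A sketch of a component build job that records its version; the pom path is illustrative, and the containsExternalComponents flag is the one read by the Groovy source above:

    @Library('jenkins-pipeline-shared-libraries') _

    node {
        stage('Build') {
            checkout scm
            // ... build the component, e.g. with Maven ...
        }
        stage('Track component version') {
            // derive the component name and version from the Maven coordinates
            trackComponentVersions pomFile: 'pom.xml'
            // alternatively, track versions supplied in a YAML file:
            // trackComponentVersions versionFile: 'version.yml', containsExternalComponents: true
        }
    }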

- - \ No newline at end of file diff --git a/vars/withGitCrypt.groovy b/vars/withGitCrypt.groovy deleted file mode 100644 index d6d17a9..0000000 --- a/vars/withGitCrypt.groovy +++ /dev/null @@ -1,9 +0,0 @@ -import com.puzzleitc.jenkins.command.GitCryptCommand -import com.puzzleitc.jenkins.command.context.JenkinsPipelineContext -import static com.puzzleitc.jenkins.util.Args.parseArgs - -def call(Map namedArgs = [:], Object... positionalArgs) { - def args = parseArgs(namedArgs, positionalArgs, ['credentialsId', 'body']) - GitCryptCommand command = new GitCryptCommand(new JenkinsPipelineContext(this, args)) - command.execute() -} diff --git a/vars/withGitCrypt.txt b/vars/withGitCrypt.txt deleted file mode 100644 index c3c09c7..0000000 --- a/vars/withGitCrypt.txt +++ /dev/null @@ -1,38 +0,0 @@ - -

- Part of @Library('jenkins-pipeline-shared-libraries')
-
- withGitCrypt(credentialsId:String):Object {…}
-
- Unlocks the Git repository in the current directory with git-crypt for the duration of the scope of the step
- and locks it again afterwards. The repository is also relocked when an error occurs in the scope of the step.
- The return value is the value returned by (or the value of the last statement within) the scope of the step.
- If credentialsId is null, git-crypt is not called but the passed block/closure is still executed. This can
- be used to run git-crypt conditionally.
-
- • credentialsId - The credentials id of a git-crypt keyfile stored in the Jenkins credential manager.
-
- Example:
-
- withGitCrypt(credentialsId: 'my-git-crypt-keyfile') {
-     // work with decrypted files
- }
- // files are encrypted again
-
- For further information see the git-crypt documentation.
-
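A sketch using the return value and the null-credentialsId behaviour described above; the branch name, credential id and file path are illustrative:

    @Library('jenkins-pipeline-shared-libraries') _

    node {
        checkout scm
        // only unlock on the main branch; a null credentialsId skips git-crypt but still runs the body
        def keyId = (env.BRANCH_NAME == 'main') ? 'my-git-crypt-keyfile' : null
        def config = withGitCrypt(credentialsId: keyId) {
            readYaml file: 'config/settings.yaml'   // value of the last statement is returned
        }
        echo "Loaded ${config.size()} settings"
    }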