diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..c05aa74e --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +* text eol=lf +*.png -text +*.pxm -text diff --git a/LICENSE b/LICENSE index f0e34ee1..9f90e8c4 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,21 @@ -The MIT License (MIT) - -Copyright (c) 2016 CloudBees, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +The MIT License (MIT) + +Copyright (c) 2016 CloudBees, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/pipeline-maven-api/pom.xml b/pipeline-maven-api/pom.xml index 6dd37edd..e24cba42 100644 --- a/pipeline-maven-api/pom.xml +++ b/pipeline-maven-api/pom.xml @@ -7,8 +7,8 @@ ${changelist} pipeline-maven-api - Pipeline Maven Plugin API hpi + Pipeline Maven Plugin API diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenArtifact.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenArtifact.java index 5b1a5180..82aadf40 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenArtifact.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenArtifact.java @@ -1,11 +1,10 @@ package org.jenkinsci.plugins.pipeline.maven; -import org.apache.commons.lang.builder.CompareToBuilder; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.io.Serializable; import java.util.Objects; +import org.apache.commons.lang.builder.CompareToBuilder; /** * @author Cyrille Le Clerc @@ -14,9 +13,7 @@ public class MavenArtifact implements Serializable, Comparable { private static final long serialVersionUID = 1L; - public MavenArtifact() { - - } + public MavenArtifact() {} /** * @param identifier Maven {@code $groupId:$artifactId:$version } (GAV) or {@code $groupId:$artifactId:$type:$version} (GATV) @@ -62,11 +59,14 @@ public MavenArtifact(String identifier) throws IllegalArgumentException { * @see org.eclipse.aether.artifact.Artifact#getBaseVersion() */ private String baseVersion; + private String type; private String classifier; private String extension; + @Nullable private String file; + private boolean snapshot; /** * URL on of the Maven repository on which the artifact has been deployed ("mvn deploy") @@ -79,7 +79,9 @@ public MavenArtifact(String identifier) throws IllegalArgumentException { */ @NonNull public String getFileName() { - return getArtifactId() + "-" + getVersion() + ((getClassifier() == 
null || getClassifier().isEmpty()) ? "" : "-" + getClassifier()) + "." + getExtension(); + return getArtifactId() + "-" + getVersion() + + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : "-" + getClassifier()) + "." + + getExtension(); } /** @@ -87,7 +89,9 @@ public String getFileName() { */ @NonNull public String getFileNameWithBaseVersion() { - return getArtifactId() + "-" + getBaseVersion() + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : "-" + getClassifier()) + "." + getExtension(); + return getArtifactId() + "-" + getBaseVersion() + + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : "-" + getClassifier()) + "." + + getExtension(); } /** @@ -95,7 +99,9 @@ public String getFileNameWithBaseVersion() { */ @NonNull public String getFileNameWithVersion() { - return getArtifactId() + "-" + getVersion() + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : "-" + getClassifier()) + "." + getExtension(); + return getArtifactId() + "-" + getVersion() + + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : "-" + getClassifier()) + "." + + getExtension(); } /** @@ -103,10 +109,9 @@ public String getFileNameWithVersion() { */ @NonNull public String getId() { - return getGroupId() + ":" + getArtifactId() + ":" + - getType() + ":" + - ((getClassifier() == null || getClassifier().isEmpty()) ? "" : getClassifier() + ":") + - (getBaseVersion() == null ? getVersion() : getBaseVersion()); + return getGroupId() + ":" + getArtifactId() + ":" + getType() + + ":" + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : getClassifier() + ":") + + (getBaseVersion() == null ? getVersion() : getBaseVersion()); } /** @@ -117,14 +122,13 @@ public String getShortDescription() { if (getBaseVersion() == null) { return getId(); } else { - return getGroupId() + ":" + getArtifactId() + ":" + - getType() + ":" + - ((getClassifier() == null || getClassifier().isEmpty()) ? 
"" : getClassifier() + ":") + - getBaseVersion() + "(" + getVersion() + ")"; + return getGroupId() + ":" + getArtifactId() + ":" + getType() + + ":" + ((getClassifier() == null || getClassifier().isEmpty()) ? "" : getClassifier() + ":") + + getBaseVersion() + + "(" + getVersion() + ")"; } } - /** * URL of the artifact on the maven repository on which it has been deployed if it has been deployed. * @@ -132,33 +136,33 @@ public String getShortDescription() { */ @Nullable public String getUrl() { - if (getRepositoryUrl() == null) - return null; - return getRepositoryUrl() + "/" + getGroupId().replace('.', '/') + "/" + getArtifactId() + "/" + getBaseVersion() + "/" + getFileNameWithVersion(); + if (getRepositoryUrl() == null) return null; + return getRepositoryUrl() + "/" + getGroupId().replace('.', '/') + "/" + getArtifactId() + "/" + + getBaseVersion() + "/" + getFileNameWithVersion(); } @Override public String toString() { - return "MavenArtifact{" + - getGroupId() + ":" + - getArtifactId() + ":" + - getType() + - (getClassifier() == null ? "" : ":" + getClassifier()) + ":" + - getBaseVersion() + "(version: " + getVersion() + ", snapshot:" + isSnapshot() + ") " + - (getFile() == null ? "" : " " + getFile()) + - '}'; + return "MavenArtifact{" + getGroupId() + + ":" + getArtifactId() + + ":" + getType() + + (getClassifier() == null ? "" : ":" + getClassifier()) + + ":" + getBaseVersion() + + "(version: " + getVersion() + ", snapshot:" + isSnapshot() + ") " + + (getFile() == null ? "" : " " + getFile()) + + '}'; } @Override public int compareTo(MavenArtifact o) { - return new CompareToBuilder(). - append(this.getGroupId(), o.getGroupId()). - append(this.getArtifactId(), o.getArtifactId()). - append(this.getBaseVersion(), o.getBaseVersion()). - append(this.getVersion(), o.getVersion()). - append(this.getType(), o.getType()). - append(this.getClassifier(), o.getClassifier()). 
- toComparison(); + return new CompareToBuilder() + .append(this.getGroupId(), o.getGroupId()) + .append(this.getArtifactId(), o.getArtifactId()) + .append(this.getBaseVersion(), o.getBaseVersion()) + .append(this.getVersion(), o.getVersion()) + .append(this.getType(), o.getType()) + .append(this.getClassifier(), o.getClassifier()) + .toComparison(); } /** @@ -177,38 +181,25 @@ public int hashCode() { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; MavenArtifact other = (MavenArtifact) obj; if (getGroupId() == null) { - if (other.getGroupId() != null) - return false; - } else if (!getGroupId().equals(other.getGroupId())) - return false; + if (other.getGroupId() != null) return false; + } else if (!getGroupId().equals(other.getGroupId())) return false; if (getArtifactId() == null) { - if (other.getArtifactId() != null) - return false; - } else if (!getArtifactId().equals(other.getArtifactId())) - return false; + if (other.getArtifactId() != null) return false; + } else if (!getArtifactId().equals(other.getArtifactId())) return false; if (getBaseVersion() == null) { - if (other.getBaseVersion() != null) - return false; - } else if (!getBaseVersion().equals(other.getBaseVersion())) - return false; + if (other.getBaseVersion() != null) return false; + } else if (!getBaseVersion().equals(other.getBaseVersion())) return false; if (getVersion() == null) { - if (other.getVersion() != null) - return false; - } else if (!getVersion().equals(other.getVersion())) - return false; + if (other.getVersion() != null) return false; + } else if (!getVersion().equals(other.getVersion())) return false; if (getType() == null) { - if (other.getType() != null) - return false; - } else if (!getType().equals(other.getType())) - return false; + if 
(other.getType() != null) return false; + } else if (!getType().equals(other.getType())) return false; if (getClassifier() == null) { return other.getClassifier() == null; } else return getClassifier().equals(other.getClassifier()); diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenDependency.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenDependency.java index 5f7fb857..da075a23 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenDependency.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenDependency.java @@ -22,18 +22,17 @@ public void setScope(String scope) { @Override public String toString() { - return "MavenDependency{" + - getGroupId() + ":" + - getArtifactId() + ":" + - getType() + - (getClassifier() == null ? "" : ":" + getClassifier()) + ":" + - getBaseVersion() + ", " + - "scope: " + scope + ", " + - " optional: " + optional + - " version: " + getVersion() + - " snapshot: " + isSnapshot() + - (getFile() == null ? "" : " " + getFile()) + - '}'; + return "MavenDependency{" + getGroupId() + + ":" + getArtifactId() + + ":" + getType() + + (getClassifier() == null ? "" : ":" + getClassifier()) + + ":" + getBaseVersion() + + ", " + "scope: " + + scope + ", " + " optional: " + + optional + " version: " + + getVersion() + " snapshot: " + + isSnapshot() + (getFile() == null ? 
"" : " " + getFile()) + + '}'; } public MavenArtifact asMavenArtifact() { @@ -52,7 +51,6 @@ public MavenArtifact asMavenArtifact() { return result; } - @Override public int hashCode() { return Objects.hash(super.hashCode(), optional, scope); @@ -60,15 +58,11 @@ public int hashCode() { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (!super.equals(obj)) return false; + if (getClass() != obj.getClass()) return false; MavenDependency other = (MavenDependency) obj; - if (optional != other.optional) - return false; + if (optional != other.optional) return false; if (scope == null) { return other.scope == null; } else return scope.equals(other.scope); diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/AbstractPipelineMavenPluginDaoDecorator.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/AbstractPipelineMavenPluginDaoDecorator.java index 91b9fbe4..9f0e7a31 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/AbstractPipelineMavenPluginDaoDecorator.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/AbstractPipelineMavenPluginDaoDecorator.java @@ -1,15 +1,13 @@ package org.jenkinsci.plugins.pipeline.maven.dao; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.SortedSet; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; public abstract class AbstractPipelineMavenPluginDaoDecorator implements PipelineMavenPluginDao { @@ -30,23 +28,72 @@ public Builder 
getBuilder() { } @Override - public void recordDependency(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String scope, boolean ignoreUpstreamTriggers, String classifier) { - delegate.recordDependency(jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers, classifier); - } - - @Override - public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) { - delegate.recordParentProject(jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers); - } - - @Override - public void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion, @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) { - delegate.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, type, baseVersion, repositoryUrl, skipDownstreamTriggers, extension, classifier); - } - - @Override - public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { - delegate.recordBuildUpstreamCause(upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber); + public void recordDependency( + @NonNull String jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String scope, + boolean ignoreUpstreamTriggers, + String classifier) { + delegate.recordDependency( + jobFullName, + buildNumber, + groupId, + artifactId, + version, + type, + scope, + ignoreUpstreamTriggers, + classifier); + } + + @Override 
+ public void recordParentProject( + @NonNull String jobFullName, + int buildNumber, + @NonNull String parentGroupId, + @NonNull String parentArtifactId, + @NonNull String parentVersion, + boolean ignoreUpstreamTriggers) { + delegate.recordParentProject( + jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers); + } + + @Override + public void recordGeneratedArtifact( + @NonNull String jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String baseVersion, + @Nullable String repositoryUrl, + boolean skipDownstreamTriggers, + String extension, + String classifier) { + delegate.recordGeneratedArtifact( + jobFullName, + buildNumber, + groupId, + artifactId, + version, + type, + baseVersion, + repositoryUrl, + skipDownstreamTriggers, + extension, + classifier); + } + + @Override + public void recordBuildUpstreamCause( + String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { + delegate.recordBuildUpstreamCause( + upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber); } @NonNull @@ -84,13 +131,20 @@ public List listDownstreamJobs(@NonNull String jobFullName, int buildNum @NonNull @Override - public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) { + public Map> listDownstreamJobsByArtifact( + @NonNull String jobFullName, int buildNumber) { return delegate.listDownstreamJobsByArtifact(jobFullName, buildNumber); } @NonNull @Override - public SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type, @Nullable String classifier) { + public SortedSet listDownstreamJobs( + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @Nullable String baseVersion, + @NonNull String type, + @Nullable String 
classifier) { return delegate.listDownstreamJobs(groupId, artifactId, version, baseVersion, type, classifier); } @@ -107,8 +161,8 @@ public Map listTransitiveUpstreamJobs(@NonNull String jobFullNa } @Override - public Map listTransitiveUpstreamJobs(String jobFullName, int buildNumber, - UpstreamMemory upstreamMemory) { + public Map listTransitiveUpstreamJobs( + String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) { return delegate.listTransitiveUpstreamJobs(jobFullName, buildNumber, upstreamMemory); } @@ -123,8 +177,14 @@ public String toPrettyString() { } @Override - public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) { - delegate.updateBuildOnCompletion(jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis); + public void updateBuildOnCompletion( + @NonNull String jobFullName, + int buildNumber, + int buildResultOrdinal, + long startTimeInMillis, + long durationInMillis) { + delegate.updateBuildOnCompletion( + jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis); } @Override diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java index 134ec160..48f12c2a 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java @@ -25,4 +25,4 @@ public int getHits() { public int getMisses() { return misses; } -} \ No newline at end of file +} diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java index 5f3563fd..1d421a4c 100644 --- 
a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java @@ -18,14 +18,8 @@ public class CustomTypePipelineMavenPluginDaoDecorator extends AbstractPipelineM *

* See https://maven.apache.org/ref/3.8.4/maven-core/artifact-handlers.html for more details. */ - private static final List KNOWN_JAR_TYPES_WITH_DIFFERENT_EXTENSION = Arrays.asList( - "test-jar", - "maven-plugin", - "ejb", - "ejb-client", - "java-source", - "javadoc" - ); + private static final List KNOWN_JAR_TYPES_WITH_DIFFERENT_EXTENSION = + Arrays.asList("test-jar", "maven-plugin", "ejb", "ejb-client", "java-source", "javadoc"); private final Logger LOGGER = Logger.getLogger(getClass().getName()); @@ -34,12 +28,48 @@ public CustomTypePipelineMavenPluginDaoDecorator(@NonNull PipelineMavenPluginDao } @Override - public void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion, @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) { - super.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, type, baseVersion, repositoryUrl, skipDownstreamTriggers, extension, classifier); + public void recordGeneratedArtifact( + @NonNull String jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String baseVersion, + @Nullable String repositoryUrl, + boolean skipDownstreamTriggers, + String extension, + String classifier) { + super.recordGeneratedArtifact( + jobFullName, + buildNumber, + groupId, + artifactId, + version, + type, + baseVersion, + repositoryUrl, + skipDownstreamTriggers, + extension, + classifier); if (shouldReportAgainWithExtensionAsType(type, extension)) { - LOGGER.log(Level.FINE, "Recording generated artifact " + groupId + ":" + artifactId + ":" + version + " as " + extension + " (in addition to " + type + ")"); - super.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, extension, baseVersion, repositoryUrl, 
skipDownstreamTriggers, extension, classifier); + LOGGER.log( + Level.FINE, + "Recording generated artifact " + groupId + ":" + artifactId + ":" + version + " as " + extension + + " (in addition to " + type + ")"); + super.recordGeneratedArtifact( + jobFullName, + buildNumber, + groupId, + artifactId, + version, + extension, + baseVersion, + repositoryUrl, + skipDownstreamTriggers, + extension, + classifier); } } @@ -50,5 +80,4 @@ private boolean shouldReportAgainWithExtensionAsType(String type, String extensi return type != null && !type.equals(extension); } - } diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java index 47ed96b4..31630b6e 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java @@ -1,13 +1,9 @@ package org.jenkinsci.plugins.pipeline.maven.dao; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; +import static java.util.Optional.ofNullable; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; - -import static java.util.Optional.ofNullable; - import java.text.NumberFormat; import java.util.ArrayList; import java.util.List; @@ -17,10 +13,12 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; public class MonitoringPipelineMavenPluginDaoDecorator extends AbstractPipelineMavenPluginDaoDecorator { - private final static List> 
CACHE_STATS_SUPPLIERS = new ArrayList<>(); + private static final List> CACHE_STATS_SUPPLIERS = new ArrayList<>(); public static void registerCacheStatsSupplier(Supplier supplier) { CACHE_STATS_SUPPLIERS.add(supplier); @@ -36,23 +34,72 @@ public MonitoringPipelineMavenPluginDaoDecorator(@NonNull PipelineMavenPluginDao } @Override - public void recordDependency(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String scope, boolean ignoreUpstreamTriggers, String classifier) { - executeMonitored(() -> super.recordDependency(jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers, classifier)); - } - - @Override - public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) { - executeMonitored(() -> super.recordParentProject(jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers)); - } - - @Override - public void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion, @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) { - executeMonitored(() -> super.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, type, baseVersion, repositoryUrl, skipDownstreamTriggers, extension, classifier)); - } - - @Override - public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { - executeMonitored(() -> super.recordBuildUpstreamCause(upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber)); + public void recordDependency( + @NonNull String 
jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String scope, + boolean ignoreUpstreamTriggers, + String classifier) { + executeMonitored(() -> super.recordDependency( + jobFullName, + buildNumber, + groupId, + artifactId, + version, + type, + scope, + ignoreUpstreamTriggers, + classifier)); + } + + @Override + public void recordParentProject( + @NonNull String jobFullName, + int buildNumber, + @NonNull String parentGroupId, + @NonNull String parentArtifactId, + @NonNull String parentVersion, + boolean ignoreUpstreamTriggers) { + executeMonitored(() -> super.recordParentProject( + jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers)); + } + + @Override + public void recordGeneratedArtifact( + @NonNull String jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String baseVersion, + @Nullable String repositoryUrl, + boolean skipDownstreamTriggers, + String extension, + String classifier) { + executeMonitored(() -> super.recordGeneratedArtifact( + jobFullName, + buildNumber, + groupId, + artifactId, + version, + type, + baseVersion, + repositoryUrl, + skipDownstreamTriggers, + extension, + classifier)); + } + + @Override + public void recordBuildUpstreamCause( + String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { + executeMonitored(() -> super.recordBuildUpstreamCause( + upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber)); } @Override @@ -91,14 +138,17 @@ public List listDownstreamJobs(@NonNull String jobFullName, int buildNum @NonNull @Override - public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) { + public Map> listDownstreamJobsByArtifact( + @NonNull String jobFullName, int buildNumber) { return 
executeMonitored(() -> super.listDownstreamJobsByArtifact(jobFullName, buildNumber)); } @NonNull @Override - public SortedSet listDownstreamJobs(String groupId, String artifactId, String version, String baseVersion, String type, String classifier) { - return executeMonitored(() -> super.listDownstreamJobs(groupId, artifactId, version, baseVersion, type, classifier)); + public SortedSet listDownstreamJobs( + String groupId, String artifactId, String version, String baseVersion, String type, String classifier) { + return executeMonitored( + () -> super.listDownstreamJobs(groupId, artifactId, version, baseVersion, type, classifier)); } @Override @@ -119,16 +169,29 @@ public void cleanup() { } @Override - public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) { - executeMonitored(() -> super.updateBuildOnCompletion(jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis)); + public void updateBuildOnCompletion( + @NonNull String jobFullName, + int buildNumber, + int buildResultOrdinal, + long startTimeInMillis, + long durationInMillis) { + executeMonitored(() -> super.updateBuildOnCompletion( + jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis)); } @Override public String toPrettyString() { - StringBuilder builder = new StringBuilder(ofNullable(super.toPrettyString()).orElse("")); + StringBuilder builder = + new StringBuilder(ofNullable(super.toPrettyString()).orElse("")); builder.append("\r\n Performances: "); - builder.append("\r\n\t find: totalDurationInMs=").append(TimeUnit.NANOSECONDS.toMillis(findDurationInNanos.get())).append(", count=").append(findCount.get()); - builder.append("\r\n\t write: totalDurationInMs=").append(TimeUnit.NANOSECONDS.toMillis(writeDurationInNanos.get())).append(", count=").append(writeCount.get()); + builder.append("\r\n\t find: totalDurationInMs=") + 
.append(TimeUnit.NANOSECONDS.toMillis(findDurationInNanos.get())) + .append(", count=") + .append(findCount.get()); + builder.append("\r\n\t write: totalDurationInMs=") + .append(TimeUnit.NANOSECONDS.toMillis(writeDurationInNanos.get())) + .append(", count=") + .append(writeCount.get()); builder.append("\r\n Caches: "); CACHE_STATS_SUPPLIERS.forEach(s -> builder.append("\r\n\t ").append(cachePrettyString(s.get()))); return builder.toString(); @@ -177,5 +240,4 @@ private interface CallableWithResult { private interface CallableWithoutResult { void call(); } - } diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java index ac962556..6653e33a 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java @@ -24,19 +24,18 @@ package org.jenkinsci.plugins.pipeline.maven.dao; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.model.Item; import hudson.model.Run; import hudson.util.FormValidation; -import org.apache.maven.artifact.Artifact; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; - -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.Closeable; import java.util.List; import java.util.Map; import java.util.SortedSet; +import org.apache.maven.artifact.Artifact; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; /** * @author Cyrille Le Clerc @@ -55,9 +54,16 @@ public interface PipelineMavenPluginDao extends Closeable { * @param ignoreUpstreamTriggers see 
PipelineGraphPublisher#isIgnoreUpstreamTriggers() * @param classifier Maven dependency classifier */ - void recordDependency(@NonNull String jobFullName, int buildNumber, - @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String scope, - boolean ignoreUpstreamTriggers, String classifier); + void recordDependency( + @NonNull String jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String scope, + boolean ignoreUpstreamTriggers, + String classifier); /** * Record a Maven parent project of a pom processed by this build of a build. @@ -69,9 +75,13 @@ void recordDependency(@NonNull String jobFullName, int buildNumber, * @param parentVersion Maven dependency version * @param ignoreUpstreamTriggers see PipelineGraphPublisher#isIgnoreUpstreamTriggers() */ - void recordParentProject(@NonNull String jobFullName, int buildNumber, - @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, - boolean ignoreUpstreamTriggers); + void recordParentProject( + @NonNull String jobFullName, + int buildNumber, + @NonNull String parentGroupId, + @NonNull String parentArtifactId, + @NonNull String parentVersion, + boolean ignoreUpstreamTriggers); /** * Record a Maven artifact generated in a build. * @param jobFullName see {@link Item#getFullName()} @@ -79,19 +89,28 @@ void recordParentProject(@NonNull String jobFullName, int buildNumber, * @param groupId Maven artifact groupId * @param artifactId Maven artifact artifactId * @param version Maven artifact version, the "expanded version" for snapshots who have been "mvn deploy" or equivalent -* (e.g. "1.1-20170808.155524-66" for "1.1-SNAPSHOT" deployed to a repo) + * (e.g. "1.1-20170808.155524-66" for "1.1-SNAPSHOT" deployed to a repo) * @param type Maven artifact type (e.g. "jar", "war", "pom", hpi"...) 
* @param baseVersion Maven artifact version, the NOT "expanded version" for snapshots who have been "mvn deploy" or equivalent -* (e.g. baseVersion is "1.1-SNAPSHOT" for a "1.1-SNAPSHOT" artifact that has been deployed to a repo and expanded -* to "1.1-20170808.155524-66") + * (e.g. baseVersion is "1.1-SNAPSHOT" for a "1.1-SNAPSHOT" artifact that has been deployed to a repo and expanded + * to "1.1-20170808.155524-66") * @param repositoryUrl URL of the Maven repository on which the artifact is deployed ("mvn deploy"). {@code null} if the artifact was not deployed * @param skipDownstreamTriggers see PipelineGraphPublisher#isSkipDownstreamTriggers() * @param extension * @param classifier */ - void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, - @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion, - @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier); + void recordGeneratedArtifact( + @NonNull String jobFullName, + int buildNumber, + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @NonNull String baseVersion, + @Nullable String repositoryUrl, + boolean skipDownstreamTriggers, + String extension, + String classifier); /** * TODO add {@link MavenArtifact} as org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyUpstreamCause gives these details @@ -100,7 +119,8 @@ void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, * @param downstreamJobName Job that is triggered. See {@link Item#getFullName()}. * @param downstreamBuildNumber Job that is triggered. See {@link Run#getNumber()}. 
*/ - void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber); + void recordBuildUpstreamCause( + String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber); /** * Return the dependencies registered by the given build. @@ -190,7 +210,12 @@ void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, * @see Item#getFullName() */ @NonNull - default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type) { + default SortedSet listDownstreamJobs( + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @Nullable String baseVersion, + @NonNull String type) { return listDownstreamJobs(groupId, artifactId, version, baseVersion, type, null); } @@ -207,7 +232,13 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S * @see Item#getFullName() */ @NonNull - SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type, @Nullable String classifier); + SortedSet listDownstreamJobs( + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @Nullable String baseVersion, + @NonNull String type, + @Nullable String classifier); /** * List the upstream jobs who generate an artifact that the given build depends on @@ -222,7 +253,7 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S */ @NonNull Map listUpstreamJobs(@NonNull String jobFullName, int buildNumber); - + /** * List the upstream jobs who generate an artifact that the given build depends * on, including transitive dependencies (build identified by the given @@ -240,7 +271,7 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S * List the upstream jobs who generate an artifact that 
the given build depends * on, including transitive dependencies (build identified by the given * {@code jobFullName}, {@code buildNumber}) - * + * * Use a memory for already known upstreams to boost performance * * @param jobFullName see {@link Item#getFullName()} @@ -250,7 +281,8 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S * @see Item#getFullName() */ @NonNull - Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory); + Map listTransitiveUpstreamJobs( + @NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory); /** * Routine task to cleanup the database and reclaim disk space (if possible in the underlying database). @@ -264,7 +296,6 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S String getDescription(); - /** * Update the database with build result details. * @@ -274,7 +305,12 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S * @param startTimeInMillis see {@link Run#getStartTimeInMillis()} * @param durationInMillis see {@link Run#getDuration()} */ - void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis); + void updateBuildOnCompletion( + @NonNull String jobFullName, + int buildNumber, + int buildResultOrdinal, + long startTimeInMillis, + long durationInMillis); /** * Indicates if the underlying database is production grade enough for the workload. 
@@ -328,7 +364,6 @@ public Config() { PipelineMavenPluginDao build(Config config); FormValidation validateConfiguration(Config config); - } Builder getBuilder(); diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java index 31212042..a5f68102 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java @@ -24,13 +24,9 @@ package org.jenkinsci.plugins.pipeline.maven.dao; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.util.FormValidation; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; - -import edu.umd.cs.findbugs.annotations.NonNull; - import java.io.IOException; import java.util.Collections; import java.util.List; @@ -39,6 +35,8 @@ import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; /** * @author Cyrille Le Clerc @@ -68,9 +66,22 @@ public FormValidation validateConfiguration(Config config) { } @Override - public void recordDependency(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String scope, boolean ignoreUpstreamTriggers, String classifier) { - LOGGER.log(Level.FINEST, "NOT recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})", - new Object[]{jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers}); + public void recordDependency( + String jobFullName, + int buildNumber, + String groupId, + String artifactId, + String version, + String type, + 
String scope, + boolean ignoreUpstreamTriggers, + String classifier) { + LOGGER.log( + Level.FINEST, + "NOT recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})", + new Object[] { + jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers + }); } @NonNull @@ -80,41 +91,69 @@ public List listDependencies(@NonNull String jobFullName, int b } @Override - public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) { - LOGGER.log(Level.FINEST, "NOT recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})", - new Object[]{jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers}); - - } - - @Override - public void recordGeneratedArtifact(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String baseVersion, String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) { - LOGGER.log(Level.FINEST, "NOT recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})", - new Object[]{jobFullName, buildNumber, groupId, artifactId, baseVersion, type, version, repositoryUrl, skipDownstreamTriggers}); - - } - - @Override - public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { - LOGGER.log(Level.FINEST, "NOT recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})", - new Object[]{upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber}); + public void recordParentProject( + @NonNull String jobFullName, + int buildNumber, + @NonNull String parentGroupId, + @NonNull String parentArtifactId, + @NonNull String parentVersion, + boolean ignoreUpstreamTriggers) { + 
LOGGER.log(Level.FINEST, "NOT recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})", new Object[] { + jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers + }); + } + + @Override + public void recordGeneratedArtifact( + String jobFullName, + int buildNumber, + String groupId, + String artifactId, + String version, + String type, + String baseVersion, + String repositoryUrl, + boolean skipDownstreamTriggers, + String extension, + String classifier) { + LOGGER.log( + Level.FINEST, + "NOT recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})", + new Object[] { + jobFullName, + buildNumber, + groupId, + artifactId, + baseVersion, + type, + version, + repositoryUrl, + skipDownstreamTriggers + }); + } + + @Override + public void recordBuildUpstreamCause( + String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { + LOGGER.log( + Level.FINEST, + "NOT recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})", + new Object[] {upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber}); } @Override public void renameJob(String oldFullName, String newFullName) { - LOGGER.log(Level.FINEST, "NOT renameJob({0}, {1})", new Object[]{oldFullName, newFullName}); - + LOGGER.log(Level.FINEST, "NOT renameJob({0}, {1})", new Object[] {oldFullName, newFullName}); } @Override public void deleteJob(String jobFullName) { - LOGGER.log(Level.FINEST, "NOT deleteJob({0})", new Object[]{jobFullName}); - + LOGGER.log(Level.FINEST, "NOT deleteJob({0})", new Object[] {jobFullName}); } @Override public void deleteBuild(String jobFullName, int buildNumber) { - LOGGER.log(Level.FINEST, "NOT deleteBuild({0}#{1})", new Object[]{jobFullName, buildNumber}); - + LOGGER.log(Level.FINEST, "NOT deleteBuild({0}#{1})", new Object[] {jobFullName, buildNumber}); } @NonNull @@ -126,13 +165,15 @@ public List 
listDownstreamJobs(@NonNull String jobFullName, int buildNum @NonNull @Override - public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) { + public Map> listDownstreamJobsByArtifact( + @NonNull String jobFullName, int buildNumber) { return Collections.emptyMap(); } @NonNull @Override - public SortedSet listDownstreamJobs(String groupId, String artifactId, String version, String baseVersion, String type, String classifier) { + public SortedSet listDownstreamJobs( + String groupId, String artifactId, String version, String baseVersion, String type, String classifier) { return new TreeSet<>(); } @@ -150,8 +191,8 @@ public Map listTransitiveUpstreamJobs(String jobFullName, int b @NonNull @Override - public Map listTransitiveUpstreamJobs(String jobFullName, int buildNumber, - UpstreamMemory upstreamMemory) { + public Map listTransitiveUpstreamJobs( + String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) { return Collections.emptyMap(); } @@ -167,9 +208,16 @@ public List getGeneratedArtifacts(@NonNull String jobFullName, in } @Override - public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) { - LOGGER.log(Level.FINEST, "NOOT updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}", - new Object[]{jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis}); + public void updateBuildOnCompletion( + @NonNull String jobFullName, + int buildNumber, + int buildResultOrdinal, + long startTimeInMillis, + long durationInMillis) { + LOGGER.log( + Level.FINEST, + "NOOT updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}", + new Object[] {jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis}); } @Override @@ -186,5 +234,4 @@ public boolean isEnoughProductionGradeForTheWorkload() { public void close() throws IOException { // no op } - } diff 
--git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java index 2d18f963..b079c7fa 100644 --- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java +++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java @@ -20,7 +20,8 @@ public class UpstreamMemory { private static final AtomicInteger MISSES = new AtomicInteger(); static { - MonitoringPipelineMavenPluginDaoDecorator.registerCacheStatsSupplier(() -> new CacheStats("listUpstreamJobs", HITS.get(), MISSES.get())); + MonitoringPipelineMavenPluginDaoDecorator.registerCacheStatsSupplier( + () -> new CacheStats("listUpstreamJobs", HITS.get(), MISSES.get())); } // remember the already known upstreams @@ -35,5 +36,4 @@ public Map listUpstreamJobs(PipelineMavenPluginDao dao, String } return upstreams.computeIfAbsent(key, k -> dao.listUpstreamJobs(jobFullName, buildNumber)); } - } diff --git a/pipeline-maven-database/pom.xml b/pipeline-maven-database/pom.xml index 4640ae7e..48cacf8f 100644 --- a/pipeline-maven-database/pom.xml +++ b/pipeline-maven-database/pom.xml @@ -7,18 +7,10 @@ ${changelist} pipeline-maven-database - Pipeline Maven Plugin Database hpi + Pipeline Maven Plugin Database - - org.jenkins-ci.plugins - pipeline-maven-api - - - org.jenkins-ci.plugins - credentials - com.zaxxer HikariCP @@ -54,6 +46,14 @@ ${jenkins-plugin-postgresql.version} true + + org.jenkins-ci.plugins + credentials + + + org.jenkins-ci.plugins + pipeline-maven-api + org.slf4j slf4j-simple diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java index d726522e..7c976e6b 100644 --- 
a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java @@ -29,6 +29,8 @@ import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.ExtensionList; import hudson.model.Item; @@ -37,21 +39,6 @@ import hudson.security.ACL; import hudson.util.FormValidation; import hudson.util.Secret; -import jenkins.model.Jenkins; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.h2.api.ErrorCode; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; -import org.jenkinsci.plugins.pipeline.maven.dao.*; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; -import org.jenkinsci.plugins.pipeline.maven.db.util.ClassUtils; -import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeIoException; -import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException; - -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import javax.sql.DataSource; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; @@ -71,13 +58,26 @@ import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.Logger; +import javax.sql.DataSource; +import jenkins.model.Jenkins; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.h2.api.ErrorCode; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; +import 
org.jenkinsci.plugins.pipeline.maven.dao.*; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; +import org.jenkinsci.plugins.pipeline.maven.db.util.ClassUtils; +import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeIoException; +import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException; /** * @author Cyrille Le Clerc */ public abstract class AbstractPipelineMavenPluginDao implements PipelineMavenPluginDao { - private static final int OPTIMIZATION_MAX_RECURSION_DEPTH = Integer.getInteger("org.jenkinsci.plugins.pipeline.PipelineMavenPluginDao.OPTIMIZATION_MAX_RECURSION_DEPTH",3); + private static final int OPTIMIZATION_MAX_RECURSION_DEPTH = Integer.getInteger( + "org.jenkinsci.plugins.pipeline.PipelineMavenPluginDao.OPTIMIZATION_MAX_RECURSION_DEPTH", 3); protected final Logger LOGGER = Logger.getLogger(getClass().getName()); @NonNull @@ -118,38 +118,40 @@ public PipelineMavenPluginDao build(Config config) { PipelineMavenPluginDao dao; try { String jdbcUrl = config.getJdbcUrl(); - if(StringUtils.isBlank(jdbcUrl)) { + if (StringUtils.isBlank(jdbcUrl)) { // some dao such h2 can use default jdbc ur Optional optionalPipelineMavenPluginDao = - ExtensionList.lookup(PipelineMavenPluginDao.class) - .stream() - .filter(pipelineMavenPluginDao -> pipelineMavenPluginDao.getClass().getName().equals(pipelineMavenPluginDaoClass.getName())) - .findFirst(); - if(optionalPipelineMavenPluginDao.isPresent()){ + ExtensionList.lookup(PipelineMavenPluginDao.class).stream() + .filter(pipelineMavenPluginDao -> pipelineMavenPluginDao + .getClass() + .getName() + .equals(pipelineMavenPluginDaoClass.getName())) + .findFirst(); + if (optionalPipelineMavenPluginDao.isPresent()) { jdbcUrl = optionalPipelineMavenPluginDao.get().getDefaultJdbcUrl(); } } String jdbcUserName, jdbcPassword; - if (StringUtils.isBlank(config.getCredentialsId()) && !AbstractPipelineMavenPluginDao.this.acceptNoCredentials()) + if (StringUtils.isBlank(config.getCredentialsId()) + && 
!AbstractPipelineMavenPluginDao.this.acceptNoCredentials()) throw new IllegalStateException("No credentials defined for JDBC URL '" + jdbcUrl + "'"); UsernamePasswordCredentials jdbcCredentials = null; if (!StringUtils.isBlank(config.getCredentialsId())) { jdbcCredentials = (UsernamePasswordCredentials) CredentialsMatchers.firstOrNull( - CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, j, - ACL.SYSTEM, Collections.EMPTY_LIST), + CredentialsProvider.lookupCredentials( + UsernamePasswordCredentials.class, j, ACL.SYSTEM, Collections.EMPTY_LIST), CredentialsMatchers.withId(config.getCredentialsId())); } if (jdbcCredentials == null && pipelineMavenPluginDaoClass == PipelineMavenPluginH2Dao.class) { jdbcUserName = "sa"; jdbcPassword = "sa"; - } - else if (jdbcCredentials == null) { - throw new IllegalStateException("Credentials '" + config.getCredentialsId() + "' defined for JDBC URL '" + jdbcUrl + "' NOT found"); - } - else { + } else if (jdbcCredentials == null) { + throw new IllegalStateException("Credentials '" + config.getCredentialsId() + + "' defined for JDBC URL '" + jdbcUrl + "' NOT found"); + } else { jdbcUserName = jdbcCredentials.getUsername(); jdbcPassword = Secret.toString(jdbcCredentials.getPassword()); } @@ -157,7 +159,8 @@ else if (jdbcCredentials == null) { HikariConfig dsConfig = createHikariConfig(config.getProperties(), jdbcUrl, jdbcUserName, jdbcPassword); dsConfig.setAutoCommit(false); - // TODO cleanup this quick fix for JENKINS-54587, we should have a better solution with the JDBC driver loaded by the DAO itself + // TODO cleanup this quick fix for JENKINS-54587, we should have a better solution with the JDBC driver + // loaded by the DAO itself try { DriverManager.getDriver(jdbcUrl); } catch (SQLException e) { @@ -173,13 +176,15 @@ else if (jdbcCredentials == null) { try { Class.forName("com.mysql.cj.jdbc.Driver"); } catch (ClassNotFoundException cnfe) { - throw new RuntimeException("MySql driver 
'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver"); + throw new RuntimeException( + "MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver"); } } else if (jdbcUrl.startsWith("jdbc:postgresql:")) { try { Class.forName("org.postgresql.Driver"); } catch (ClassNotFoundException cnfe) { - throw new RuntimeException("PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL Database Plugin' to install the PostgreSQL driver"); + throw new RuntimeException( + "PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL Database Plugin' to install the PostgreSQL driver"); } } else { throw new IllegalArgumentException("Unsupported database type in JDBC URL " + jdbcUrl); @@ -190,18 +195,21 @@ else if (jdbcCredentials == null) { } } - LOGGER.log(Level.INFO, "Connect to database {0} with username {1}", new Object[]{jdbcUrl, jdbcUserName}); + LOGGER.log( + Level.INFO, "Connect to database {0} with username {1}", new Object[] {jdbcUrl, jdbcUserName}); DataSource ds = new HikariDataSource(dsConfig); try { - dao = new MonitoringPipelineMavenPluginDaoDecorator( - new CustomTypePipelineMavenPluginDaoDecorator((PipelineMavenPluginDao)pipelineMavenPluginDaoClass + dao = new MonitoringPipelineMavenPluginDaoDecorator(new CustomTypePipelineMavenPluginDaoDecorator( + (PipelineMavenPluginDao) pipelineMavenPluginDaoClass .getConstructor(DataSource.class) .newInstance(ds))); } catch (Exception e) { throw new SQLException( - "Exception connecting to '" + jdbcUrl + "' with credentials '" + config.getCredentialsId() + "' (" + - jdbcUserName + "/***) and DAO " + getClass().getSimpleName(), e); + "Exception connecting to '" + jdbcUrl + "' with credentials '" + config.getCredentialsId() + + "' (" + jdbcUserName + "/***) and DAO " + + getClass().getSimpleName(), + e); } } catch (RuntimeException | SQLException e) { 
LOGGER.log(Level.WARNING, "Exception creating database dao, skip", e); @@ -234,9 +242,15 @@ public FormValidation validateConfiguration(Config config) { Class.forName(driverClass); } catch (ClassNotFoundException e) { if ("com.mysql.cj.jdbc.Driver".equals(driverClass)) { - return FormValidation.error("MySQL JDBC driver '" + driverClass + "' not found, please install the Jenkins 'MySQL API Plugin'", e); + return FormValidation.error( + "MySQL JDBC driver '" + driverClass + + "' not found, please install the Jenkins 'MySQL API Plugin'", + e); } else if ("org.postgresql.Driver".equals(driverClass)) { - return FormValidation.error("PostgreSQL JDBC driver '" + driverClass + "' not found, please install the Jenkins 'PostgreSQL API Plugin'" + jdbcUrl, e); + return FormValidation.error( + "PostgreSQL JDBC driver '" + driverClass + + "' not found, please install the Jenkins 'PostgreSQL API Plugin'" + jdbcUrl, + e); } else { throw e; } @@ -253,12 +267,17 @@ public FormValidation validateConfiguration(Config config) { return FormValidation.error("No credentials specified for JDBC url '" + jdbcUrl + "'"); } } else { - UsernamePasswordCredentials jdbcCredentials = (UsernamePasswordCredentials) CredentialsMatchers.firstOrNull( - CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, Jenkins.get(), - ACL.SYSTEM, Collections.EMPTY_LIST), - CredentialsMatchers.withId(jdbcCredentialsId)); + UsernamePasswordCredentials jdbcCredentials = + (UsernamePasswordCredentials) CredentialsMatchers.firstOrNull( + CredentialsProvider.lookupCredentials( + UsernamePasswordCredentials.class, + Jenkins.get(), + ACL.SYSTEM, + Collections.EMPTY_LIST), + CredentialsMatchers.withId(jdbcCredentialsId)); if (jdbcCredentials == null) { - return FormValidation.error("Credentials '" + jdbcCredentialsId + "' defined for JDBC URL '" + jdbcUrl + "' not found"); + return FormValidation.error("Credentials '" + jdbcCredentialsId + "' defined for JDBC URL '" + + jdbcUrl + "' not found"); } 
jdbcUserName = jdbcCredentials.getUsername(); jdbcPassword = Secret.toString(jdbcCredentials.getPassword()); @@ -273,34 +292,39 @@ public FormValidation validateConfiguration(Config config) { // * MySQL: "8.0.13" // * Amazon Aurora: "5.6.10" // * MariaDB: "5.5.5-10.2.20-MariaDB", "5.5.5-10.3.11-MariaDB-1:10.3.11+maria~bionic" - String databaseVersionDescription = metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); + String databaseVersionDescription = + metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); LOGGER.log(Level.INFO, "Checking JDBC connection against " + databaseVersionDescription); - String databaseRequirement = "MySQL Server up to 8.1 or Amazon Aurora MySQL 5.6+ or MariaDB up to 11.1 or PostgreSQL up to 16 is required"; + String databaseRequirement = + "MySQL Server up to 8.1 or Amazon Aurora MySQL 5.6+ or MariaDB up to 11.1 or PostgreSQL up to 16 is required"; if ("MariaDB".equals(metaData.getDatabaseProductName())) { @Nullable - String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion(metaData.getDatabaseProductVersion()); + String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion( + metaData.getDatabaseProductVersion()); if (mariaDbVersion == null || !mariaDbVersion.matches("^(10|11)\\..*")) { - return FormValidation.warning("Non tested MariaDB version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); + return FormValidation.warning("Non tested MariaDB version " + + metaData.getDatabaseProductVersion() + ". 
" + databaseRequirement); } } else if ("MySQL".equals(metaData.getDatabaseProductName())) { - @Nullable - String amazonAuroraVersion; + @Nullable String amazonAuroraVersion; try (Statement stmt = cnn.createStatement()) { try (ResultSet rst = stmt.executeQuery("select AURORA_VERSION()")) { rst.next(); amazonAuroraVersion = rst.getString(1); databaseVersionDescription += " / Aurora " + rst.getString(1); } catch (SQLException e) { - if (e.getErrorCode() == 1305) { // com.mysql.cj.exceptions.MysqlErrorNumbers.ER_SP_DOES_NOT_EXIST + if (e.getErrorCode() + == 1305) { // com.mysql.cj.exceptions.MysqlErrorNumbers.ER_SP_DOES_NOT_EXIST amazonAuroraVersion = null; } else { - LOGGER.log(Level.WARNING,"Exception checking Amazon Aurora version", e); + LOGGER.log(Level.WARNING, "Exception checking Amazon Aurora version", e); amazonAuroraVersion = null; } } } @Nullable - String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion(metaData.getDatabaseProductVersion()); + String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion( + metaData.getDatabaseProductVersion()); switch (metaData.getDatabaseMajorVersion()) { case 8: @@ -314,24 +338,33 @@ public FormValidation validateConfiguration(Config config) { case 6: if (amazonAuroraVersion == null) { // see JENKINS-54784 - return FormValidation.warning("Non validated MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); + return FormValidation.warning("Non validated MySQL version " + + metaData.getDatabaseProductVersion() + ". " + + databaseRequirement); } else { // we have successfully tested on Amazon Aurora MySQL 5.6.10a break; } case 5: if (mariaDbVersion == null) { - return FormValidation.warning("Non validated MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); + return FormValidation.warning("Non validated MySQL version " + + metaData.getDatabaseProductVersion() + ". 
" + + databaseRequirement); } else { // JENKINS-55378 have successfully tested with "5.5.5-10.2.20-MariaDB" - return FormValidation.ok("MariaDB version " + mariaDbVersion + " detected. Please ensure that your MariaDB version is at least version 10.2+"); + return FormValidation.ok( + "MariaDB version " + mariaDbVersion + + " detected. Please ensure that your MariaDB version is at least version 10.2+"); } default: - return FormValidation.error("Non supported MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); + return FormValidation.error("Non supported MySQL version " + + metaData.getDatabaseProductVersion() + ". " + + databaseRequirement); } break; default: - return FormValidation.error("Non supported MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); + return FormValidation.error("Non supported MySQL version " + + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); } } else if ("PostgreSQL".equals(metaData.getDatabaseProductName())) { try (Statement stmt = cnn.createStatement()) { @@ -342,7 +375,7 @@ public FormValidation validateConfiguration(Config config) { } catch (SQLException e) { // org.postgresql.util.PSQLState.UNDEFINED_FUNCTION.getState() if (!"42883".equals(e.getSQLState())) { - LOGGER.log(Level.WARNING,"Exception checking Amazon Aurora version", e); + LOGGER.log(Level.WARNING, "Exception checking Amazon Aurora version", e); } } } @@ -359,10 +392,12 @@ public FormValidation validateConfiguration(Config config) { // OK break; default: - return FormValidation.warning("Non tested PostgreSQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); + return FormValidation.warning("Non tested PostgreSQL version " + + metaData.getDatabaseProductVersion() + ". " + databaseRequirement); } } else { - return FormValidation.warning("Non production grade database. 
For production workloads, " + databaseRequirement); + return FormValidation.warning( + "Non production grade database. For production workloads, " + databaseRequirement); } try (Statement stmt = cnn.createStatement()) { try (ResultSet rst = stmt.executeQuery("select 1")) { @@ -371,14 +406,15 @@ public FormValidation validateConfiguration(Config config) { } } return FormValidation.ok(databaseVersionDescription + " is a supported database"); - } catch (SQLException e ){ + } catch (SQLException e) { return FormValidation.error(e, "Failure to connect to the database " + jdbcUrl); } } } catch (RuntimeException e) { return FormValidation.error(e, "Failed to test JDBC connection '" + jdbcUrl + "'"); } catch (ClassNotFoundException e) { - return FormValidation.error(e, "Failed to load JDBC driver '" + driverClass + "' for JDBC connection '" + jdbcUrl + "'"); + return FormValidation.error( + e, "Failed to load JDBC driver '" + driverClass + "' for JDBC connection '" + jdbcUrl + "'"); } } } @@ -388,7 +424,8 @@ public Builder getBuilder() { return new JDBCDaoBuilder(getClass()); } - private static HikariConfig createHikariConfig(String properties, String jdbcUrl, String jdbcUserName, String jdbcPassword) { + private static HikariConfig createHikariConfig( + String properties, String jdbcUrl, String jdbcUserName, String jdbcPassword) { Properties p = new Properties(); // todo refactor the DAO to inject config defaults in the DAO if (jdbcUrl.startsWith("jdbc:mysql")) { @@ -407,7 +444,8 @@ private static HikariConfig createHikariConfig(String properties, String jdbcUrl } else if (jdbcUrl.startsWith("jdbc:postgresql")) { // no tuning recommendations found for postgresql } else if (jdbcUrl.startsWith("jdbc:h2")) { - // dsConfig.setDataSourceClassName("org.h2.jdbcx.JdbcDataSource"); don't specify the datasource due to a classloading issue + // dsConfig.setDataSourceClassName("org.h2.jdbcx.JdbcDataSource"); don't specify the datasource due to a + // classloading issue } else { // 
unsupported config } @@ -419,7 +457,8 @@ private static HikariConfig createHikariConfig(String properties, String jdbcUrl throw new IllegalStateException("Failed to read properties.", e); } } - Logger.getLogger(AbstractPipelineMavenPluginDao.class.getName()).log(Level.INFO, "Applied pool properties {0}", p); + Logger.getLogger(AbstractPipelineMavenPluginDao.class.getName()) + .log(Level.INFO, "Applied pool properties {0}", p); HikariConfig dsConfig = new HikariConfig(p); dsConfig.setJdbcUrl(jdbcUrl); dsConfig.setUsername(jdbcUserName); @@ -432,15 +471,29 @@ private static HikariConfig createHikariConfig(String properties, String jdbcUrl protected abstract void registerJdbcDriver(); @Override - public void recordDependency(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String scope, boolean ignoreUpstreamTriggers, String classifier) { - LOGGER.log(Level.FINE, "recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})", - new Object[]{jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers}); + public void recordDependency( + String jobFullName, + int buildNumber, + String groupId, + String artifactId, + String version, + String type, + String scope, + boolean ignoreUpstreamTriggers, + String classifier) { + LOGGER.log( + Level.FINE, + "recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})", + new Object[] { + jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers + }); long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber); long artifactPrimaryKey = getOrCreateArtifactPrimaryKey(groupId, artifactId, version, type, classifier); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO MAVEN_DEPENDENCY(ARTIFACT_ID, BUILD_ID, SCOPE, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?, ?)")) { + try 
(PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO MAVEN_DEPENDENCY(ARTIFACT_ID, BUILD_ID, SCOPE, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?, ?)")) { stmt.setLong(1, artifactPrimaryKey); stmt.setLong(2, buildPrimaryKey); stmt.setString(3, scope); @@ -456,16 +509,17 @@ public void recordDependency(String jobFullName, int buildNumber, String groupId @NonNull @Override public List listDependencies(@NonNull String jobFullName, int buildNumber) { - LOGGER.log(Level.FINER, "listDependencies({0}, {1})", new Object[]{jobFullName, buildNumber}); - String dependenciesSql = "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_DEPENDENCY.scope " + - " FROM MAVEN_ARTIFACT " + - " INNER JOIN MAVEN_DEPENDENCY ON MAVEN_ARTIFACT.ID = MAVEN_DEPENDENCY.ARTIFACT_ID" + - " INNER JOIN JENKINS_BUILD ON MAVEN_DEPENDENCY.BUILD_ID = JENKINS_BUILD.ID " + - " INNER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID " + - " WHERE " + - " JENKINS_JOB.FULL_NAME = ? AND" + - " JENKINS_JOB.JENKINS_MASTER_ID = ? AND" + - " JENKINS_BUILD.NUMBER = ? "; + LOGGER.log(Level.FINER, "listDependencies({0}, {1})", new Object[] {jobFullName, buildNumber}); + String dependenciesSql = + "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_DEPENDENCY.scope " + + " FROM MAVEN_ARTIFACT " + + " INNER JOIN MAVEN_DEPENDENCY ON MAVEN_ARTIFACT.ID = MAVEN_DEPENDENCY.ARTIFACT_ID" + + " INNER JOIN JENKINS_BUILD ON MAVEN_DEPENDENCY.BUILD_ID = JENKINS_BUILD.ID " + + " INNER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID " + + " WHERE " + + " JENKINS_JOB.FULL_NAME = ? AND" + + " JENKINS_JOB.JENKINS_MASTER_ID = ? AND" + + " JENKINS_BUILD.NUMBER = ? 
"; List results = new ArrayList<>(); try (Connection cnn = this.ds.getConnection()) { @@ -497,15 +551,24 @@ public List listDependencies(@NonNull String jobFullName, int b } @Override - public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) { - LOGGER.log(Level.FINE, "recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})", - new Object[]{jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers}); + public void recordParentProject( + @NonNull String jobFullName, + int buildNumber, + @NonNull String parentGroupId, + @NonNull String parentArtifactId, + @NonNull String parentVersion, + boolean ignoreUpstreamTriggers) { + LOGGER.log(Level.FINE, "recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})", new Object[] { + jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers + }); long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber); - long parentArtifactPrimaryKey = getOrCreateArtifactPrimaryKey(parentGroupId, parentArtifactId, parentVersion, "pom", null); + long parentArtifactPrimaryKey = + getOrCreateArtifactPrimaryKey(parentGroupId, parentArtifactId, parentVersion, "pom", null); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO MAVEN_PARENT_PROJECT(ARTIFACT_ID, BUILD_ID, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?)")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO MAVEN_PARENT_PROJECT(ARTIFACT_ID, BUILD_ID, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?)")) { stmt.setLong(1, parentArtifactPrimaryKey); stmt.setLong(2, buildPrimaryKey); stmt.setBoolean(3, ignoreUpstreamTriggers); @@ -518,15 +581,39 @@ public void recordParentProject(@NonNull String jobFullName, int buildNumber, @N } @Override - 
public void recordGeneratedArtifact(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String baseVersion, String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) { - LOGGER.log(Level.FINE, "recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})", - new Object[]{jobFullName, buildNumber, groupId, artifactId, baseVersion, type, version, repositoryUrl, skipDownstreamTriggers}); + public void recordGeneratedArtifact( + String jobFullName, + int buildNumber, + String groupId, + String artifactId, + String version, + String type, + String baseVersion, + String repositoryUrl, + boolean skipDownstreamTriggers, + String extension, + String classifier) { + LOGGER.log( + Level.FINE, + "recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})", + new Object[] { + jobFullName, + buildNumber, + groupId, + artifactId, + baseVersion, + type, + version, + repositoryUrl, + skipDownstreamTriggers + }); long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber); long artifactPrimaryKey = getOrCreateArtifactPrimaryKey(groupId, artifactId, baseVersion, type, classifier); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO GENERATED_MAVEN_ARTIFACT(ARTIFACT_ID, BUILD_ID, VERSION, REPOSITORY_URL, EXTENSION, SKIP_DOWNSTREAM_TRIGGERS) VALUES (?, ?, ?, ?, ?, ?)")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO GENERATED_MAVEN_ARTIFACT(ARTIFACT_ID, BUILD_ID, VERSION, REPOSITORY_URL, EXTENSION, SKIP_DOWNSTREAM_TRIGGERS) VALUES (?, ?, ?, ?, ?, ?)")) { stmt.setLong(1, artifactPrimaryKey); stmt.setLong(2, buildPrimaryKey); stmt.setString(3, version); @@ -542,12 +629,16 @@ public void recordGeneratedArtifact(String jobFullName, int buildNumber, String } @Override - public void 
recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { - LOGGER.log(Level.FINE, "recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})", - new Object[]{upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber}); + public void recordBuildUpstreamCause( + String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) { + LOGGER.log( + Level.FINE, + "recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})", + new Object[] {upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber}); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - String sql = "insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) values (?, ?)"; + String sql = + "insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) values (?, ?)"; long upstreamBuildPrimaryKey = getOrCreateBuildPrimaryKey(upstreamJobName, upstreamBuildNumber); long downstreamBuildPrimaryKey = getOrCreateBuildPrimaryKey(downstreamJobName, downstreamBuildNumber); @@ -558,28 +649,32 @@ public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNu int rowCount = stmt.executeUpdate(); if (rowCount != 1) { - LOGGER.log(Level.INFO, "More/less ({0}) than 1 record inserted in JENKINS_BUILD_UPSTREAM_CAUSE for upstreamBuild: {1}#{2}, downstreamBuild: {3}#{4}", - new Object[]{rowCount, upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber}); + LOGGER.log( + Level.INFO, + "More/less ({0}) than 1 record inserted in JENKINS_BUILD_UPSTREAM_CAUSE for upstreamBuild: {1}#{2}, downstreamBuild: {3}#{4}", + new Object[] { + rowCount, upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber + }); } } cnn.commit(); } catch (SQLException e) { throw new RuntimeSqlException(e); } - } @Override public void renameJob(String 
oldFullName, String newFullName) { - LOGGER.log(Level.FINER, "renameJob({0}, {1})", new Object[]{oldFullName, newFullName}); + LOGGER.log(Level.FINER, "renameJob({0}, {1})", new Object[] {oldFullName, newFullName}); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB SET FULL_NAME = ? WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "UPDATE JENKINS_JOB SET FULL_NAME = ? WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { stmt.setString(1, newFullName); stmt.setString(2, oldFullName); stmt.setLong(3, getJenkinsMasterPrimaryKey(cnn)); int count = stmt.executeUpdate(); - LOGGER.log(Level.FINE, "renameJob({0}, {1}): {2}", new Object[]{oldFullName, newFullName, count}); + LOGGER.log(Level.FINE, "renameJob({0}, {1}): {2}", new Object[] {oldFullName, newFullName, count}); } cnn.commit(); } catch (SQLException e) { @@ -589,14 +684,15 @@ public void renameJob(String oldFullName, String newFullName) { @Override public void deleteJob(String jobFullName) { - LOGGER.log(Level.FINER, "deleteJob({0})", new Object[]{jobFullName}); + LOGGER.log(Level.FINER, "deleteJob({0})", new Object[] {jobFullName}); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - try (PreparedStatement stmt = cnn.prepareStatement("DELETE FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = + cnn.prepareStatement("DELETE FROM JENKINS_JOB WHERE FULL_NAME = ? 
AND JENKINS_MASTER_ID = ?")) { stmt.setString(1, jobFullName); stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn)); int count = stmt.executeUpdate(); - LOGGER.log(Level.FINE, "deleteJob({0}): {1}", new Object[]{jobFullName, count}); + LOGGER.log(Level.FINE, "deleteJob({0}): {1}", new Object[] {jobFullName, count}); } cnn.commit(); } catch (SQLException e) { @@ -606,13 +702,14 @@ public void deleteJob(String jobFullName) { @Override public void deleteBuild(String jobFullName, int buildNumber) { - LOGGER.log(Level.FINER, "deleteBuild({0}#{1})", new Object[]{jobFullName, buildNumber}); + LOGGER.log(Level.FINER, "deleteBuild({0}#{1})", new Object[] {jobFullName, buildNumber}); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); Long jobPrimaryKey; Integer lastBuildNumber; Integer lastSuccessfulBuildNumber; - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID, LAST_BUILD_NUMBER, LAST_SUCCESSFUL_BUILD_NUMBER FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "SELECT ID, LAST_BUILD_NUMBER, LAST_SUCCESSFUL_BUILD_NUMBER FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { stmt.setString(1, jobFullName); stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn)); try (ResultSet rst = stmt.executeQuery()) { @@ -628,15 +725,17 @@ public void deleteBuild(String jobFullName, int buildNumber) { } } if (jobPrimaryKey == null) { - LOGGER.log(Level.FINE, "No record found for job {0}", new Object[]{jobFullName}); + LOGGER.log(Level.FINE, "No record found for job {0}", new Object[] {jobFullName}); return; } if (buildNumber == lastBuildNumber || buildNumber == lastSuccessfulBuildNumber) { Integer newLastBuildNumber = (lastBuildNumber == buildNumber) ? null : lastBuildNumber; - Integer newLastSuccessfulBuildNumber = (lastSuccessfulBuildNumber == buildNumber) ? null : lastSuccessfulBuildNumber; + Integer newLastSuccessfulBuildNumber = + (lastSuccessfulBuildNumber == buildNumber) ? 
null : lastSuccessfulBuildNumber; - try (PreparedStatement stmt = cnn.prepareStatement("SELECT JENKINS_BUILD.number, JENKINS_BUILD.result_id FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER != ? ORDER BY NUMBER DESC")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "SELECT JENKINS_BUILD.number, JENKINS_BUILD.result_id FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER != ? ORDER BY NUMBER DESC")) { stmt.setLong(1, jobPrimaryKey); stmt.setInt(2, buildNumber); stmt.setFetchSize(5); @@ -645,18 +744,20 @@ public void deleteBuild(String jobFullName, int buildNumber) { int currentBuildNumber = rst.getInt("number"); int currentBuildResultId = rst.getInt("result_id"); - if(newLastBuildNumber == null) { + if (newLastBuildNumber == null) { newLastBuildNumber = currentBuildNumber; } - if (newLastSuccessfulBuildNumber == null && Result.SUCCESS.ordinal == currentBuildResultId) { + if (newLastSuccessfulBuildNumber == null + && Result.SUCCESS.ordinal == currentBuildResultId) { newLastSuccessfulBuildNumber = currentBuildNumber; } } } } - try(PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB SET LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? WHERE ID = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "UPDATE JENKINS_JOB SET LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? WHERE ID = ?")) { stmt.setInt(1, newLastBuildNumber); stmt.setInt(2, newLastSuccessfulBuildNumber); stmt.setLong(3, jobPrimaryKey); @@ -664,11 +765,12 @@ public void deleteBuild(String jobFullName, int buildNumber) { } } - try (PreparedStatement stmt = cnn.prepareStatement("DELETE FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER = ?")) { + try (PreparedStatement stmt = + cnn.prepareStatement("DELETE FROM JENKINS_BUILD WHERE JOB_ID = ? 
AND NUMBER = ?")) { stmt.setLong(1, jobPrimaryKey); stmt.setInt(2, buildNumber); int count = stmt.executeUpdate(); - LOGGER.log(Level.FINE, "deleteJob({0}#{1}): {2}", new Object[]{jobFullName, buildNumber, count}); + LOGGER.log(Level.FINE, "deleteJob({0}#{1}): {2}", new Object[] {jobFullName, buildNumber, count}); } cnn.commit(); } catch (SQLException e) { @@ -680,10 +782,11 @@ public void deleteBuild(String jobFullName, int buildNumber) { public void cleanup() { try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - String sql = "DELETE FROM MAVEN_ARTIFACT WHERE ID NOT IN (SELECT DISTINCT ARTIFACT_ID FROM MAVEN_DEPENDENCY UNION SELECT DISTINCT ARTIFACT_ID FROM GENERATED_MAVEN_ARTIFACT)"; + String sql = + "DELETE FROM MAVEN_ARTIFACT WHERE ID NOT IN (SELECT DISTINCT ARTIFACT_ID FROM MAVEN_DEPENDENCY UNION SELECT DISTINCT ARTIFACT_ID FROM GENERATED_MAVEN_ARTIFACT)"; try (Statement stmt = cnn.createStatement()) { int count = stmt.executeUpdate(sql); - LOGGER.log(Level.FINE, "cleanup(): {0}", new Object[]{count}); + LOGGER.log(Level.FINE, "cleanup(): {0}", new Object[] {count}); } cnn.commit(); } catch (SQLException e) { @@ -696,7 +799,8 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b cnn.setAutoCommit(false); Long jobPrimaryKey = null; - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = + cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? 
AND JENKINS_MASTER_ID = ?")) { stmt.setString(1, jobFullName); stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn)); try (ResultSet rst = stmt.executeQuery()) { @@ -706,7 +810,9 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b } } if (jobPrimaryKey == null) { - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_JOB(FULL_NAME, JENKINS_MASTER_ID) VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS)) { + try (PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO JENKINS_JOB(FULL_NAME, JENKINS_MASTER_ID) VALUES (?, ?)", + Statement.RETURN_GENERATED_KEYS)) { stmt.setString(1, jobFullName); stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn)); stmt.execute(); @@ -714,7 +820,8 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b } } Long buildPrimaryKey = null; - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM JENKINS_BUILD WHERE JOB_ID=? AND NUMBER=?")) { + try (PreparedStatement stmt = + cnn.prepareStatement("SELECT ID FROM JENKINS_BUILD WHERE JOB_ID=? 
AND NUMBER=?")) { stmt.setLong(1, jobPrimaryKey); stmt.setInt(2, buildNumber); try (ResultSet rst = stmt.executeQuery()) { @@ -725,7 +832,8 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b } if (buildPrimaryKey == null) { - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_BUILD(JOB_ID, NUMBER) VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS)) { + try (PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO JENKINS_BUILD(JOB_ID, NUMBER) VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS)) { stmt.setLong(1, jobPrimaryKey); stmt.setInt(2, buildNumber); stmt.execute(); @@ -751,14 +859,21 @@ protected Long getGeneratedPrimaryKey(PreparedStatement stmt, String column) thr return jobPrimaryKey; } - protected long getOrCreateArtifactPrimaryKey(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @Nullable String classifier) { + protected long getOrCreateArtifactPrimaryKey( + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @Nullable String classifier) { try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); // get or create build record Long artifactPrimaryKey = null; if (classifier == null) { - // For an unknown reason, "where classifier = null" does not work as expected when "where classifier is null" does - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER is NULL")) { + // For an unknown reason, "where classifier = null" does not work as expected when "where classifier is + // null" does + try (PreparedStatement stmt = cnn.prepareStatement( + "SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? 
AND CLASSIFIER is NULL")) { stmt.setString(1, groupId); stmt.setString(2, artifactId); stmt.setString(3, version); @@ -771,7 +886,8 @@ protected long getOrCreateArtifactPrimaryKey(@NonNull String groupId, @NonNull S } } } else { - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER = ?")) { stmt.setString(1, groupId); stmt.setString(2, artifactId); stmt.setString(3, version); @@ -787,7 +903,9 @@ protected long getOrCreateArtifactPrimaryKey(@NonNull String groupId, @NonNull S } if (artifactPrimaryKey == null) { - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO MAVEN_ARTIFACT(GROUP_ID, ARTIFACT_ID, VERSION, TYPE, CLASSIFIER) VALUES (?, ?, ?, ?, ?)", Statement.RETURN_GENERATED_KEYS)) { + try (PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO MAVEN_ARTIFACT(GROUP_ID, ARTIFACT_ID, VERSION, TYPE, CLASSIFIER) VALUES (?, ?, ?, ?, ?)", + Statement.RETURN_GENERATED_KEYS)) { stmt.setString(1, groupId); stmt.setString(2, artifactId); stmt.setString(3, version); @@ -810,7 +928,9 @@ protected synchronized void initializeDatabase() { cnn.setAutoCommit(false); int initialSchemaVersion = getSchemaVersion(cnn); - LOGGER.log(Level.FINE, "Initialise database. Current schema version: {0}", new Object[]{initialSchemaVersion}); + LOGGER.log( + Level.FINE, "Initialise database. 
Current schema version: {0}", new Object[] {initialSchemaVersion + }); NumberFormat numberFormat = new DecimalFormat("00"); int idx = initialSchemaVersion; @@ -837,20 +957,27 @@ protected synchronized void initializeDatabase() { LOGGER.log(Level.FINER, "Execute command {0}", sqlCommand); stmt.execute(sqlCommand); } catch (SQLException e) { - LOGGER.log(Level.SEVERE, "Failed to run SQL {0} from script {1}: {2}", new Object[] {sqlCommand, sqlScriptPath, e.getMessage()}); + LOGGER.log(Level.SEVERE, "Failed to run SQL {0} from script {1}: {2}", new Object[] { + sqlCommand, sqlScriptPath, e.getMessage() + }); handleDatabaseInitialisationException(e); } } } - String className = "org.jenkinsci.plugins.pipeline.maven.db.migration." + getJdbcScheme() + ".MigrationStep" + idx; + String className = "org.jenkinsci.plugins.pipeline.maven.db.migration." + getJdbcScheme() + + ".MigrationStep" + idx; try { - MigrationStep migrationStep = (MigrationStep) Class.forName(className).newInstance(); - LOGGER.log(Level.FINE, "Execute database migration step {0}", migrationStep.getClass().getName()); + MigrationStep migrationStep = + (MigrationStep) Class.forName(className).newInstance(); + LOGGER.log( + Level.FINE, + "Execute database migration step {0}", + migrationStep.getClass().getName()); migrationStep.execute(cnn, getJenkinsDetails()); } catch (ClassNotFoundException e) { // no migration class found, just a migration script - LOGGER.log(Level.FINER, "Migration step {0} not found", new Object[]{className}); + LOGGER.log(Level.FINER, "Migration step {0} not found", new Object[] {className}); } catch (Exception e) { cnn.rollback(); throw new RuntimeException(e); @@ -862,15 +989,18 @@ protected synchronized void initializeDatabase() { if (newSchemaVersion == 0) { // https://issues.jenkins-ci.org/browse/JENKINS-46577 - throw new IllegalStateException("Failure to load database DDL files. 
" + - "Files 'sql/" + getJdbcScheme() + "/xxx_migration.sql' NOT found in the Thread Context Class Loader. " + - " Pipeline Maven Plugin may be installed in an unsupported manner " + - "(thread.contextClassLoader: " + Thread.currentThread().getContextClassLoader() + ", " + throw new IllegalStateException("Failure to load database DDL files. " + "Files 'sql/" + + getJdbcScheme() + "/xxx_migration.sql' NOT found in the Thread Context Class Loader. " + + " Pipeline Maven Plugin may be installed in an unsupported manner " + + "(thread.contextClassLoader: " + + Thread.currentThread().getContextClassLoader() + ", " + "classLoader: " + ClassUtils.class.getClassLoader() + ")"); } else if (newSchemaVersion == initialSchemaVersion) { // no migration was needed } else { - LOGGER.log(Level.INFO, "Database successfully migrated from version {0} to version {1}", new Object[]{initialSchemaVersion, newSchemaVersion}); + LOGGER.log(Level.INFO, "Database successfully migrated from version {0} to version {1}", new Object[] { + initialSchemaVersion, newSchemaVersion + }); } } catch (SQLException e) { throw new RuntimeSqlException(e); @@ -920,13 +1050,19 @@ protected int getSchemaVersion(Connection cnn) throws SQLException { */ protected synchronized void testDatabase() throws RuntimeSqlException { try (Connection cnn = ds.getConnection()) { - List tables = Arrays.asList("MAVEN_ARTIFACT", "JENKINS_JOB", "JENKINS_BUILD", "MAVEN_DEPENDENCY", "GENERATED_MAVEN_ARTIFACT", "MAVEN_PARENT_PROJECT"); + List tables = Arrays.asList( + "MAVEN_ARTIFACT", + "JENKINS_JOB", + "JENKINS_BUILD", + "MAVEN_DEPENDENCY", + "GENERATED_MAVEN_ARTIFACT", + "MAVEN_PARENT_PROJECT"); for (String table : tables) { try (Statement stmt = cnn.createStatement()) { try (ResultSet rst = stmt.executeQuery("SELECT count(*) FROM " + table)) { if (rst.next()) { int count = rst.getInt(1); - LOGGER.log(Level.FINE, "Table {0}: {1} rows", new Object[]{table, count}); + LOGGER.log(Level.FINE, "Table {0}: {1} rows", new Object[] 
{table, count}); } else { throw new IllegalStateException("Exception testing table '" + table + "'"); } @@ -954,15 +1090,25 @@ public List listDownstreamJobs(@NonNull String jobFullName, int buildNum @NonNull @Override - public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) { - Map> downstreamJobsByArtifactBasedOnMavenDependencies = listDownstreamJobsByArtifactBasedOnMavenDependencies(jobFullName, buildNumber); - LOGGER.log(Level.FINER, "Got downstreamJobsByArtifactBasedOnMavenDependencies for job named {0} and build #{1}: {2}", new Object[]{jobFullName, buildNumber, downstreamJobsByArtifactBasedOnMavenDependencies}); - Map> downstreamJobsByArtifactBasedOnParentProjectDependencies = listDownstreamJobsByArtifactBasedOnParentProjectDependencies(jobFullName, buildNumber); - LOGGER.log(Level.FINER, "Got downstreamJobsByArtifactBasedOnParentProjectDependencies for job named {0} and build #{1}: {2}", new Object[]{jobFullName, buildNumber, downstreamJobsByArtifactBasedOnParentProjectDependencies}); + public Map> listDownstreamJobsByArtifact( + @NonNull String jobFullName, int buildNumber) { + Map> downstreamJobsByArtifactBasedOnMavenDependencies = + listDownstreamJobsByArtifactBasedOnMavenDependencies(jobFullName, buildNumber); + LOGGER.log( + Level.FINER, + "Got downstreamJobsByArtifactBasedOnMavenDependencies for job named {0} and build #{1}: {2}", + new Object[] {jobFullName, buildNumber, downstreamJobsByArtifactBasedOnMavenDependencies}); + Map> downstreamJobsByArtifactBasedOnParentProjectDependencies = + listDownstreamJobsByArtifactBasedOnParentProjectDependencies(jobFullName, buildNumber); + LOGGER.log( + Level.FINER, + "Got downstreamJobsByArtifactBasedOnParentProjectDependencies for job named {0} and build #{1}: {2}", + new Object[] {jobFullName, buildNumber, downstreamJobsByArtifactBasedOnParentProjectDependencies}); Map> results = new HashMap<>(downstreamJobsByArtifactBasedOnMavenDependencies); - for(Entry> entry: 
downstreamJobsByArtifactBasedOnParentProjectDependencies.entrySet()) { + for (Entry> entry : + downstreamJobsByArtifactBasedOnParentProjectDependencies.entrySet()) { MavenArtifact mavenArtifact = entry.getKey(); if (results.containsKey(mavenArtifact)) { results.get(mavenArtifact).addAll(entry.getValue()); @@ -970,16 +1116,22 @@ public Map> listDownstreamJobsByArtifact(@NonNu results.put(mavenArtifact, new TreeSet<>(entry.getValue())); } } - LOGGER.log(Level.FINER, "Got results for job named {0} and build #{1}: {2}", new Object[]{jobFullName, buildNumber, results}); + LOGGER.log(Level.FINER, "Got results for job named {0} and build #{1}: {2}", new Object[] { + jobFullName, buildNumber, results + }); // JENKINS-50507 Don't return the passed job in case of pipelines consuming the artifacts they produce - for (Iterator>> it = results.entrySet().iterator(); it.hasNext();) { + for (Iterator>> it = + results.entrySet().iterator(); + it.hasNext(); ) { Entry> entry = it.next(); MavenArtifact mavenArtifact = entry.getKey(); SortedSet jobs = entry.getValue(); boolean removed = jobs.remove(jobFullName); if (removed) { - LOGGER.log(Level.FINER, "Remove {0} from downstreamJobs of artifact {1}", new Object[]{jobFullName, mavenArtifact}); + LOGGER.log(Level.FINER, "Remove {0} from downstreamJobs of artifact {1}", new Object[] { + jobFullName, mavenArtifact + }); if (jobs.isEmpty()) { it.remove(); } @@ -991,24 +1143,37 @@ public Map> listDownstreamJobsByArtifact(@NonNu @NonNull @Override - public SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type, @Nullable String classifier) { - return listDownstreamPipelinesBasedOnMavenDependencies(groupId, artifactId, (baseVersion == null ? 
version : baseVersion), type, classifier); + public SortedSet listDownstreamJobs( + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @Nullable String baseVersion, + @NonNull String type, + @Nullable String classifier) { + return listDownstreamPipelinesBasedOnMavenDependencies( + groupId, artifactId, (baseVersion == null ? version : baseVersion), type, classifier); } - protected SortedSet listDownstreamPipelinesBasedOnMavenDependencies(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @Nullable String classifier) { - LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}:{4})", new Object[]{groupId, artifactId, version, type, classifier}); - - String sql = "select distinct downstream_job.full_name \n" + - "from MAVEN_ARTIFACT \n" + - "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" + - "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" + - "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + - "where MAVEN_ARTIFACT.group_id = ? " + - "and MAVEN_ARTIFACT.artifact_id = ? " + - "and MAVEN_ARTIFACT.version = ? " + - "and MAVEN_ARTIFACT.type = ? " + - "and (MAVEN_ARTIFACT.classifier = ? or (MAVEN_ARTIFACT.classifier is null and ? 
is null)) " + - "and downstream_job.jenkins_master_id = ?"; + protected SortedSet listDownstreamPipelinesBasedOnMavenDependencies( + @NonNull String groupId, + @NonNull String artifactId, + @NonNull String version, + @NonNull String type, + @Nullable String classifier) { + LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}:{4})", new Object[] { + groupId, artifactId, version, type, classifier + }); + + String sql = "select distinct downstream_job.full_name \n" + "from MAVEN_ARTIFACT \n" + + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" + + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + + "where MAVEN_ARTIFACT.group_id = ? " + + "and MAVEN_ARTIFACT.artifact_id = ? " + + "and MAVEN_ARTIFACT.version = ? " + + "and MAVEN_ARTIFACT.type = ? " + + "and (MAVEN_ARTIFACT.classifier = ? or (MAVEN_ARTIFACT.classifier is null and ? 
is null)) " + + "and downstream_job.jenkins_master_id = ?"; SortedSet downstreamJobsFullNames = new TreeSet<>(); @@ -1030,27 +1195,31 @@ protected SortedSet listDownstreamPipelinesBasedOnMavenDependencies(@Non } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}): {4}", new Object[]{groupId, artifactId, version, type, downstreamJobsFullNames}); + LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}): {4}", new Object[] { + groupId, artifactId, version, type, downstreamJobsFullNames + }); return downstreamJobsFullNames; } - @Deprecated - protected List listDownstreamPipelinesBasedOnMavenDependencies(@NonNull String jobFullName, int buildNumber) { - LOGGER.log(Level.FINER, "listDownstreamJobs({0}, {1})", new Object[]{jobFullName, buildNumber}); - - String sql = "select distinct downstream_job.full_name \n" + - "from JENKINS_JOB as upstream_job \n" + - "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + - "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + - "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + - "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" + - "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" + - "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + - "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? 
and downstream_job.jenkins_master_id = ?"; + @Deprecated + protected List listDownstreamPipelinesBasedOnMavenDependencies( + @NonNull String jobFullName, int buildNumber) { + LOGGER.log(Level.FINER, "listDownstreamJobs({0}, {1})", new Object[] {jobFullName, buildNumber}); + + String sql = "select distinct downstream_job.full_name \n" + "from JENKINS_JOB as upstream_job \n" + + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" + + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? 
and downstream_job.jenkins_master_id = ?"; List downstreamJobsFullNames = new ArrayList<>(); - LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber}); + LOGGER.log( + Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber + }); try (Connection cnn = ds.getConnection()) { try (PreparedStatement stmt = cnn.prepareStatement(sql)) { @@ -1067,28 +1236,34 @@ protected List listDownstreamPipelinesBasedOnMavenDependencies(@NonNull } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINE, "listDownstreamJobs({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, downstreamJobsFullNames}); + LOGGER.log(Level.FINE, "listDownstreamJobs({0}, {1}): {2}", new Object[] { + jobFullName, buildNumber, downstreamJobsFullNames + }); return downstreamJobsFullNames; } - protected Map> listDownstreamJobsByArtifactBasedOnMavenDependencies(@NonNull String jobFullName, int buildNumber) { - LOGGER.log(Level.FINER, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1})", new Object[]{jobFullName, buildNumber}); - - - String sql = "select distinct downstream_job.full_name, \n " + - " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n" + - " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n" + - "from JENKINS_JOB as upstream_job \n" + - "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + - "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + - "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + - "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" 
+ - "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" + - "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + - "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?"; - - LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber}); + protected Map> listDownstreamJobsByArtifactBasedOnMavenDependencies( + @NonNull String jobFullName, int buildNumber) { + LOGGER.log(Level.FINER, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1})", new Object[] { + jobFullName, buildNumber + }); + + String sql = "select distinct downstream_job.full_name, \n " + + " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n" + + " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n" + + "from JENKINS_JOB as upstream_job \n" + + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" + + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + + "where upstream_job.full_name = ? 
and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?"; + + LOGGER.log( + Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber + }); Map> results = new HashMap<>(); try (Connection cnn = ds.getConnection()) { @@ -1116,27 +1291,32 @@ protected Map> listDownstreamJobsByArtifactBase } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINE, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, results}); + LOGGER.log(Level.FINE, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1}): {2}", new Object[] { + jobFullName, buildNumber, results + }); return results; } - @Deprecated - protected List listDownstreamPipelinesBasedOnParentProjectDependencies(@NonNull String jobFullName, int buildNumber) { - LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{jobFullName, buildNumber}); - String sql = "select distinct downstream_job.full_name \n" + - "from JENKINS_JOB as upstream_job \n" + - "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + - "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + - "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + - "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n" + - "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n" + - "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + - "where upstream_job.full_name = ? 
and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?"; + protected List listDownstreamPipelinesBasedOnParentProjectDependencies( + @NonNull String jobFullName, int buildNumber) { + LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[] { + jobFullName, buildNumber + }); + String sql = "select distinct downstream_job.full_name \n" + "from JENKINS_JOB as upstream_job \n" + + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + + "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n" + + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? 
and downstream_job.jenkins_master_id = ?"; List downstreamJobsFullNames = new ArrayList<>(); - LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber}); + LOGGER.log( + Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber + }); try (Connection cnn = ds.getConnection()) { try (PreparedStatement stmt = cnn.prepareStatement(sql)) { @@ -1153,27 +1333,33 @@ protected List listDownstreamPipelinesBasedOnParentProjectDependencies(@ } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINE, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, downstreamJobsFullNames}); + LOGGER.log(Level.FINE, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[] { + jobFullName, buildNumber, downstreamJobsFullNames + }); return downstreamJobsFullNames; } - - protected Map> listDownstreamJobsByArtifactBasedOnParentProjectDependencies(String jobFullName, int buildNumber) { - LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{jobFullName, buildNumber}); - String sql = "select distinct downstream_job.full_name, \n" + - " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n" + - " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n" + - "from JENKINS_JOB as upstream_job \n" + - "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + - "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + - "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + - "inner join MAVEN_PARENT_PROJECT on 
(MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n" + - "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n" + - "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + - "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?"; - - LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber}); + protected Map> listDownstreamJobsByArtifactBasedOnParentProjectDependencies( + String jobFullName, int buildNumber) { + LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[] { + jobFullName, buildNumber + }); + String sql = "select distinct downstream_job.full_name, \n" + + " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n" + + " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n" + + "from JENKINS_JOB as upstream_job \n" + + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" + + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" + + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" + + "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n" + + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = 
downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" + + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?"; + + LOGGER.log( + Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber + }); Map> results = new HashMap<>(); @@ -1202,7 +1388,10 @@ protected Map> listDownstreamJobsByArtifactBase } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINE, "listDownstreamJobsByArtifactBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, results}); + LOGGER.log( + Level.FINE, + "listDownstreamJobsByArtifactBasedOnParentProjectDependencies({0}, {1}): {2}", + new Object[] {jobFullName, buildNumber, results}); return results; } @@ -1225,31 +1414,37 @@ public Map listUpstreamJobs(@NonNull String jobFullName, int bu * @param downstreamBuildNumber * @return */ - protected Map listUpstreamPipelinesBasedOnMavenDependencies(@NonNull String downstreamJobFullName, int downstreamBuildNumber) { - LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1})", new Object[]{downstreamJobFullName, downstreamBuildNumber}); + protected Map listUpstreamPipelinesBasedOnMavenDependencies( + @NonNull String downstreamJobFullName, int downstreamBuildNumber) { + LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1})", new Object[] { + downstreamJobFullName, downstreamBuildNumber + }); - // if we join JENKINS_JOB to the listUpstreamPipelinesBasedOnMavenDependencies query we get performance problems + // if we join JENKINS_JOB to the listUpstreamPipelinesBasedOnMavenDependencies query we get performance problems // in large setups with postgres. 
- // The analyzer does not use an index for JENKINS_JOB and uses a sequential scan in the query plan and + // The analyzer does not use an index for JENKINS_JOB and uses a sequential scan in the query plan and // the query needs some minutes to execute! // There is a workaround: you can give the query a hint that only one row is selected on JENKINS_JOB // I tried this out with Solution 4 of https://learnsql.com/blog/sql-join-only-first-row/ and it worked. // - // ... - // inner join JENKINS_BUILD as downstream_build on (MAVEN_DEPENDENCY.build_id = downstream_build.id and downstream_build.job_id = ( - // SELECT downstream_job.id FROM JENKINS_JOB as downstream_job + // ... + // inner join JENKINS_BUILD as downstream_build on (MAVEN_DEPENDENCY.build_id = downstream_build.id and + // downstream_build.job_id = ( + // SELECT downstream_job.id FROM JENKINS_JOB as downstream_job // WHERE downstream_job.full_name = ? and downstream_job.jenkins_master_id = ? // LIMIT 1)) // - // The LIMIT 1 gives the optimizer a hint that should not be necessary because it has a unique index on full_name and jenkins_master_id + // The LIMIT 1 gives the optimizer a hint that should not be necessary because it has a unique index on + // full_name and jenkins_master_id // // Problem: is LIMIT or a similar solutions supported by all databases? // Therefore i made a second query that reads the primaryKey of the matching JENKINS_JOB first. - // The second query does not need the problematic join on JENKINS_BUILD and performs very well. - + // The second query does not need the problematic join on JENKINS_BUILD and performs very well. + Long jobPrimaryKey; try (Connection cnn = ds.getConnection()) { - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = + cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? 
AND JENKINS_MASTER_ID = ?")) { stmt.setString(1, downstreamJobFullName); stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn)); try (ResultSet rst = stmt.executeQuery()) { @@ -1268,17 +1463,19 @@ protected Map listUpstreamPipelinesBasedOnMavenDependencies(@No return new HashMap<>(); } - String sql = "select distinct upstream_job.full_name, upstream_build.number\n" + - "from JENKINS_JOB as upstream_job\n" + - "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n" + - "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n" + - "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n" + - "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false)\n" + - "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id\n" + - "where downstream_build.job_id = ? and downstream_build.number = ? 
and upstream_job.jenkins_master_id = ?"; + String sql = + "select distinct upstream_job.full_name, upstream_build.number\n" + "from JENKINS_JOB as upstream_job\n" + + "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n" + + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n" + + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n" + + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false)\n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id\n" + + "where downstream_build.job_id = ? and downstream_build.number = ? and upstream_job.jenkins_master_id = ?"; Map upstreamJobsFullNames = new HashMap<>(); - LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, downstreamJobFullName, downstreamBuildNumber}); + LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] { + sql, downstreamJobFullName, downstreamBuildNumber + }); try (Connection cnn = ds.getConnection()) { try (PreparedStatement stmt = cnn.prepareStatement(sql)) { @@ -1294,26 +1491,33 @@ protected Map listUpstreamPipelinesBasedOnMavenDependencies(@No } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1}): {2}", new Object[]{downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames}); + LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1}): {2}", new Object[] { + downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames + }); return upstreamJobsFullNames; } - protected Map listUpstreamPipelinesBasedOnParentProjectDependencies(@NonNull 
String downstreamJobFullName, int downstreamBuildNumber) { - LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{downstreamJobFullName, downstreamBuildNumber}); - - String sql = "select distinct upstream_job.full_name, upstream_build.number\n" + - "from JENKINS_JOB as upstream_job\n" + - "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n" + - "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n" + - "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n" + - "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false)\n" + - "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id\n" + - "inner join JENKINS_JOB as downstream_job on downstream_build.job_id = downstream_job.id\n" + - "where downstream_job.full_name = ? and downstream_job.jenkins_master_id = ? and downstream_build.number = ? 
and upstream_job.jenkins_master_id = ?"; + protected Map listUpstreamPipelinesBasedOnParentProjectDependencies( + @NonNull String downstreamJobFullName, int downstreamBuildNumber) { + LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[] { + downstreamJobFullName, downstreamBuildNumber + }); + + String sql = + "select distinct upstream_job.full_name, upstream_build.number\n" + "from JENKINS_JOB as upstream_job\n" + + "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n" + + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n" + + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n" + + "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false)\n" + + "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id\n" + + "inner join JENKINS_JOB as downstream_job on downstream_build.job_id = downstream_job.id\n" + + "where downstream_job.full_name = ? and downstream_job.jenkins_master_id = ? and downstream_build.number = ? 
and upstream_job.jenkins_master_id = ?"; Map upstreamJobsFullNames = new HashMap<>(); - LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, downstreamJobFullName, downstreamBuildNumber}); + LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] { + sql, downstreamJobFullName, downstreamBuildNumber + }); try (Connection cnn = ds.getConnection()) { try (PreparedStatement stmt = cnn.prepareStatement(sql)) { @@ -1330,23 +1534,31 @@ protected Map listUpstreamPipelinesBasedOnParentProjectDependen } catch (SQLException e) { throw new RuntimeSqlException(e); } - LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames}); + LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[] { + downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames + }); return upstreamJobsFullNames; } @NonNull public Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber) { - UpstreamMemory upstreamMemory = new UpstreamMemory(); + UpstreamMemory upstreamMemory = new UpstreamMemory(); return listTransitiveUpstreamJobs(jobFullName, buildNumber, new HashMap<>(), 0, upstreamMemory); } @NonNull - public Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) { + public Map listTransitiveUpstreamJobs( + @NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) { return listTransitiveUpstreamJobs(jobFullName, buildNumber, new HashMap<>(), 0, upstreamMemory); } - private Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber, Map transitiveUpstreamBuilds, int recursionDepth, UpstreamMemory upstreamMemory) { + private Map listTransitiveUpstreamJobs( + @NonNull String jobFullName, + int buildNumber, + Map transitiveUpstreamBuilds, + int recursionDepth, + 
UpstreamMemory upstreamMemory) { Map upstreamBuilds = upstreamMemory.listUpstreamJobs(this, jobFullName, buildNumber); for (Entry upstreamBuild : upstreamBuilds.entrySet()) { String upstreamJobFullName = upstreamBuild.getKey(); @@ -1356,7 +1568,12 @@ private Map listTransitiveUpstreamJobs(@NonNull String jobFullN } else { transitiveUpstreamBuilds.put(upstreamJobFullName, upstreamBuildNumber); if (recursionDepth < OPTIMIZATION_MAX_RECURSION_DEPTH) { - listTransitiveUpstreamJobs(upstreamJobFullName, upstreamBuildNumber, transitiveUpstreamBuilds, recursionDepth++, upstreamMemory); + listTransitiveUpstreamJobs( + upstreamJobFullName, + upstreamBuildNumber, + transitiveUpstreamBuilds, + recursionDepth++, + upstreamMemory); } } } @@ -1372,17 +1589,18 @@ private Map listTransitiveUpstreamJobs(@NonNull String jobFullN */ @NonNull public List getGeneratedArtifacts(@NonNull String jobFullName, @NonNull int buildNumber) { - LOGGER.log(Level.FINER, "getGeneratedArtifacts({0}, {1})", new Object[]{jobFullName, buildNumber}); - String generatedArtifactsSql = "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_ARTIFACT.version as base_version, " + - "GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.repository_url, GENERATED_MAVEN_ARTIFACT.extension" + - " FROM MAVEN_ARTIFACT " + - " INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID" + - " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON GENERATED_MAVEN_ARTIFACT.BUILD_ID = UPSTREAM_BUILD.ID " + - " INNER JOIN JENKINS_JOB AS UPSTREAM_JOB ON UPSTREAM_BUILD.JOB_ID = UPSTREAM_JOB.ID " + - " WHERE " + - " UPSTREAM_JOB.FULL_NAME = ? AND" + - " UPSTREAM_JOB.JENKINS_MASTER_ID = ? AND" + - " UPSTREAM_BUILD.NUMBER = ? 
"; + LOGGER.log(Level.FINER, "getGeneratedArtifacts({0}, {1})", new Object[] {jobFullName, buildNumber}); + String generatedArtifactsSql = + "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_ARTIFACT.version as base_version, " + + "GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.repository_url, GENERATED_MAVEN_ARTIFACT.extension" + + " FROM MAVEN_ARTIFACT " + + " INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID" + + " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON GENERATED_MAVEN_ARTIFACT.BUILD_ID = UPSTREAM_BUILD.ID " + + " INNER JOIN JENKINS_JOB AS UPSTREAM_JOB ON UPSTREAM_BUILD.JOB_ID = UPSTREAM_JOB.ID " + + " WHERE " + + " UPSTREAM_JOB.FULL_NAME = ? AND" + + " UPSTREAM_JOB.JENKINS_MASTER_ID = ? AND" + + " UPSTREAM_BUILD.NUMBER = ? "; List results = new ArrayList<>(); try (Connection cnn = this.ds.getConnection()) { @@ -1409,7 +1627,8 @@ public List getGeneratedArtifacts(@NonNull String jobFullName, @N artifact.setExtension(rst.getString("extension")); artifact.setSnapshot(artifact.getVersion().endsWith("-SNAPSHOT")); - // artifact.put("skip_downstream_triggers", rst.getString("GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers")); + // artifact.put("skip_downstream_triggers", + // rst.getString("GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers")); results.add(artifact); } } @@ -1429,7 +1648,8 @@ public synchronized Long getJenkinsMasterPrimaryKey(Connection cnn) throws SQLEx String jenkinsMasterUrl = getJenkinsDetails().getMasterRootUrl(); String jenkinsMasterUrlValueInDb = null; - try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID, URL FROM JENKINS_MASTER WHERE LEGACY_INSTANCE_ID=?")) { + try (PreparedStatement stmt = + cnn.prepareStatement("SELECT ID, URL FROM JENKINS_MASTER WHERE LEGACY_INSTANCE_ID=?")) { stmt.setString(1, jenkinsMasterLegacyInstanceId); try (ResultSet rst = stmt.executeQuery()) { if 
(rst.next()) { @@ -1439,7 +1659,9 @@ public synchronized Long getJenkinsMasterPrimaryKey(Connection cnn) throws SQLEx } } if (this.jenkinsMasterPrimaryKey == null) { // NOT FOUND IN DB - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)", Statement.RETURN_GENERATED_KEYS)) { + try (PreparedStatement stmt = cnn.prepareStatement( + "INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)", + Statement.RETURN_GENERATED_KEYS)) { stmt.setString(1, jenkinsMasterLegacyInstanceId); stmt.setString(2, jenkinsMasterUrl); stmt.execute(); @@ -1449,13 +1671,18 @@ public synchronized Long getJenkinsMasterPrimaryKey(Connection cnn) throws SQLEx } } else { // FOUND IN DB, UPDATE IF NEEDED if (!Objects.equals(jenkinsMasterUrl, jenkinsMasterUrlValueInDb)) { - LOGGER.log(Level.INFO, "Update url from \"{0}\" to \"{1}\" for master with legacyId {2}", new Object[]{jenkinsMasterUrlValueInDb, jenkinsMasterUrl, jenkinsMasterLegacyInstanceId}); - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_MASTER set URL = ? where ID = ?")) { + LOGGER.log( + Level.INFO, + "Update url from \"{0}\" to \"{1}\" for master with legacyId {2}", + new Object[] {jenkinsMasterUrlValueInDb, jenkinsMasterUrl, jenkinsMasterLegacyInstanceId}); + try (PreparedStatement stmt = + cnn.prepareStatement("UPDATE JENKINS_MASTER set URL = ? 
where ID = ?")) { stmt.setString(1, jenkinsMasterUrl); stmt.setLong(2, this.jenkinsMasterPrimaryKey); int count = stmt.executeUpdate(); if (count != 1) { - LOGGER.warning("Updated more/less than 1 JENKINS_MASTER.URL=" + jenkinsMasterUrl + " for ID=" + this.jenkinsMasterPrimaryKey); + LOGGER.warning("Updated more/less than 1 JENKINS_MASTER.URL=" + jenkinsMasterUrl + + " for ID=" + this.jenkinsMasterPrimaryKey); } } finally { cnn.commit(); @@ -1478,8 +1705,15 @@ public String toPrettyString() { List prettyStrings = new ArrayList<>(); try (Connection cnn = ds.getConnection()) { prettyStrings.add("JDBC URL: " + cnn.getMetaData().getURL()); - List tables = Arrays.asList("JENKINS_MASTER", "MAVEN_ARTIFACT", "JENKINS_JOB", "JENKINS_BUILD", - "MAVEN_DEPENDENCY", "GENERATED_MAVEN_ARTIFACT", "MAVEN_PARENT_PROJECT", "JENKINS_BUILD_UPSTREAM_CAUSE"); + List tables = Arrays.asList( + "JENKINS_MASTER", + "MAVEN_ARTIFACT", + "JENKINS_JOB", + "JENKINS_BUILD", + "MAVEN_DEPENDENCY", + "GENERATED_MAVEN_ARTIFACT", + "MAVEN_PARENT_PROJECT", + "JENKINS_BUILD_UPSTREAM_CAUSE"); for (String table : tables) { try (Statement stmt = cnn.createStatement()) { try (ResultSet rst = stmt.executeQuery("SELECT count(*) FROM " + table)) { @@ -1487,7 +1721,8 @@ public String toPrettyString() { int count = rst.getInt(1); prettyStrings.add("Table " + table + ": " + count + " rows"); } else { - prettyStrings.add("Table " + table + ": #IllegalStateException 'select count(*)' didn't return any row#"); + prettyStrings.add("Table " + table + + ": #IllegalStateException 'select count(*)' didn't return any row#"); } } } catch (SQLException e) { @@ -1500,7 +1735,8 @@ public String toPrettyString() { LOGGER.log(Level.WARNING, "SQLException getting a connection to " + ds, e); } - StringBuilder result = new StringBuilder(StringUtils.substringAfterLast(getClass().getName(), ".") + " - " + getDatabaseDescription()); + StringBuilder result = new StringBuilder( + StringUtils.substringAfterLast(getClass().getName(), ".") 
+ " - " + getDatabaseDescription()); for (String prettyString : prettyStrings) { result.append("\r\n\t").append(prettyString); } @@ -1510,62 +1746,80 @@ public String toPrettyString() { protected String getDatabaseDescription() { try (Connection cnn = ds.getConnection()) { DatabaseMetaData metaData = cnn.getMetaData(); - return metaData. getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); + return metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); } catch (SQLException e) { return "#" + e.toString() + "#"; } } @Override - public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) { - LOGGER.log(Level.FINE, "updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}", - new Object[]{jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis}); + public void updateBuildOnCompletion( + @NonNull String jobFullName, + int buildNumber, + int buildResultOrdinal, + long startTimeInMillis, + long durationInMillis) { + LOGGER.log( + Level.FINE, + "updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}", + new Object[] {jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis}); long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber); try (Connection cnn = ds.getConnection()) { cnn.setAutoCommit(false); - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_BUILD " + - "SET RESULT_ID = ?, START_TIME = ?, DURATION_IN_MILLIS = ? " + - "WHERE ID = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_BUILD " + + "SET RESULT_ID = ?, START_TIME = ?, DURATION_IN_MILLIS = ? 
" + "WHERE ID = ?")) { stmt.setInt(1, buildResultOrdinal); stmt.setTimestamp(2, new Timestamp(startTimeInMillis)); stmt.setLong(3, durationInMillis); stmt.setLong(4, buildPrimaryKey); int count = stmt.executeUpdate(); if (count != 1) { - LOGGER.log(Level.WARNING, "updateBuildOnCompletion - more/less than 1 JENKINS_BUILD record updated (" + - count + ") for " + jobFullName + "#" + buildNumber + ", buildPrimaryKey=" + buildPrimaryKey); + LOGGER.log( + Level.WARNING, + "updateBuildOnCompletion - more/less than 1 JENKINS_BUILD record updated (" + count + + ") for " + jobFullName + "#" + buildNumber + ", buildPrimaryKey=" + + buildPrimaryKey); } } if (Result.SUCCESS.ordinal == buildResultOrdinal) { - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) { stmt.setInt(1, buildNumber); stmt.setInt(2, buildNumber); stmt.setString(3, jobFullName); stmt.setLong(4, getJenkinsMasterPrimaryKey(cnn)); int count = stmt.executeUpdate(); if (count != 1) { - LOGGER.log(Level.WARNING, "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" + - count + ") for " + jobFullName + "#" + buildNumber); + LOGGER.log( + Level.WARNING, + "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" + count + + ") for " + jobFullName + "#" + buildNumber); } } } else { - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) { + try (PreparedStatement stmt = cnn.prepareStatement( + "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ? where FULL_NAME = ? 
and JENKINS_MASTER_ID = ?")) { stmt.setInt(1, buildNumber); stmt.setString(2, jobFullName); stmt.setLong(3, getJenkinsMasterPrimaryKey(cnn)); int count = stmt.executeUpdate(); if (count != 1) { - LOGGER.log(Level.WARNING, "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" + - count + ") for " + jobFullName + "#" + buildNumber); + LOGGER.log( + Level.WARNING, + "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" + count + + ") for " + jobFullName + "#" + buildNumber); } } } cnn.commit(); } catch (SQLException e) { - throw new RuntimeSqlException("Exception updating build " + jobFullName + "#" + buildNumber + " with result " + buildResultOrdinal, e); + throw new RuntimeSqlException( + "Exception updating build " + jobFullName + "#" + buildNumber + " with result " + + buildResultOrdinal, + e); } } diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java index 5f63879f..db9e65a5 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java @@ -24,13 +24,8 @@ package org.jenkinsci.plugins.pipeline.maven.db; -import hudson.Extension; -import jenkins.model.Jenkins; -import org.h2.jdbcx.JdbcConnectionPool; - import edu.umd.cs.findbugs.annotations.NonNull; - -import javax.sql.DataSource; +import hudson.Extension; import java.io.File; import java.io.IOException; import java.sql.Connection; @@ -38,6 +33,9 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; +import javax.sql.DataSource; +import jenkins.model.Jenkins; +import org.h2.jdbcx.JdbcConnectionPool; /** * @author Cyrille Le Clerc @@ -63,18 +61,20 @@ public String getDescription() { } public 
PipelineMavenPluginH2Dao(@NonNull File rootDir) { - this(JdbcConnectionPool.create("jdbc:h2:file:" + new File(rootDir, "jenkins-jobs").getAbsolutePath() + ";" + - "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE", "sa", "sa")); + this(JdbcConnectionPool.create( + "jdbc:h2:file:" + new File(rootDir, "jenkins-jobs").getAbsolutePath() + ";" + + "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE", + "sa", + "sa")); } - - @Override protected void registerJdbcDriver() { try { Class.forName("org.h2.Driver"); } catch (ClassNotFoundException e) { - throw new RuntimeException("H2 driver 'org.h2.Driver' not found. Please install the 'H2 Database Plugin' to install the H2 driver"); + throw new RuntimeException( + "H2 driver 'org.h2.Driver' not found. Please install the 'H2 Database Plugin' to install the H2 driver"); } } @@ -129,7 +129,7 @@ public void close() throws IOException { stmt.execute("SHUTDOWN"); } } catch (SQLException e) { - if (e.getErrorCode() == 90121) { + if (e.getErrorCode() == 90121) { // DATABASE_CALLED_AT_SHUTDOWN (the JVM shutdown hooks are running already :-o ) LOGGER.log(Level.FINE, "Failed to close the database as it is already closed", e); } else { @@ -150,8 +150,7 @@ public String getDefaultJdbcUrl() { throw new IllegalStateException("Failure to create database root dir " + databaseRootDir); } } - return "jdbc:h2:file:" + new File(databaseRootDir, "jenkins-jobs").getAbsolutePath() + ";" + - "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE"; + return "jdbc:h2:file:" + new File(databaseRootDir, "jenkins-jobs").getAbsolutePath() + ";" + + "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE"; } - } diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java index 2f3e7db6..6b10c3ec 100644 --- 
a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java @@ -25,19 +25,18 @@ package org.jenkinsci.plugins.pipeline.maven.db; import com.mysql.cj.exceptions.MysqlErrorNumbers; -import hudson.Extension; -import org.apache.commons.lang.StringUtils; -import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import javax.sql.DataSource; +import hudson.Extension; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; +import javax.sql.DataSource; +import org.apache.commons.lang.StringUtils; +import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException; /** * @author Cyrille Le Clerc @@ -61,25 +60,24 @@ public String getDescription() { * @return {@code null} if this is not a MariaDB version, the MariaDB server version (e.g. "10.2.20", "10.3.11") if parsed, the entire {@link DatabaseMetaData#getDatabaseProductVersion()} if the parsing oof the MariaDB server version failed */ @Nullable - public static String extractMariaDbVersion(@Nullable String jdbcDatabaseProductVersion) { + public static String extractMariaDbVersion(@Nullable String jdbcDatabaseProductVersion) { if (jdbcDatabaseProductVersion == null) { return null; } - if(!jdbcDatabaseProductVersion.contains("MariaDB")) { + if (!jdbcDatabaseProductVersion.contains("MariaDB")) { return null; } String mariaDbVersion = StringUtils.substringBetween(jdbcDatabaseProductVersion, "-", "-MariaDB"); if (mariaDbVersion == null) { // MariaDB version format has changed. 
- return jdbcDatabaseProductVersion; + return jdbcDatabaseProductVersion; } else { return mariaDbVersion; } } - public PipelineMavenPluginMySqlDao(@NonNull DataSource ds) { super(ds); } @@ -91,7 +89,7 @@ public String getJdbcScheme() { @Override protected void handleDatabaseInitialisationException(SQLException e) { - if ( MysqlErrorNumbers.SQL_STATE_ILLEGAL_ARGUMENT.equals(e.getSQLState())) { + if (MysqlErrorNumbers.SQL_STATE_ILLEGAL_ARGUMENT.equals(e.getSQLState())) { LOGGER.log(Level.FINE, "Ignore sql exception " + e.getErrorCode() + " - " + e.getSQLState(), e); } else if (MysqlErrorNumbers.ER_EMPTY_QUERY == e.getErrorCode()) { LOGGER.log(Level.FINE, "Ignore sql exception " + e.getErrorCode() + " - " + e.getSQLState(), e); @@ -108,7 +106,8 @@ protected void registerJdbcDriver() { try { Class.forName("com.mysql.cj.jdbc.Driver"); } catch (ClassNotFoundException e) { - throw new RuntimeException("MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver"); + throw new RuntimeException( + "MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver"); } } @@ -116,7 +115,7 @@ protected void registerJdbcDriver() { protected String getDatabaseDescription() { try (Connection cnn = getDataSource().getConnection()) { DatabaseMetaData metaData = cnn.getMetaData(); - String version = metaData. 
getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); + String version = metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); try (Statement stmt = cnn.createStatement()) { try (ResultSet rst = stmt.executeQuery("select AURORA_VERSION()")) { rst.next(); diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java index 130d1798..f6300ed0 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java @@ -24,11 +24,8 @@ package org.jenkinsci.plugins.pipeline.maven.db; -import hudson.Extension; -import org.postgresql.util.PSQLState; - import edu.umd.cs.findbugs.annotations.NonNull; -import javax.sql.DataSource; +import hudson.Extension; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; @@ -36,6 +33,8 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; +import javax.sql.DataSource; +import org.postgresql.util.PSQLState; /** * @author Cyrille Le Clerc @@ -51,7 +50,6 @@ public PipelineMavenPluginPostgreSqlDao(@NonNull DataSource ds) { super(ds); } - @Override public String getDescription() { return Messages.dao_postgesql_description(); @@ -67,7 +65,8 @@ protected void registerJdbcDriver() { try { Class.forName("org.postgresql.Driver"); } catch (ClassNotFoundException e) { - throw new RuntimeException("PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL API Plugin' to install the PostgreSQL driver"); + throw new RuntimeException( + "PostgreSQL driver 'org.postgresql.Driver' not found. 
Please install the 'PostgreSQL API Plugin' to install the PostgreSQL driver"); } } @@ -75,13 +74,15 @@ protected void registerJdbcDriver() { protected String getDatabaseDescription() { try (Connection cnn = getDataSource().getConnection()) { DatabaseMetaData metaData = cnn.getMetaData(); - String version = metaData. getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); + String version = metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion(); try (Statement stmt = cnn.createStatement()) { try (ResultSet rst = stmt.executeQuery("select AURORA_VERSION()")) { rst.next(); version += " / Amazon Aurora " + rst.getString(1); } catch (SQLException e) { - if (PSQLState.UNDEFINED_FUNCTION.getState().equals(e.getSQLState())) { // " 42883 - ERROR: function aurora_version() does not exist" + if (PSQLState.UNDEFINED_FUNCTION + .getState() + .equals(e.getSQLState())) { // " 42883 - ERROR: function aurora_version() does not exist" // not Amazon aurora, the function aurora_version() does not exist } else { LOGGER.log(Level.WARNING, "Exception checking Amazon Aurora version", e); diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java index 90dcb470..62949c37 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java @@ -1,11 +1,10 @@ package org.jenkinsci.plugins.pipeline.maven.db.migration; -import jenkins.model.Jenkins; - import edu.umd.cs.findbugs.annotations.NonNull; import java.sql.Connection; import java.sql.SQLException; import java.util.Objects; +import jenkins.model.Jenkins; public interface MigrationStep { void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws 
SQLException; @@ -20,7 +19,7 @@ public String getMasterLegacyInstanceId() { } @NonNull - public String getMasterRootUrl(){ + public String getMasterRootUrl() { return Objects.toString(Jenkins.get().getRootUrl(), ""); } } diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java index 2458a247..262c095b 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java @@ -1,20 +1,19 @@ package org.jenkinsci.plugins.pipeline.maven.db.migration.h2; -import hudson.model.Run; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import hudson.model.Run; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.logging.Level; import java.util.logging.Logger; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; public class MigrationStep10 implements MigrationStep { - private final static Logger LOGGER = Logger.getLogger(MigrationStep10.class.getName()); + private static final Logger LOGGER = Logger.getLogger(MigrationStep10.class.getName()); @Override public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException { @@ -25,9 +24,9 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta try (ResultSet rst = stmt.executeQuery()) { while (rst.next()) { count++; - if ((count < 100 && (count % 10) == 0) || - (count < 500 && (count % 20) == 0) || - ((count % 50) == 0)) { + if ((count < 100 && (count % 10) == 0) + || (count < 500 && (count % 20) 
== 0) + || ((count % 50) == 0)) { LOGGER.log(Level.INFO, "#" + count + " - " + rst.getString("FULL_NAME") + "..."); } @@ -42,11 +41,12 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta } } LOGGER.info("Successfully upgraded table JENKINS_JOB, " + count + " records upgraded"); - } - protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber) throws SQLException { - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) { + protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber) + throws SQLException { + try (PreparedStatement stmt = cnn.prepareStatement( + "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) { stmt.setInt(1, lastBuildNumber); // TRICK we assume that the last build is successful stmt.setInt(2, lastBuildNumber); @@ -60,7 +60,8 @@ protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPr */ @Nullable protected Integer findLastBuildNumber(@NonNull Connection cnn, long jobPrimaryKey) throws SQLException { - try (PreparedStatement stmt2 = cnn.prepareStatement("SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) { + try (PreparedStatement stmt2 = cnn.prepareStatement( + "SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? 
ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) { stmt2.setLong(1, jobPrimaryKey); try (ResultSet rst2 = stmt2.executeQuery()) { if (rst2.next()) { diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java index 88d9b111..7a6485c1 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java @@ -1,23 +1,22 @@ package org.jenkinsci.plugins.pipeline.maven.db.migration.h2; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.model.Cause; import hudson.model.Job; import hudson.model.Run; -import jenkins.model.Jenkins; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; - -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.logging.Level; import java.util.logging.Logger; +import jenkins.model.Jenkins; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; public class MigrationStep11 implements MigrationStep { - private final static Logger LOGGER = Logger.getLogger(MigrationStep11.class.getName()); + private static final Logger LOGGER = Logger.getLogger(MigrationStep11.class.getName()); @Override public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException { @@ -25,25 +24,26 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta int buildCauseCount = 0; LOGGER.info("Upgrade table JENKINS_BUILD_UPSTREAM_CAUSE..."); - String select = "select jenkins_job.full_name, 
jenkins_job.jenkins_master_id, jenkins_build.number, jenkins_build.id " + - " from jenkins_build inner join jenkins_job on jenkins_build.job_id = jenkins_job.id order by jenkins_job.full_name, jenkins_build.number"; + String select = + "select jenkins_job.full_name, jenkins_job.jenkins_master_id, jenkins_build.number, jenkins_build.id " + + " from jenkins_build inner join jenkins_job on jenkins_build.job_id = jenkins_job.id order by jenkins_job.full_name, jenkins_build.number"; - String insert = " insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) " + - " select upstream_build.id, ? " + - " from jenkins_build as upstream_build, jenkins_job as upstream_job " + - " where " + - " upstream_build.job_id = upstream_job.id and" + - " upstream_job.full_name = ? and" + - " upstream_job.jenkins_master_id = ? and" + - " upstream_build.number = ? "; + String insert = " insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) " + + " select upstream_build.id, ? " + + " from jenkins_build as upstream_build, jenkins_job as upstream_job " + + " where " + + " upstream_build.job_id = upstream_job.id and" + + " upstream_job.full_name = ? and" + + " upstream_job.jenkins_master_id = ? and" + + " upstream_build.number = ? 
"; try (PreparedStatement insertStmt = cnn.prepareStatement(insert)) { try (PreparedStatement selectStmt = cnn.prepareStatement(select)) { try (ResultSet rst = selectStmt.executeQuery()) { while (rst.next()) { jobCount++; - if ((jobCount < 100 && (jobCount % 10) == 0) || - (jobCount < 500 && (jobCount % 20) == 0) || - ((jobCount % 50) == 0)) { + if ((jobCount < 100 && (jobCount % 10) == 0) + || (jobCount < 500 && (jobCount % 20) == 0) + || ((jobCount % 50) == 0)) { LOGGER.log(Level.INFO, "#" + jobCount + " - " + rst.getString("FULL_NAME") + "..."); } @@ -77,20 +77,24 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta } } } catch (RuntimeException e) { - LOGGER.log(Level.WARNING, "Silently ignore exception migrating build " + jobFullName + "#" + buildNumber, e); + LOGGER.log( + Level.WARNING, + "Silently ignore exception migrating build " + jobFullName + "#" + buildNumber, + e); } - } insertStmt.executeBatch(); } } } - LOGGER.info("Successfully upgraded table JENKINS_BUILD_UPSTREAM_CAUSE, " + jobCount + " jobs scanned, " + buildCauseCount + " job causes inserted"); - + LOGGER.info("Successfully upgraded table JENKINS_BUILD_UPSTREAM_CAUSE, " + jobCount + " jobs scanned, " + + buildCauseCount + " job causes inserted"); } - protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber) throws SQLException { - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) { + protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber) + throws SQLException { + try (PreparedStatement stmt = cnn.prepareStatement( + "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? 
where ID = ?")) { stmt.setInt(1, lastBuildNumber); // TRICK we assume that the last build is successful stmt.setInt(2, lastBuildNumber); @@ -104,7 +108,8 @@ protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPr */ @Nullable protected Integer findLastBuildNumber(@NonNull Connection cnn, long jobPrimaryKey) throws SQLException { - try (PreparedStatement stmt2 = cnn.prepareStatement("SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) { + try (PreparedStatement stmt2 = cnn.prepareStatement( + "SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) { stmt2.setLong(1, jobPrimaryKey); try (ResultSet rst2 = stmt2.executeQuery()) { if (rst2.next()) { diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java index 0bc3143d..b587ca06 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java @@ -1,12 +1,11 @@ package org.jenkinsci.plugins.pipeline.maven.db.migration.h2; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; - import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; public class MigrationStep8 implements MigrationStep { @@ -22,7 +21,9 @@ public void execute(Connection cnn, JenkinsDetails jenkinsDetails) throws SQLExc } } if (masterId == null) { - try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)", Statement.RETURN_GENERATED_KEYS)) { + try (PreparedStatement 
stmt = cnn.prepareStatement( + "INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)", + Statement.RETURN_GENERATED_KEYS)) { stmt.setString(1, jenkinsDetails.getMasterLegacyInstanceId()); stmt.setString(2, jenkinsDetails.getMasterRootUrl()); stmt.execute(); @@ -35,7 +36,8 @@ public void execute(Connection cnn, JenkinsDetails jenkinsDetails) throws SQLExc } } } - try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set JENKINS_MASTER_ID=? where JENKINS_MASTER_ID IS NULL")) { + try (PreparedStatement stmt = + cnn.prepareStatement("UPDATE JENKINS_JOB set JENKINS_MASTER_ID=? where JENKINS_MASTER_ID IS NULL")) { stmt.setInt(1, masterId); stmt.execute(); } diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java index 3d77498d..26c7223e 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java @@ -1,29 +1,30 @@ package org.jenkinsci.plugins.pipeline.maven.db.migration.mysql; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; - import edu.umd.cs.findbugs.annotations.NonNull; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import java.util.logging.Level; import java.util.logging.Logger; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; public class MigrationStep12 implements MigrationStep { - private final static Logger LOGGER = Logger.getLogger(MigrationStep12.class.getName()); + private static final Logger LOGGER = Logger.getLogger(MigrationStep12.class.getName()); @Override public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException { try 
(Statement stmt = cnn.createStatement()) { stmt.execute("ALTER TABLE MAVEN_ARTIFACT MODIFY COLUMN VERSION varchar(100)"); - LOGGER.log(Level.INFO, "Successfully resized column MAVEN_ARTIFACT.VERSION to varchar(100)" ); + LOGGER.log(Level.INFO, "Successfully resized column MAVEN_ARTIFACT.VERSION to varchar(100)"); } catch (SQLException e) { // some old mysql version may not accept the resize due to constraints on the index size - LOGGER.log(Level.WARNING, "Silently ignore failure to resize column MAVEN_ARTIFACT.VERSION to varchar(100). " + - "It is probably caused by the old version of the MySQL engine, it will not restrict the capabilities, " + - "it will just continue to restrict the max size of the maven_artifact.version column to 56 chars" ); + LOGGER.log( + Level.WARNING, + "Silently ignore failure to resize column MAVEN_ARTIFACT.VERSION to varchar(100). " + + "It is probably caused by the old version of the MySQL engine, it will not restrict the capabilities, " + + "it will just continue to restrict the max size of the maven_artifact.version column to 56 chars"); } } } diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java index deb95b51..4af6cf57 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java @@ -1,9 +1,8 @@ package org.jenkinsci.plugins.pipeline.maven.db.util; -import java.io.InputStream; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.InputStream; /** * @author Cyrille Le Clerc @@ -18,5 +17,4 @@ public static InputStream getResourceAsStream(@NonNull String resourcePath) { } return result; } - } diff --git 
a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java index 495440b6..a4fc5b0c 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java @@ -24,7 +24,8 @@ public RuntimeSqlException(Throwable cause) { super(cause); } - protected RuntimeSqlException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + protected RuntimeSqlException( + String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } } diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java index de4d1f9e..82324d20 100644 --- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java +++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java @@ -10,9 +10,8 @@ */ public class SqlUtils { - private SqlUtils() { + private SqlUtils() {} - } public static void dumpResultsetMetadata(ResultSet rst, PrintStream out) { try { ResultSetMetaData metaData = rst.getMetaData(); diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java index ef658a1d..d9048ec9 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java +++ 
b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java @@ -26,15 +26,15 @@ import static org.assertj.core.api.Assertions.assertThat; +import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.model.Result; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.SortedSet; import java.util.stream.Collectors; - import javax.sql.DataSource; - import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.MavenDependency; import org.jenkinsci.plugins.pipeline.maven.db.util.SqlTestsUtils; @@ -43,9 +43,6 @@ import org.junit.jupiter.api.Test; import org.jvnet.hudson.test.Issue; -import edu.umd.cs.findbugs.annotations.NonNull; -import hudson.model.Result; - /** * @author Cyrille Le Clerc */ @@ -58,7 +55,13 @@ public abstract class PipelineMavenPluginDaoAbstractTest { @BeforeEach public void before() throws Exception { ds = before_newDataSource(); - SqlTestsUtils.silentlyDeleteTableRows(ds, "JENKINS_MASTER", "JENKINS_JOB", "JENKINS_BUILD", "MAVEN_ARTIFACT", "MAVEN_DEPENDENCY", + SqlTestsUtils.silentlyDeleteTableRows( + ds, + "JENKINS_MASTER", + "JENKINS_JOB", + "JENKINS_BUILD", + "MAVEN_ARTIFACT", + "MAVEN_DEPENDENCY", "GENERATED_MAVEN_ARTIFACT"); dao = before_newAbstractPipelineMavenPluginDao(ds); } @@ -96,11 +99,13 @@ public void getOrCreateArtifactPrimaryKey() throws Exception { @Test public void getOrCreateArtifactPrimaryKey_jarWithDependencies() throws Exception { - long primaryKey = dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies"); + long primaryKey = + dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies"); System.out.println(primaryKey); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); - long primaryKeySecondCall = dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", 
"1.2.3", "jar", "jar-with-dependencies"); + long primaryKeySecondCall = + dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies"); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); assertThat(primaryKeySecondCall).isEqualTo(primaryKey); @@ -121,7 +126,6 @@ public void getOrCreateJobPrimaryKey() throws Exception { SqlTestsUtils.dump("select * from JENKINS_BUILD", ds, System.out); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); - } @Test @@ -140,11 +144,12 @@ public void create_job_and_2_builds() throws Exception { SqlTestsUtils.dump("select * from JENKINS_JOB", ds, System.out); SqlTestsUtils.dump("select * from JENKINS_BUILD", ds, System.out); - assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB where FULL_NAME='my-pipeline' AND LAST_BUILD_NUMBER=2 AND LAST_SUCCESSFUL_BUILD_NUMBER=2", - ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows( + "select * from JENKINS_JOB where FULL_NAME='my-pipeline' AND LAST_BUILD_NUMBER=2 AND LAST_SUCCESSFUL_BUILD_NUMBER=2", + ds)) + .isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(2); - } @Test @@ -177,7 +182,6 @@ public void create_job_and_3_builds_and_delete_builds() throws Exception { dao.deleteBuild("my-pipeline", 3); System.out.println("AFTER DELETE LAST BUILD"); SqlTestsUtils.dump("select * from JENKINS_JOB", ds, System.out); - } @Test @@ -185,14 +189,18 @@ public void record_one_dependency() throws Exception { dao.recordDependency("my-pipeline", 1, "com.h2.database", "h2", "1.4.196", "jar", "compile", false, null); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); 
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(1); List mavenDependencies = dao.listDependencies("my-pipeline", 1); assertThat(mavenDependencies).hasSize(1); @@ -203,22 +211,26 @@ public void record_one_dependency() throws Exception { assertThat(dependency.getVersion()).isEqualTo("1.4.196"); assertThat(dependency.getType()).isEqualTo("jar"); assertThat(dependency.getScope()).isEqualTo("compile"); - } @Test public void record_one_parent_project() throws Exception { - dao.recordParentProject("my-pipeline", 1, "org.springframework.boot", "spring-boot-starter-parent", "1.5.4.RELEASE", false); + dao.recordParentProject( + "my-pipeline", 1, "org.springframework.boot", "spring-boot-starter-parent", "1.5.4.RELEASE", false); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_PARENT_PROJECT", ds, System.out); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_PARENT_PROJECT", ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows("select * from MAVEN_PARENT_PROJECT", ds)) + .isEqualTo(1); } @Test @@ -228,18 +240,23 @@ public void rename_job() throws Exception { dao.renameJob("my-pipeline-name-1", 
"my-pipeline-name-2"); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB", ds)).isEqualTo(1); - assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB WHERE FULL_NAME='my-pipeline-name-2'", ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB WHERE FULL_NAME='my-pipeline-name-2'", ds)) + .isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(1); } @Test @@ -250,7 +267,10 @@ public void delete_job() throws Exception { dao.deleteJob("my-pipeline"); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); @@ -259,7 +279,8 @@ public void delete_job() throws Exception { assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(0); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(0); + 
assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(0); } @Test @@ -272,7 +293,10 @@ public void delete_build() throws Exception { dao.deleteBuild("my-pipeline", 2); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); @@ -281,7 +305,8 @@ public void delete_build() throws Exception { assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(2); + assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(2); } @Test @@ -290,31 +315,59 @@ public void move_build() throws Exception { dao.recordDependency("my-pipeline", 1, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null); dao.renameJob("my-pipeline", "my-new-pipeline"); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB", ds)).isEqualTo(1); - assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB where full_name='my-new-pipeline'", ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB where full_name='my-new-pipeline'", ds)) 
+ .isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(1); + assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(1); } @Test public void record_two_generated_artifacts_on_the_same_build() throws Exception { - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", + null); assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2); - assertThat(SqlTestsUtils.countRows("select * from GENERATED_MAVEN_ARTIFACT", ds)).isEqualTo(2); + assertThat(SqlTestsUtils.countRows("select * from GENERATED_MAVEN_ARTIFACT", ds)) + .isEqualTo(2); List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1); assertThat(generatedArtifacts).hasSize(2); @@ -326,10 +379,12 @@ public void record_two_generated_artifacts_on_the_same_build() throws Exception assertThat(generatedArtifact.getExtension()).isIn("war", "jar"); } - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + 
SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from GENERATED_MAVEN_ARTIFACT", ds, System.out); - } @Test @@ -343,12 +398,15 @@ public void record_two_dependencies_on_the_same_build() throws Exception { assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(2); + assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(2); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); - } @Test @@ -359,7 +417,10 @@ public void record_two_dependencies_on_consecutive_builds_of_the_same_job() thro dao.recordDependency("my-pipeline", 2, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null); dao.recordDependency("my-pipeline", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); @@ -368,19 +429,24 @@ public void 
record_two_dependencies_on_consecutive_builds_of_the_same_job() thro assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(2); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(4); - + assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(4); } @Test public void record_two_dependencies_on_two_jobs() throws Exception { dao.recordDependency("my-pipeline-1", 1, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null); - dao.recordDependency("my-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordDependency( + "my-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); dao.recordDependency("my-pipeline-2", 2, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null); - dao.recordDependency("my-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordDependency( + "my-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out); SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out); @@ -389,8 +455,8 @@ public void record_two_dependencies_on_two_jobs() throws Exception { assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(2); assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2); - assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(4); - + 
assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)) + .isEqualTo(4); } @Deprecated @@ -398,31 +464,85 @@ public void record_two_dependencies_on_two_jobs() throws Exception { public void listDownstreamJobs_upstream_jar_triggers_downstream_pipelines() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "service", + "1.0-SNAPSHOT", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - 
dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1); assertThat(downstreamPipelinesForBuild1).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-SNAPSHOT", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-SNAPSHOT", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, 
Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2); assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1"); @@ -432,17 +552,44 @@ public void listDownstreamJobs_upstream_jar_triggers_downstream_pipelines() { public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipelines() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "service", + "1.0-SNAPSHOT", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - 
dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -453,7 +600,8 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); @@ -462,15 +610,42 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli } dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", 
null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-SNAPSHOT", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-SNAPSHOT", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -481,7 +656,8 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli expectedMavenArtifact.setType("jar"); 
expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1"); @@ -494,29 +670,91 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli public void listDownstreamJobsByArtifact_upstream_jar_with_classifier_triggers_downstream_pipelines() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "aType", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "aType", + "1.0-SNAPSHOT", + null, + false, + "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "anotherType", "1.0-SNAPSHOT", null, false, - "jar", "aClassifier"); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "anotherType", + "1.0-SNAPSHOT", + null, + false, + "jar", + "aClassifier"); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "aType", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", + 1, 
+ "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "aType", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "aType", "compile", false, "whatever"); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-2", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "aType", + "compile", + false, + "whatever"); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-3", 1); - dao.recordDependency("my-downstream-pipeline-3", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "whatever", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-3", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-3", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "whatever", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-3", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-4", 1); - dao.recordDependency("my-downstream-pipeline-4", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "anotherType", "compile", false, "aClassifier"); - dao.updateBuildOnCompletion("my-downstream-pipeline-4", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-4", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "anotherType", + "compile", + false, + "aClassifier"); + dao.updateBuildOnCompletion( + 
"my-downstream-pipeline-4", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); System.out.println(downstreamJobsByArtifactForBuild1); assertThat(downstreamJobsByArtifactForBuild1).hasSize(2); @@ -543,18 +781,68 @@ public void listDownstreamJobsByArtifact_upstream_jar_with_classifier_triggers_d public void listDownstreamJobsByArtifact_doesnt_return_artifacts_with_no_pipelines() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-shared", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "upstream-shared", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "upstream-2", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-2", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordDependency( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "upstream-shared", + "1.0-SNAPSHOT", + "jar", + "compile", + false, null); - dao.recordDependency("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-shared", "1.0-SNAPSHOT", "jar", "compile", false, null); - 
dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", + 1, + "com.mycompany", + "upstream-1", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -565,7 +853,8 @@ public void listDownstreamJobsByArtifact_doesnt_return_artifacts_with_no_pipelin expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); System.out.println(downstreamJobsByArtifactForBuild1); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); @@ -578,14 +867,35 @@ public void listDownstreamJobsByArtifact_doesnt_return_artifacts_with_no_pipelin @Test public void listDownstreamPipelinesBasedOnMavenDependencies_noBaseVersion() { dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + 
"my-downstream-pipeline-1", + 1, + "com.mycompany", + "dependency-1", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22); + dao.recordDependency( + "my-downstream-pipeline-2", + 1, + "com.mycompany", + "dependency-1", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22); - SortedSet downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-SNAPSHOT", null, "jar"); + SortedSet downstreamJobs = + dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-SNAPSHOT", null, "jar"); System.out.println(downstreamJobs); assertThat(downstreamJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); } @@ -593,14 +903,35 @@ public void listDownstreamPipelinesBasedOnMavenDependencies_noBaseVersion() { @Test public void listDownstreamPipelinesBasedOnMavenDependencies_withBaseVersion() { dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", + 1, + "com.mycompany", + "dependency-1", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() 
- 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22); + dao.recordDependency( + "my-downstream-pipeline-2", + 1, + "com.mycompany", + "dependency-1", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22); - SortedSet downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "jar"); + SortedSet downstreamJobs = + dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "jar"); System.out.println(downstreamJobs); assertThat(downstreamJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); } @@ -608,24 +939,48 @@ public void listDownstreamPipelinesBasedOnMavenDependencies_withBaseVersion() { @Test public void listDownstreamPipelinesBasedOnMavenDependencies_withClassifier() { dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "aType", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordDependency( + "my-downstream-pipeline-1", + 1, + "com.mycompany", + "dependency-1", + "1.0-SNAPSHOT", + "aType", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "anotherType", 
"compile", false, "aClassifier"); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22); + dao.recordDependency( + "my-downstream-pipeline-2", + 1, + "com.mycompany", + "dependency-1", + "1.0-SNAPSHOT", + "anotherType", + "compile", + false, + "aClassifier"); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22); - SortedSet downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType"); + SortedSet downstreamJobs = dao.listDownstreamJobs( + "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType"); System.out.println(downstreamJobs); assertThat(downstreamJobs).contains("my-downstream-pipeline-1"); - downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType", "whatever"); + downstreamJobs = dao.listDownstreamJobs( + "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType", "whatever"); assertThat(downstreamJobs).isEmpty(); - downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever"); + downstreamJobs = dao.listDownstreamJobs( + "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever"); assertThat(downstreamJobs).isEmpty(); - downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever", "aClassifier"); + downstreamJobs = dao.listDownstreamJobs( + "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever", "aClassifier"); assertThat(downstreamJobs).isEmpty(); } @@ -634,18 +989,38 @@ public void listDownstreamPipelinesBasedOnMavenDependencies_withClassifier() { public void listDownstreamJobs_upstream_pom_triggers_downstream_pipelines() { 
dao.getOrCreateBuildPrimaryKey("my-upstream-pom-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pom-pipeline-1", 1, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", "pom", "1.0-SNAPSHOT", null, false, - "pom", null); - dao.updateBuildOnCompletion("my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pom-pipeline-1", + 1, + "com.mycompany.pom", + "parent-pom", + "1.0-SNAPSHOT", + "pom", + "1.0-SNAPSHOT", + null, + false, + "pom", + null); + dao.updateBuildOnCompletion( + "my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordParentProject("my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); - - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); - SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID", ds, + dao.recordParentProject( + "my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); + + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, + System.out); + SqlTestsUtils.dump( + "select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID", + ds, System.out); - SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID", ds, + SqlTestsUtils.dump( + 
"select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID", + ds, System.out); List downstreamJobs = dao.listDownstreamJobs("my-upstream-pom-pipeline-1", 1); @@ -657,18 +1032,38 @@ public void listDownstreamJobs_upstream_pom_triggers_downstream_pipelines() { public void listDownstreamJobsbyArtifact_upstream_pom_triggers_downstream_pipelines() { dao.getOrCreateBuildPrimaryKey("my-upstream-pom-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pom-pipeline-1", 1, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", "pom", "1.0-SNAPSHOT", null, false, - "pom", null); - dao.updateBuildOnCompletion("my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pom-pipeline-1", + 1, + "com.mycompany.pom", + "parent-pom", + "1.0-SNAPSHOT", + "pom", + "1.0-SNAPSHOT", + null, + false, + "pom", + null); + dao.updateBuildOnCompletion( + "my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordParentProject("my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); - - SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out); - SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID", ds, + dao.recordParentProject( + "my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); + + SqlTestsUtils.dump( + "select * from JENKINS_BUILD LEFT OUTER JOIN 
JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", + ds, System.out); - SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID", ds, + SqlTestsUtils.dump( + "select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID", + ds, + System.out); + SqlTestsUtils.dump( + "select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID", + ds, System.out); { @@ -680,7 +1075,8 @@ public void listDownstreamJobsbyArtifact_upstream_pom_triggers_downstream_pipeli expectedMavenArtifact.setType("pom"); expectedMavenArtifact.setExtension("pom"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pom-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pom-pipeline-1", 1); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1"); @@ -695,33 +1091,87 @@ public void listDownstreamJobsbyArtifact_upstream_pom_triggers_downstream_pipeli public void list_downstream_jobs_with_ignoreUpstreamTriggers_activated() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + 
"my-upstream-pipeline-1", + 1, + "com.mycompany", + "service", + "1.0-SNAPSHOT", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); + dao.recordDependency( + "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); + dao.recordDependency( + "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1); assertThat(downstreamPipelinesForBuild1).contains("my-downstream-pipeline-2"); dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 
2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-SNAPSHOT", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-SNAPSHOT", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2); assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1"); @@ -731,17 +1181,44 @@ public void list_downstream_jobs_with_ignoreUpstreamTriggers_activated() { public void 
list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activated() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "service", + "1.0-SNAPSHOT", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); + dao.recordDependency( + "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); + dao.recordDependency( + "my-downstream-pipeline-2", 1, "com.mycompany", 
"core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -752,7 +1229,8 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-2"); @@ -762,17 +1240,44 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat } dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-SNAPSHOT", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-SNAPSHOT", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", 
"1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -783,7 +1288,8 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1"); @@ -798,32 +1304,91 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat public void list_downstream_jobs_with_skippedDownstreamTriggersActivated() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, true, "jar", null); - 
dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "shared", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + true, + "jar", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); + dao.recordDependency( + "my-downstream-pipeline-1", + 1, + "com.mycompany", + "shared", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5); + dao.recordDependency( + "my-downstream-pipeline-2", + 1, + "com.mycompany", + "shared", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5); List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1); assertThat(downstreamPipelinesForBuild1).isEmpty(); dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null); - 
dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-SNAPSHOT", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-SNAPSHOT", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2); 
assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1"); @@ -833,16 +1398,48 @@ public void list_downstream_jobs_with_skippedDownstreamTriggersActivated() { public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActivated() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, true, "jar", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "shared", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + true, + "jar", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); + dao.recordDependency( + "my-downstream-pipeline-1", + 1, + "com.mycompany", + "shared", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5); + dao.recordDependency( + "my-downstream-pipeline-2", + 1, + "com.mycompany", + "shared", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + null); + dao.updateBuildOnCompletion( + 
"my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -851,22 +1448,50 @@ public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActiv expectedMavenArtifact.setVersion("1.0-SNAPSHOT"); expectedMavenArtifact.setType("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); assertThat(downstreamJobsByArtifactForBuild1).hasSize(0); } dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-SNAPSHOT", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-SNAPSHOT", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", 
"compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -877,7 +1502,8 @@ public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActiv expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1"); @@ -892,37 +1518,87 @@ public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActiv public void list_downstream_jobs_timestamped_snapshot_version() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-20170808.155524-63", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-20170808.155524-63", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "service", + 
"1.0-20170808.155524-64", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-20170808.155524-64", "war", "1.0-SNAPSHOT", null, false, - "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22); + dao.recordDependency( + "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22); + dao.recordDependency( + "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22); List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1); assertThat(downstreamPipelinesForBuild1).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", 
"1.1-20170808.155524-65", "jar", "1.1-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-20170808.155524-65", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-20170808.155524-66", "war", "1.1-SNAPSHOT", null, false, - "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-20170808.155524-66", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); List 
downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2); assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1"); @@ -932,19 +1608,44 @@ public void list_downstream_jobs_timestamped_snapshot_version() { public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-20170808.155524-63", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-20170808.155524-63", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "service", + "1.0-20170808.155524-64", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-20170808.155524-64", "war", "1.0-SNAPSHOT", null, false, - "war", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); - dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22); + dao.recordDependency( + "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); - 
dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22); + dao.recordDependency( + "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -955,7 +1656,8 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() { expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); @@ -964,19 +1666,44 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() { } dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-20170808.155524-65", "jar", "1.1-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "core", + "1.1-20170808.155524-65", + "jar", + "1.1-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "service", + "1.1-20170808.155524-66", + "war", + "1.1-SNAPSHOT", + null, + false, + "war", null); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-20170808.155524-66", "war", "1.1-SNAPSHOT", null, false, - "war", null); - 
dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); - dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.recordDependency( + "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); - dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.recordDependency( + "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -987,7 +1714,8 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() { expectedMavenArtifact.setType("jar"); expectedMavenArtifact.setExtension("jar"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1"); @@ -1002,14 +1730,27 @@ 
public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() { public void get_generated_artifacts_with_timestamped_snapshot_version() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-20170808.155524-63", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-20170808.155524-63", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1); - System.out.println("GeneratedArtifacts " + generatedArtifacts.stream() - .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + ", baseVersion: " + mavenArtifact.getBaseVersion()) - .collect(Collectors.joining(", "))); + System.out.println("GeneratedArtifacts " + + generatedArtifacts.stream() + .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + + ", baseVersion: " + mavenArtifact.getBaseVersion()) + .collect(Collectors.joining(", "))); assertThat(generatedArtifacts).hasSize(1); MavenArtifact jar = generatedArtifacts.get(0); @@ -1023,13 +1764,27 @@ public void get_generated_artifacts_with_timestamped_snapshot_version() { public void get_generated_artifacts_with_non_timestamped_snapshot_version() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + 
dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1); - System.out.println("GeneratedArtifacts " + generatedArtifacts.stream() - .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + ", baseVersion: " + mavenArtifact.getBaseVersion()) - .collect(Collectors.joining(", "))); + System.out.println("GeneratedArtifacts " + + generatedArtifacts.stream() + .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + + ", baseVersion: " + mavenArtifact.getBaseVersion()) + .collect(Collectors.joining(", "))); assertThat(generatedArtifacts).hasSize(1); MavenArtifact jar = generatedArtifacts.get(0); @@ -1047,13 +1802,27 @@ public void get_generated_artifacts_with_non_timestamped_snapshot_version() { public void get_generated_artifacts_with_null_version() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", null, "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "core", + null, + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1); - System.out.println("GeneratedArtifacts " + generatedArtifacts.stream() - .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + 
mavenArtifact.getVersion() + ", baseVersion: " + mavenArtifact.getBaseVersion()) - .collect(Collectors.joining(", "))); + System.out.println("GeneratedArtifacts " + + generatedArtifacts.stream() + .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + + ", baseVersion: " + mavenArtifact.getBaseVersion()) + .collect(Collectors.joining(", "))); assertThat(generatedArtifacts).hasSize(1); MavenArtifact jar = generatedArtifacts.get(0); @@ -1067,17 +1836,30 @@ public void get_generated_artifacts_with_null_version() { public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() { dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "parent-pom", "1.0-20170808.155524-63", "pom", "1.0-SNAPSHOT", null, false, - "pom", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 1, + "com.mycompany", + "parent-pom", + "1.0-20170808.155524-63", + "pom", + "1.0-SNAPSHOT", + null, + false, + "pom", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1); dao.recordParentProject("my-downstream-pipeline-1", 1, "com.mycompany", "parent-pom", "1.0-SNAPSHOT", false); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1); dao.recordParentProject("my-downstream-pipeline-2", 1, "com.mycompany", "parent-pom", "1.0-SNAPSHOT", false); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, 
System.currentTimeMillis() - 50, 22); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -1088,7 +1870,8 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() { expectedMavenArtifact.setType("pom"); expectedMavenArtifact.setExtension("pom"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2"); @@ -1097,17 +1880,30 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() { } dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2); - dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "parent-pom", "1.1-20170808.155524-65", "pom", "1.1-SNAPSHOT", null, false, - "pom", null); - dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.recordGeneratedArtifact( + "my-upstream-pipeline-1", + 2, + "com.mycompany", + "parent-pom", + "1.1-20170808.155524-65", + "pom", + "1.1-SNAPSHOT", + null, + false, + "pom", + null); + dao.updateBuildOnCompletion( + "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2); dao.recordParentProject("my-downstream-pipeline-1", 2, "com.mycompany", "parent-pom", "1.1-SNAPSHOT", false); - dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); 
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2); dao.recordParentProject("my-downstream-pipeline-2", 2, "com.mycompany", "parent-pom", "1.0-SNAPSHOT", false); - dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); + dao.updateBuildOnCompletion( + "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9); { MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -1118,7 +1914,8 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() { expectedMavenArtifact.setType("pom"); expectedMavenArtifact.setExtension("pom"); - Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); + Map> downstreamJobsByArtifactForBuild1 = + dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2); SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact); assertThat(actualJobs).contains("my-downstream-pipeline-1"); @@ -1132,68 +1929,148 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() { public void list_upstream_pipelines_based_on_maven_dependencies() { dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.updateBuildOnCompletion( + "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core", 1); - dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); - 
dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); + dao.recordDependency( + "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordGeneratedArtifact( + "pipeline-core", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out); SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out); SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out); - Map upstreamPipelinesForBuild1 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core", 1); + Map upstreamPipelinesForBuild1 = + dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core", 1); assertThat(upstreamPipelinesForBuild1.keySet()).contains("pipeline-framework"); - } @Test public void list_upstream_pipelines_based_on_maven_dependencies_with_classifier() { dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "anotherType", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "aType", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "anotherType", + "1.0-SNAPSHOT", + null, + false, + "jar", "aClassifier"); - dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + 
dao.updateBuildOnCompletion( + "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core1", 1); - dao.recordDependency("pipeline-core1", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, null); + dao.recordDependency( + "pipeline-core1", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, null); dao.updateBuildOnCompletion("pipeline-core1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core2", 1); - dao.recordDependency("pipeline-core2", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, "whatever"); + dao.recordDependency( + "pipeline-core2", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "aType", + "compile", + false, + "whatever"); dao.updateBuildOnCompletion("pipeline-core2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core3", 1); - dao.recordDependency("pipeline-core3", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "whatever", "compile", false, null); + dao.recordDependency( + "pipeline-core3", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "whatever", "compile", false, null); dao.updateBuildOnCompletion("pipeline-core3", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core4", 1); - dao.recordDependency("pipeline-core4", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "whatever", "compile", false, "aClassifier"); + dao.recordDependency( + "pipeline-core4", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "whatever", + "compile", + false, + "aClassifier"); dao.updateBuildOnCompletion("pipeline-core4", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core5", 1); - dao.recordDependency("pipeline-core5", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", 
"anotherType", "compile", false, "aClassifier"); + dao.recordDependency( + "pipeline-core5", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "anotherType", + "compile", + false, + "aClassifier"); dao.updateBuildOnCompletion("pipeline-core5", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out); SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out); SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out); - Map upstreamPipelinesForBuild1 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core1", 1); + Map upstreamPipelinesForBuild1 = + dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core1", 1); assertThat(upstreamPipelinesForBuild1.keySet()).contains("pipeline-framework"); - Map upstreamPipelinesForBuild2 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core2", 1); + Map upstreamPipelinesForBuild2 = + dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core2", 1); assertThat(upstreamPipelinesForBuild2.keySet()).isEmpty(); - Map upstreamPipelinesForBuild3 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core3", 1); + Map upstreamPipelinesForBuild3 = + dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core3", 1); assertThat(upstreamPipelinesForBuild3.keySet()).isEmpty(); - Map upstreamPipelinesForBuild4 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core4", 1); + Map upstreamPipelinesForBuild4 = + dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core4", 1); assertThat(upstreamPipelinesForBuild4.keySet()).isEmpty(); - Map upstreamPipelinesForBuild5 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core5", 1); + Map upstreamPipelinesForBuild5 = + dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core5", 1); assertThat(upstreamPipelinesForBuild5.keySet()).contains("pipeline-framework"); } @@ -1201,41 +2078,102 @@ public 
void list_upstream_pipelines_based_on_maven_dependencies_with_classifier( public void list_upstream_pipelines_based_on_parent_project() { dao.getOrCreateBuildPrimaryKey("pipeline-parent-pom", 1); - dao.recordGeneratedArtifact("pipeline-parent-pom", 1, "com.mycompany", "company-parent-pom", "1.0-SNAPSHOT", "pom", "1.0-SNAPSHOT", null, false, "pom", + dao.recordGeneratedArtifact( + "pipeline-parent-pom", + 1, + "com.mycompany", + "company-parent-pom", + "1.0-SNAPSHOT", + "pom", + "1.0-SNAPSHOT", + null, + false, + "pom", null); - dao.updateBuildOnCompletion("pipeline-parent-pom", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.updateBuildOnCompletion( + "pipeline-parent-pom", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core", 1); dao.recordParentProject("pipeline-core", 1, "com.mycompany", "company-parent-pom", "1.0-SNAPSHOT", false); - dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); + dao.recordDependency( + "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordGeneratedArtifact( + "pipeline-core", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out); SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out); SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out); - Map upstreamPipelinesForBuild1 = dao.listUpstreamPipelinesBasedOnParentProjectDependencies("pipeline-core", 1); + Map upstreamPipelinesForBuild1 = + 
dao.listUpstreamPipelinesBasedOnParentProjectDependencies("pipeline-core", 1); assertThat(upstreamPipelinesForBuild1.keySet()).contains("pipeline-parent-pom"); - } @Test public void list_transitive_upstream_jobs() { dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.updateBuildOnCompletion( + "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core", 1); - dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); + dao.recordDependency( + "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordGeneratedArtifact( + "pipeline-core", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-service", 1); - dao.recordDependency("pipeline-service", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.recordDependency("pipeline-service", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.recordGeneratedArtifact("pipeline-service", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null); - 
dao.updateBuildOnCompletion("pipeline-service", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22); + dao.recordDependency( + "pipeline-service", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordDependency( + "pipeline-service", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordGeneratedArtifact( + "pipeline-service", + 1, + "com.mycompany", + "service", + "1.0-SNAPSHOT", + "war", + "1.0-SNAPSHOT", + null, + false, + "war", + null); + dao.updateBuildOnCompletion( + "pipeline-service", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22); SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out); SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out); @@ -1248,24 +2186,66 @@ public void list_transitive_upstream_jobs() { public void list_transitive_upstream_jobs_with_classifier() { dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "aType", + "1.0-SNAPSHOT", + null, + false, + "jar", "aClassifier"); - dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.updateBuildOnCompletion( + "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core1", 1); - dao.recordDependency("pipeline-core1", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, "aClassifier"); - dao.recordGeneratedArtifact("pipeline-core1", 1, "com.mycompany", "core1", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", "aClassifier"); + dao.recordDependency( + "pipeline-core1", + 1, + "com.mycompany", + "framework", + 
"1.0-SNAPSHOT", + "aType", + "compile", + false, + "aClassifier"); + dao.recordGeneratedArtifact( + "pipeline-core1", + 1, + "com.mycompany", + "core1", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + "aClassifier"); dao.updateBuildOnCompletion("pipeline-core1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-service1", 1); - dao.recordDependency("pipeline-service1", 1, "com.mycompany", "core1", "1.0-SNAPSHOT", "jar", "compile", false, "aClassifier"); - dao.updateBuildOnCompletion("pipeline-service1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22); + dao.recordDependency( + "pipeline-service1", + 1, + "com.mycompany", + "core1", + "1.0-SNAPSHOT", + "jar", + "compile", + false, + "aClassifier"); + dao.updateBuildOnCompletion( + "pipeline-service1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22); SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out); SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out); SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out); - assertThat(dao.listTransitiveUpstreamJobs("pipeline-service1", 1).keySet()).contains("pipeline-framework", "pipeline-core1"); + assertThat(dao.listTransitiveUpstreamJobs("pipeline-service1", 1).keySet()) + .contains("pipeline-framework", "pipeline-core1"); } @Deprecated @@ -1273,18 +2253,41 @@ public void list_transitive_upstream_jobs_with_classifier() { public void list_downstream_jobs_with_failed_last_build() { dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + 
"1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); + dao.updateBuildOnCompletion( + "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core", 1); - dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); + dao.recordDependency( + "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordGeneratedArtifact( + "pipeline-core", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); { List downstreamJobs = dao.listDownstreamJobs("pipeline-framework", 1); assertThat(downstreamJobs).contains("pipeline-core"); - } // pipeline-core#2 fails before dependencies have been tracked @@ -1298,7 +2301,6 @@ public void list_downstream_jobs_with_failed_last_build() { { List downstreamJobs = dao.listDownstreamJobs("pipeline-framework", 1); assertThat(downstreamJobs).contains("pipeline-core"); - } } @@ -1306,14 +2308,48 @@ public void list_downstream_jobs_with_failed_last_build() { public void list_downstream_jobs_by_artifact_with_failed_last_build() { dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); - dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + 
null); + dao.recordGeneratedArtifact( + "pipeline-framework", + 1, + "com.mycompany", + "framework", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", "sources"); - dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); + dao.updateBuildOnCompletion( + "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); dao.getOrCreateBuildPrimaryKey("pipeline-core", 1); - dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); - dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null); + dao.recordDependency( + "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null); + dao.recordGeneratedArtifact( + "pipeline-core", + 1, + "com.mycompany", + "core", + "1.0-SNAPSHOT", + "jar", + "1.0-SNAPSHOT", + null, + false, + "jar", + null); dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11); MavenArtifact expectedMavenArtifact = new MavenArtifact(); @@ -1325,12 +2361,12 @@ public void list_downstream_jobs_by_artifact_with_failed_last_build() { expectedMavenArtifact.setExtension("jar"); { - Map> downstreamJobsByArtifact = dao.listDownstreamJobsByArtifact("pipeline-framework", 1); + Map> downstreamJobsByArtifact = + dao.listDownstreamJobsByArtifact("pipeline-framework", 1); SortedSet actualJobs = downstreamJobsByArtifact.get(expectedMavenArtifact); assertThat(actualJobs).contains("pipeline-core"); assertThat(downstreamJobsByArtifact).hasSize(1); - } // pipeline-core#2 fails before dependencies have been tracked @@ -1342,7 +2378,8 @@ public void list_downstream_jobs_by_artifact_with_failed_last_build() { SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out); { - Map> downstreamJobsByArtifact = 
dao.listDownstreamJobsByArtifact("pipeline-framework", 1); + Map> downstreamJobsByArtifact = + dao.listDownstreamJobsByArtifact("pipeline-framework", 1); SortedSet actualJobs = downstreamJobsByArtifact.get(expectedMavenArtifact); assertThat(actualJobs).contains("pipeline-core"); assertThat(downstreamJobsByArtifact).hasSize(1); diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java index f39701e6..cc673b6a 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java @@ -26,17 +26,15 @@ import static org.assertj.core.api.Assertions.assertThat; +import edu.umd.cs.findbugs.annotations.NonNull; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; - import org.h2.jdbcx.JdbcConnectionPool; import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; import org.junit.jupiter.api.Test; -import edu.umd.cs.findbugs.annotations.NonNull; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java index 1b02739e..60172381 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java @@ -24,11 +24,10 @@ package org.jenkinsci.plugins.pipeline.maven.db; +import javax.sql.DataSource; import 
org.h2.jdbcx.JdbcConnectionPool; import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; -import javax.sql.DataSource; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java index 895bf8a7..2aeee36c 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java @@ -30,28 +30,24 @@ import static org.mockito.Mockito.when; import static org.testcontainers.images.PullPolicy.alwaysPull; -import java.util.Collections; - -import javax.sql.DataSource; - -import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; -import org.junit.jupiter.api.Test; -import org.mockito.MockedStatic; -import org.testcontainers.containers.MariaDBContainer; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; - import com.cloudbees.plugins.credentials.CredentialsMatchers; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; - import hudson.security.ACL; import hudson.util.FormValidation; import hudson.util.Secret; +import java.util.Collections; +import javax.sql.DataSource; import jenkins.model.Jenkins; +import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; +import org.junit.jupiter.api.Test; +import org.mockito.MockedStatic; +import org.testcontainers.containers.MariaDBContainer; 
+import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; /** * @author Cyrille Le Clerc @@ -60,7 +56,8 @@ public class PipelineMavenPluginMariaDbDaoIT extends PipelineMavenPluginDaoAbstractTest { @Container - public static MariaDBContainer DB = new MariaDBContainer<>(MariaDBContainer.NAME).withImagePullPolicy(alwaysPull()); + public static MariaDBContainer DB = + new MariaDBContainer<>(MariaDBContainer.NAME).withImagePullPolicy(alwaysPull()); @Override public DataSource before_newDataSource() { @@ -96,13 +93,16 @@ public void ensureValidateConfiguration() throws Exception { try (MockedStatic j = mockStatic(Jenkins.class); MockedStatic m = mockStatic(CredentialsMatchers.class); MockedStatic c = mockStatic(CredentialsProvider.class)) { - PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config().jdbcUrl(DB.getJdbcUrl()).credentialsId("credsId"); + PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config() + .jdbcUrl(DB.getJdbcUrl()) + .credentialsId("credsId"); UsernamePasswordCredentials credentials = mock(UsernamePasswordCredentials.class); Secret password = Secret.fromString(DB.getPassword()); String version = DB.createConnection("").getMetaData().getDatabaseProductVersion(); j.when(Jenkins::get).thenReturn(null); m.when(() -> CredentialsMatchers.withId("credsId")).thenReturn(null); - c.when(() -> CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST)) + c.when(() -> CredentialsProvider.lookupCredentials( + UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST)) .thenReturn(null); c.when(() -> CredentialsMatchers.firstOrNull(null, null)).thenReturn(credentials); when(credentials.getUsername()).thenReturn(DB.getUsername()); diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java 
b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java index 4e537230..1c070f9c 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java @@ -30,28 +30,24 @@ import static org.mockito.Mockito.when; import static org.testcontainers.images.PullPolicy.alwaysPull; -import java.util.Collections; - -import javax.sql.DataSource; - -import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; -import org.junit.jupiter.api.Test; -import org.mockito.MockedStatic; -import org.testcontainers.containers.MySQLContainer; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; - import com.cloudbees.plugins.credentials.CredentialsMatchers; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; - import hudson.security.ACL; import hudson.util.FormValidation; import hudson.util.Secret; +import java.util.Collections; +import javax.sql.DataSource; import jenkins.model.Jenkins; +import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; +import org.junit.jupiter.api.Test; +import org.mockito.MockedStatic; +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; /** * @author Cyrille Le Clerc @@ -96,13 +92,16 @@ public void ensureValidateConfiguration() throws Exception { try (MockedStatic j = mockStatic(Jenkins.class); MockedStatic m = 
mockStatic(CredentialsMatchers.class); MockedStatic c = mockStatic(CredentialsProvider.class)) { - PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config().jdbcUrl(DB.getJdbcUrl()).credentialsId("credsId"); + PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config() + .jdbcUrl(DB.getJdbcUrl()) + .credentialsId("credsId"); UsernamePasswordCredentials credentials = mock(UsernamePasswordCredentials.class); Secret password = Secret.fromString(DB.getPassword()); String version = DB.createConnection("").getMetaData().getDatabaseProductVersion(); j.when(Jenkins::get).thenReturn(null); m.when(() -> CredentialsMatchers.withId("credsId")).thenReturn(null); - c.when(() -> CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST)) + c.when(() -> CredentialsProvider.lookupCredentials( + UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST)) .thenReturn(null); c.when(() -> CredentialsMatchers.firstOrNull(null, null)).thenReturn(credentials); when(credentials.getUsername()).thenReturn(DB.getUsername()); diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java index ea487def..24864cdd 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java @@ -26,17 +26,15 @@ import static org.assertj.core.api.Assertions.assertThat; +import edu.umd.cs.findbugs.annotations.NonNull; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; - 
import org.h2.jdbcx.JdbcConnectionPool; import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; import org.junit.jupiter.api.Test; -import edu.umd.cs.findbugs.annotations.NonNull; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java index 3c998c00..5ac409da 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java @@ -27,7 +27,6 @@ import static org.assertj.core.api.Assertions.assertThat; import javax.sql.DataSource; - import org.h2.jdbcx.JdbcConnectionPool; import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; import org.junit.jupiter.api.Test; @@ -75,7 +74,8 @@ public void test_mariadb_version_parsing_JENKINS_55378() { */ @Test public void test_mariadb_version_parsing_mariadb_as_docker_container() { - String actual = PipelineMavenPluginMySqlDao.extractMariaDbVersion("5.5.5-10.3.11-MariaDB-1:10.3.11+maria~bionic"); + String actual = + PipelineMavenPluginMySqlDao.extractMariaDbVersion("5.5.5-10.3.11-MariaDB-1:10.3.11+maria~bionic"); assertThat(actual).isEqualTo("10.3.11"); } } diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java index 458ffb5a..d32697c6 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java @@ -30,28 +30,24 @@ 
import static org.mockito.Mockito.when; import static org.testcontainers.images.PullPolicy.alwaysPull; -import java.util.Collections; - -import javax.sql.DataSource; - -import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; -import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; -import org.junit.jupiter.api.Test; -import org.mockito.MockedStatic; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; - import com.cloudbees.plugins.credentials.CredentialsMatchers; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; - import hudson.security.ACL; import hudson.util.FormValidation; import hudson.util.Secret; +import java.util.Collections; +import javax.sql.DataSource; import jenkins.model.Jenkins; +import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; +import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; +import org.junit.jupiter.api.Test; +import org.mockito.MockedStatic; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; /** * @author Cyrille Le Clerc @@ -60,7 +56,8 @@ public class PipelineMavenPluginPostgreSqlDaoIT extends PipelineMavenPluginDaoAbstractTest { @Container - public static PostgreSQLContainer DB = new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE).withImagePullPolicy(alwaysPull()); + public static PostgreSQLContainer DB = + new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE).withImagePullPolicy(alwaysPull()); @Override public DataSource before_newDataSource() throws Exception { @@ -97,13 +94,16 @@ public void ensureValidateConfiguration() throws Exception { try (MockedStatic j = 
mockStatic(Jenkins.class); MockedStatic m = mockStatic(CredentialsMatchers.class); MockedStatic c = mockStatic(CredentialsProvider.class)) { - PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config().jdbcUrl(DB.getJdbcUrl()).credentialsId("credsId"); + PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config() + .jdbcUrl(DB.getJdbcUrl()) + .credentialsId("credsId"); UsernamePasswordCredentials credentials = mock(UsernamePasswordCredentials.class); Secret password = Secret.fromString(DB.getPassword()); String version = DB.createConnection("").getMetaData().getDatabaseProductVersion(); j.when(Jenkins::get).thenReturn(null); m.when(() -> CredentialsMatchers.withId("credsId")).thenReturn(null); - c.when(() -> CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST)) + c.when(() -> CredentialsProvider.lookupCredentials( + UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST)) .thenReturn(null); c.when(() -> CredentialsMatchers.firstOrNull(null, null)).thenReturn(credentials); when(credentials.getUsername()).thenReturn(DB.getUsername()); diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java index 8a3c6015..f8b66e8d 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java @@ -24,11 +24,10 @@ package org.jenkinsci.plugins.pipeline.maven.db; +import javax.sql.DataSource; import org.h2.jdbcx.JdbcConnectionPool; import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep; -import javax.sql.DataSource; - 
/** * @author Cyrille Le Clerc */ @@ -58,5 +57,4 @@ public String getMasterRootUrl() { } }; } - } diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java index 481fcb6f..a045914d 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java @@ -12,6 +12,7 @@ public class ClassUtilsTest { @Test public void testGetResource() { - assertThat(getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/util/classutils-test-1.txt")).isNotNull(); + assertThat(getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/util/classutils-test-1.txt")) + .isNotNull(); } } diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java index 2c5734bd..ae550afb 100644 --- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java +++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java @@ -1,14 +1,8 @@ package org.jenkinsci.plugins.pipeline.maven.db.util; -import org.h2.api.ErrorCode; - -import edu.umd.cs.findbugs.annotations.NonNull; -import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException; - -import javax.sql.DataSource; - import static java.util.Optional.ofNullable; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.PrintStream; import java.sql.Connection; import java.sql.PreparedStatement; @@ -16,13 +10,14 @@ import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; +import javax.sql.DataSource; +import org.h2.api.ErrorCode; /** * @author Cyrille Le 
Clerc */ public class SqlTestsUtils { - public static void dump(String sql, DataSource ds, PrintStream out) throws RuntimeSqlException { try (Connection connection = ds.getConnection()) { out.println("# DUMP " + sql); @@ -66,11 +61,12 @@ public static int countRows(@NonNull String sql, @NonNull DataSource ds, Object. } public static int countRows(@NonNull String sql, @NonNull Connection cnn, Object... params) throws SQLException { - String sqlQuery ; - if (sql.startsWith("select * from")){ + String sqlQuery; + if (sql.startsWith("select * from")) { sqlQuery = "select count(*) from " + sql.substring("select * from".length()); } else { - sqlQuery = "select count(*) from (" + sql + ")"; } + sqlQuery = "select count(*) from (" + sql + ")"; + } try (PreparedStatement stmt = cnn.prepareStatement(sqlQuery)) { int idx = 1; diff --git a/pipeline-maven-spy/pom.xml b/pipeline-maven-spy/pom.xml index 3d883907..8c9a9ffc 100644 --- a/pipeline-maven-spy/pom.xml +++ b/pipeline-maven-spy/pom.xml @@ -43,16 +43,16 @@ javax.inject 1 - - org.apache.maven - maven-core - provided - org.sonatype.aether aether-api 1.13.1 + + org.apache.maven + maven-core + provided + org.slf4j diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java index ce181aae..a808677c 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java @@ -24,6 +24,16 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.inject.Named; +import javax.inject.Singleton; 
import org.apache.maven.eventspy.AbstractEventSpy; import org.apache.maven.eventspy.EventSpy; import org.codehaus.plexus.util.xml.Xpp3Dom; @@ -52,17 +62,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Named; -import javax.inject.Singleton; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * Maven {@link EventSpy} to capture build details consumed by the Jenkins Pipeline Maven Plugin * and the {@code withMaven(){...}} pipeline step. @@ -73,9 +72,10 @@ @Singleton public class JenkinsMavenEventSpy extends AbstractEventSpy { - public final static String DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME = JenkinsMavenEventSpy.class.getName() + ".disabled"; + public static final String DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME = + JenkinsMavenEventSpy.class.getName() + ".disabled"; - public final static String DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME = "JENKINS_MAVEN_AGENT_DISABLED"; + public static final String DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME = "JENKINS_MAVEN_AGENT_DISABLED"; private final Logger logger = LoggerFactory.getLogger(getClass()); @@ -87,10 +87,11 @@ public class JenkinsMavenEventSpy extends AbstractEventSpy { protected final boolean disabled; private Set blackList = new HashSet(); - private Set ignoredList = new HashSet(Collections.singletonList( - /*"org.eclipse.aether.RepositoryEvent",*/ - "org.apache.maven.settings.building.DefaultSettingsBuildingResult"/*, - "org.apache.maven.execution.DefaultMavenExecutionResult"*/)); + private Set ignoredList = new HashSet( + Collections.singletonList( + /*"org.eclipse.aether.RepositoryEvent",*/ + "org.apache.maven.settings.building.DefaultSettingsBuildingResult" /*, + "org.apache.maven.execution.DefaultMavenExecutionResult"*/)); private List handlers = new ArrayList(); @@ -150,8 +151,7 @@ public void 
init(EventSpy.Context context) throws Exception { @Override public void onEvent(Object event) throws Exception { - if (disabled) - return; + if (disabled) return; try { if (blackList.contains(event.getClass())) { @@ -174,7 +174,6 @@ public void onEvent(Object event) throws Exception { } } - @Override public void close() { if (disabled) { @@ -187,9 +186,9 @@ public void close() { /** * Visible for testing */ - protected boolean isEventSpyDisabled(){ - return "true".equalsIgnoreCase(System.getProperty(DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME)) || - "true".equalsIgnoreCase(System.getenv(DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME)); + protected boolean isEventSpyDisabled() { + return "true".equalsIgnoreCase(System.getProperty(DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME)) + || "true".equalsIgnoreCase(System.getenv(DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME)); } public MavenEventReporter getReporter() { diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java index d449da5a..72097a12 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java @@ -24,16 +24,14 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.apache.maven.plugin.MojoExecution; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le 
Clerc */ @@ -60,7 +58,9 @@ public boolean handle(@NonNull Object event) { String[] gag = supportedGoal.split(":"); if (gag.length == 3) { MojoExecution execution = executionEvent.getMojoExecution(); - if (execution.getGroupId().equals(gag[0]) && execution.getArtifactId().equals(gag[1]) && execution.getGoal().equals(gag[2])) { + if (execution.getGroupId().equals(gag[0]) + && execution.getArtifactId().equals(gag[1]) + && execution.getGoal().equals(gag[2])) { _handle(executionEvent); return true; } else { @@ -71,7 +71,6 @@ public boolean handle(@NonNull Object event) { return false; } } - } @Override @@ -106,7 +105,8 @@ public boolean _handle(@NonNull ExecutionEvent executionEvent) { } for (String configurationParameter : configurationParameters) { - Xpp3Dom element = fullClone(configurationParameter, execution.getConfiguration().getChild(configurationParameter)); + Xpp3Dom element = fullClone( + configurationParameter, execution.getConfiguration().getChild(configurationParameter)); if (element != null) { plugin.addChild(element); } @@ -115,7 +115,7 @@ public boolean _handle(@NonNull ExecutionEvent executionEvent) { addDetails(executionEvent, root); - if(executionEvent.getException() != null) { + if (executionEvent.getException() != null) { root.addChild(newElement("exception", executionEvent.getException())); } @@ -124,9 +124,7 @@ public boolean _handle(@NonNull ExecutionEvent executionEvent) { return true; } - protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3Dom root) { - - } + protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3Dom root) {} @NonNull protected abstract List getConfigurationParametersToReport(ExecutionEvent executionEvent); @@ -138,7 +136,6 @@ protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3D @Nullable protected abstract ExecutionEvent.Type getSupportedType(); - /** * * @return {@code null} if all goals are supported by this {@link AbstractExecutionHandler} @@ 
-156,7 +153,9 @@ public String toString() { @Nullable protected String getMojoConfigurationValue(@NonNull MojoExecution execution, @NonNull String elementName) { Xpp3Dom element = execution.getConfiguration().getChild(elementName); - return element == null ? null : element.getValue() == null ? element.getAttribute("default-value") : element.getValue(); + return element == null + ? null + : element.getValue() == null ? element.getAttribute("default-value") : element.getValue(); } @Nullable diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java index cd885f9c..a6de09b7 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java @@ -24,6 +24,8 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.io.File; import java.io.IOException; import java.io.PrintWriter; @@ -31,7 +33,6 @@ import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.regex.Pattern; - import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.handler.ArtifactHandler; import org.apache.maven.model.Build; @@ -44,9 +45,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le Clerc */ @@ -61,12 +59,10 @@ public abstract class AbstractMavenEventHandler implements MavenEventHandler< */ private static final Pattern ANSI_PATTERN = Pattern.compile("\\x1b\\[[0-9;]*m"); - protected AbstractMavenEventHandler(MavenEventReporter 
reporter) { this.reporter = reporter; } - @Override public boolean handle(Object event) { Type type = getSupportedType(); @@ -88,7 +84,6 @@ public String toString() { return getClass().getName() + "[type=" + getSupportedType() + "]"; } - public Xpp3Dom newElement(String name, String value) { Xpp3Dom element = new Xpp3Dom(name); element.setValue(value); @@ -124,27 +119,34 @@ public Xpp3Dom newElement(@NonNull String name, @Nullable final MavenProject pro throw new RuntimeIOException(e); } - if (absolutePath.endsWith(File.separator + "pom.xml") || absolutePath.endsWith(File.separator + ".flattened-pom.xml")) { + if (absolutePath.endsWith(File.separator + "pom.xml") + || absolutePath.endsWith(File.separator + ".flattened-pom.xml")) { // JENKINS-43616: flatten-maven-plugin replaces the original pom as artifact with a .flattened-pom.xml // no tweak } else if (absolutePath.endsWith(File.separator + "dependency-reduced-pom.xml")) { // JENKINS-42302: maven-shade-plugin creates a temporary project file dependency-reduced-pom.xml // TODO see if there is a better way to implement this "workaround" - absolutePath = absolutePath.replace(File.separator + "dependency-reduced-pom.xml", File.separator + "pom.xml"); + absolutePath = + absolutePath.replace(File.separator + "dependency-reduced-pom.xml", File.separator + "pom.xml"); } else if (absolutePath.endsWith(File.separator + ".git-versioned-pom.xml")) { - // JENKINS-56666 maven-git-versioning-extension causes warnings due to temporary pom.xml file name '.git-versioned-pom.xml' + // JENKINS-56666 maven-git-versioning-extension causes warnings due to temporary pom.xml file name + // '.git-versioned-pom.xml' // https://github.com/qoomon/maven-git-versioning-extension/blob/v4.1.0/src/main/java/me/qoomon/maven/gitversioning/VersioningMojo.java#L39 // TODO see if there is a better way to implement this "workaround" - absolutePath = absolutePath.replace(File.separator + ".git-versioned-pom.xml", File.separator + "pom.xml"); + 
absolutePath = + absolutePath.replace(File.separator + ".git-versioned-pom.xml", File.separator + "pom.xml"); } else { String flattenedPomFilename = getMavenFlattenPluginFlattenedPomFilename(project); if (flattenedPomFilename == null) { - logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName() + "', problems may occur"); + logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName() + + "', problems may occur"); } else { if (absolutePath.endsWith(File.separator + flattenedPomFilename)) { - absolutePath = absolutePath.replace(File.separator + flattenedPomFilename, File.separator + "pom.xml"); + absolutePath = + absolutePath.replace(File.separator + flattenedPomFilename, File.separator + "pom.xml"); } else { - logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName() + "', problems may occur"); + logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName() + + "', problems may occur"); } } } @@ -177,10 +179,10 @@ public Xpp3Dom newElement(@NonNull String name, @Nullable final MavenProject pro */ @Nullable protected String getMavenFlattenPluginFlattenedPomFilename(@NonNull MavenProject project) { - for(Plugin buildPlugin : project.getBuildPlugins()) { + for (Plugin buildPlugin : project.getBuildPlugins()) { if ("org.codehaus.mojo:flatten-maven-plugin".equals(buildPlugin.getKey())) { String mavenConfigurationElement = "flattenedPomFilename"; - for(PluginExecution execution: buildPlugin.getExecutions()) { + for (PluginExecution execution : buildPlugin.getExecutions()) { if (execution.getGoals().contains("flatten")) { if (execution.getConfiguration() instanceof Xpp3Dom) { Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration(); @@ -204,12 +206,12 @@ protected String getMavenFlattenPluginFlattenedPomFilename(@NonNull MavenProject } private static String removeAnsiColor(String input) { - if (input!=null) { - input = 
ANSI_PATTERN.matcher(input).replaceAll(""); - } - return input; + if (input != null) { + input = ANSI_PATTERN.matcher(input).replaceAll(""); + } + return input; } - + public Xpp3Dom newElement(@NonNull String name, @Nullable Throwable t) { Xpp3Dom rootElt = new Xpp3Dom(name); if (t == null) { diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java index acd133bb..3cf0337e 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java @@ -1,12 +1,11 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.eclipse.aether.RepositoryEvent; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - public class ArtifactDeployedEventHandler implements MavenEventHandler { protected final MavenEventReporter reporter; diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java index 0e2404e8..e9e82303 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java @@ -24,18 +24,16 @@ package 
org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.apache.maven.plugin.MojoExecution; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java index 972154c8..639df509 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java @@ -48,5 +48,4 @@ public boolean _handle(DefaultSettingsBuildingRequest request) { reporter.print(root); return true; } - } diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java index 134922c8..437b73d8 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java @@ -24,16 +24,15 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import java.util.Arrays; +import java.util.HashSet; 
+import java.util.Set; import org.apache.maven.project.DependencyResolutionResult; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.graph.Dependency; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; - /** * @author Cyrille Le Clerc */ @@ -69,13 +68,13 @@ protected boolean _handle(DependencyResolutionResult result) { for (Dependency dependency : result.getResolvedDependencies()) { Artifact artifact = dependency.getArtifact(); - if ( !includedScopes.contains(dependency.getScope())) { + if (!includedScopes.contains(dependency.getScope())) { continue; } if (!includeSnapshots && artifact.isSnapshot()) { continue; } - if(!includeReleases && !artifact.isSnapshot()) { + if (!includeReleases && !artifact.isSnapshot()) { continue; } diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java index 6d0b08df..b50e8b34 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java @@ -1,13 +1,12 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.execution.ExecutionEvent; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le 
Clerc */ @@ -19,8 +18,8 @@ public DeployDeployExecutionHandler(@NonNull MavenEventReporter reporter) { @Override protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3Dom root) { super.addDetails(executionEvent, root); - ArtifactRepository artifactRepository = executionEvent.getProject() - .getDistributionManagementArtifactRepository(); + ArtifactRepository artifactRepository = + executionEvent.getProject().getDistributionManagementArtifactRepository(); Xpp3Dom artifactRepositoryElt = new Xpp3Dom("artifactRepository"); root.addChild(artifactRepositoryElt); if (artifactRepository == null) { @@ -34,7 +33,6 @@ protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3D urlElt.setValue(artifactRepository.getUrl()); artifactRepositoryElt.addChild(urlElt); } - } @Nullable diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java index 38983100..f6854c80 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java @@ -1,17 +1,15 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.apache.maven.execution.ExecutionEvent.Type; import org.apache.maven.plugin.MojoExecution; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * 
Handler to alter the * org.apache.maven.plugins:maven-deploy-plugin:deploy-file goal : it will @@ -59,7 +57,7 @@ protected Type getSupportedType() { return ExecutionEvent.Type.MojoSucceeded; } - @Nullable + @Nullable @Override protected String getSupportedPluginGoal() { return "org.apache.maven.plugins:maven-deploy-plugin:deploy-file"; diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java index 4c7afa40..7e09c0ef 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java @@ -24,15 +24,13 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Arrays; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java index 3bf29288..56fec3f5 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java @@ -24,15 +24,13 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; 
+import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Arrays; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - public class InvokerRunExecutionHandler extends AbstractExecutionHandler { public InvokerRunExecutionHandler(MavenEventReporter reporter) { diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java index 7fb8fe53..8fa0ede6 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java @@ -26,18 +26,16 @@ import static org.jenkinsci.plugins.pipeline.maven.eventspy.JenkinsMavenEventSpy.DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * Handler to alter the * org.apache.maven.plugins:maven-invoker-plugin:run goal : it will diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java 
b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java index 40c82b6f..a77df05c 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java @@ -24,14 +24,12 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Arrays; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java index 7923ae7d..a870ff40 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java @@ -24,11 +24,10 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; -import org.apache.maven.execution.ExecutionEvent; -import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; - import java.util.Collections; import java.util.List; +import org.apache.maven.execution.ExecutionEvent; +import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; /** * @author Cyrille Le Clerc diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java 
b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java index d440a37b..927588fc 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java @@ -24,16 +24,14 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java index be03856c..750a7604 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java @@ -24,6 +24,10 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import java.io.File; +import java.io.IOException; +import java.util.Collections; +import java.util.List; import org.apache.maven.artifact.Artifact; import org.apache.maven.execution.ExecutionEvent; import org.apache.maven.project.MavenProject; @@ -31,11 +35,6 @@ import org.jenkinsci.plugins.pipeline.maven.eventspy.RuntimeIOException; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import java.io.File; -import 
java.io.IOException; -import java.util.Collections; -import java.util.List; - /** * @author Cyrille Le Clerc */ @@ -81,7 +80,6 @@ protected void addDetails(ExecutionEvent executionEvent, Xpp3Dom element) { } attachedArtifactsElt.addChild(artifactElt); } - } @Override diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java index c40bb1ec..2aa3c72e 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java @@ -65,6 +65,7 @@ protected boolean _handle(RepositoryEvent repositoryEvent) { } private void print(RepositoryEvent repositoryEvent, long durationInNanos) { - reporter.print(repositoryEvent.getArtifact().toString() + "-" + repositoryEvent.getType() + "-" + durationInNanos + "nanos"); + reporter.print(repositoryEvent.getArtifact().toString() + "-" + repositoryEvent.getType() + "-" + + durationInNanos + "nanos"); } } diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java index 4e57c148..1529f1c6 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java @@ -24,15 +24,13 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Collections; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; 
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * Don't generate an entry in the report for * {@link ExecutionEvent.Type#SessionEnded} diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java index 14d9efe2..91aa6773 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java @@ -24,15 +24,13 @@ package org.jenkinsci.plugins.pipeline.maven.eventspy.handler; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Arrays; import java.util.List; - import org.apache.maven.execution.ExecutionEvent; import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; - /** * @author Cyrille Le Clerc */ diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java index aa8467c0..e96fdbf2 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java @@ -9,17 +9,11 @@ */ public class DevNullMavenEventReporter implements MavenEventReporter { @Override - public void print(Object message) { - - } + 
public void print(Object message) {} @Override - public void print(Xpp3Dom element) { - - } + public void print(Xpp3Dom element) {} @Override - public void close() { - - } + public void close() {} } diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java index e5501d60..6c74962b 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java @@ -32,7 +32,6 @@ import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Date; - import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter; import org.codehaus.plexus.util.xml.XMLWriter; import org.codehaus.plexus.util.xml.XmlWriterUtil; @@ -54,6 +53,7 @@ public class FileMavenEventReporter implements MavenEventReporter { * extension and gets renamed "maven-spy-*.log" at the end of the execution */ File outFile; + PrintWriter out; XMLWriter xmlWriter; /** diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java index 0cc54f8c..59185c2a 100644 --- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java +++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java @@ -30,7 +30,6 @@ import java.io.Writer; import java.nio.charset.Charset; import java.sql.Timestamp; - import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter; import org.codehaus.plexus.util.xml.XMLWriter; import org.codehaus.plexus.util.xml.XmlWriterUtil; 
@@ -57,7 +56,6 @@ public OutputStreamEventReporter(Writer out) { } this.xmlWriter = new PrettyPrintXMLWriter(out); xmlWriter.startElement("mavenExecution"); - } @Override diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java index 424d224e..b3c8106d 100644 --- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java +++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java @@ -30,7 +30,6 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; - import org.apache.maven.eventspy.EventSpy; import org.apache.maven.execution.DefaultMavenExecutionRequest; import org.codehaus.plexus.util.xml.Xpp3Dom; @@ -104,5 +103,4 @@ public Map getData() { spy.onEvent(request); spy.close(); } - } diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java index ac7c1a58..b38c2902 100644 --- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java +++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java @@ -35,11 +35,9 @@ import java.util.Vector; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicInteger; - import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; - import org.apache.maven.eventspy.EventSpy; import org.apache.maven.execution.DefaultMavenExecutionRequest; import org.apache.maven.model.Model; @@ -77,7 +75,9 @@ public 
Map getData() { }); MavenXpp3Reader mavenXpp3Reader = new MavenXpp3Reader(); - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml"); assertThat(in).isNotNull(); Model model = mavenXpp3Reader.read(in); @@ -92,7 +92,8 @@ public Map getData() { @Test // Issue JENKINS-46579 public void testMavenExecutionMTSpyReporters() throws Exception { int numThreads = 100; - final CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current) + final CyclicBarrier barrier = + new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current) final AtomicInteger counter = new AtomicInteger(0); final ExceptionHolder exceptionHolder = new ExceptionHolder(); @@ -103,27 +104,28 @@ public void testMavenExecutionMTSpyReporters() throws Exception { // persisted state, the rest will read it a couple of times. 
for (int i = 0; i < numThreads; i++) { new Thread(new Runnable() { - @Override - public void run() { - try { - barrier.await(); - // Thread.sleep(RandomUtils.nextInt(0, 500)); - JenkinsMavenEventSpy spy = createSpy(); - spyList.add(spy); - DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest(); - request.setPom(new File("path/to/pom.xml")); - request.setGoals(Arrays.asList("clean", "source:jar", "deploy")); - - for (int i = 0; i < 100; i++) { - spy.onEvent(request); + @Override + public void run() { + try { + barrier.await(); + // Thread.sleep(RandomUtils.nextInt(0, 500)); + JenkinsMavenEventSpy spy = createSpy(); + spyList.add(spy); + DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest(); + request.setPom(new File("path/to/pom.xml")); + request.setGoals(Arrays.asList("clean", "source:jar", "deploy")); + + for (int i = 0; i < 100; i++) { + spy.onEvent(request); + } + + } catch (Exception e) { + exceptionHolder.e = e; + } + counter.incrementAndGet(); } - - } catch (Exception e) { - exceptionHolder.e = e; - } - counter.incrementAndGet(); - } - }).start(); + }) + .start(); } barrier.await(); @@ -164,7 +166,8 @@ public void run() { @Test // Issue JENKINS-46579 public void testMavenExecutionMTRequestsSingleSpyReporter() throws Exception { int numThreads = 100; - final CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current) + final CyclicBarrier barrier = + new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current) final AtomicInteger counter = new AtomicInteger(0); final ExceptionHolder exceptionHolder = new ExceptionHolder(); @@ -175,25 +178,26 @@ public void testMavenExecutionMTRequestsSingleSpyReporter() throws Exception { // persisted state, the rest will read it a couple of times. 
for (int i = 0; i < numThreads; i++) { new Thread(new Runnable() { - @Override - public void run() { - try { - barrier.await(); - // Thread.sleep(RandomUtils.nextInt(0, 500)); - - DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest(); - request.setPom(new File("path/to/pom.xml")); - request.setGoals(Arrays.asList("clean", "source:jar", "deploy")); - for (int i = 0; i < 100; i++) { - spy.onEvent(request); + @Override + public void run() { + try { + barrier.await(); + // Thread.sleep(RandomUtils.nextInt(0, 500)); + + DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest(); + request.setPom(new File("path/to/pom.xml")); + request.setGoals(Arrays.asList("clean", "source:jar", "deploy")); + for (int i = 0; i < 100; i++) { + spy.onEvent(request); + } + + } catch (Exception e) { + exceptionHolder.e = e; + } + counter.incrementAndGet(); } - - } catch (Exception e) { - exceptionHolder.e = e; - } - counter.incrementAndGet(); - } - }).start(); + }) + .start(); } barrier.await(); @@ -246,7 +250,6 @@ public void validateXMLDocument(File document) { e.printStackTrace(); fail("Failed to parse spylog: " + document + " error:" + e); } - } public static class ExceptionHolder { diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java index 3244152e..246c5909 100644 --- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java +++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java @@ -32,7 +32,6 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; - import org.apache.maven.eventspy.EventSpy; import org.apache.maven.execution.DefaultMavenExecutionRequest; import org.apache.maven.execution.ExecutionEvent; @@ -74,7 +73,9 @@ public Map getData() 
{ }); MavenXpp3Reader mavenXpp3Reader = new MavenXpp3Reader(); - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml"); assertThat(in).isNotNull(); Model model = mavenXpp3Reader.read(in); diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java index f518ab96..2c8cce3b 100644 --- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java +++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java @@ -4,7 +4,6 @@ import java.io.IOException; import java.io.InputStream; - import org.apache.maven.model.Model; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.apache.maven.project.MavenProject; @@ -19,13 +18,15 @@ public class AbstractMavenEventHandlerTest { @Test public void test_getMavenFlattenPluginFlattenedPomFilename_nameDefinedAtTheExecutionLevel() throws Exception { - test_getMavenFlattenPluginFlattenedPomFilename("org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename.xml", + test_getMavenFlattenPluginFlattenedPomFilename( + "org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename.xml", "${project.artifactId}-${project.version}.pom"); } @Test public void test_getMavenFlattenPluginFlattenedPomFilename_nameDefinedAtThePluginLevel() throws Exception { - test_getMavenFlattenPluginFlattenedPomFilename("org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename2.xml", + 
test_getMavenFlattenPluginFlattenedPomFilename( + "org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename2.xml", "${project.artifactId}-${project.version}.flatten-pom"); } @@ -34,17 +35,19 @@ public void test_getMavenFlattenPluginFlattenedPomFilename_nameNotDefined() thro test_getMavenFlattenPluginFlattenedPomFilename("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml", null); } - protected void test_getMavenFlattenPluginFlattenedPomFilename(String pomFile, String expected) throws IOException, XmlPullParserException { + protected void test_getMavenFlattenPluginFlattenedPomFilename(String pomFile, String expected) + throws IOException, XmlPullParserException { InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(pomFile); Model mavenProjectModel = new MavenXpp3Reader().read(in); MavenProject mavenProject = new MavenProject(mavenProjectModel); - AbstractMavenEventHandler mavenEventHandler = new AbstractMavenEventHandler(new OutputStreamEventReporter(System.err)) { - @Override - protected boolean _handle(Object o) { - return false; - } - }; + AbstractMavenEventHandler mavenEventHandler = + new AbstractMavenEventHandler(new OutputStreamEventReporter(System.err)) { + @Override + protected boolean _handle(Object o) { + return false; + } + }; String actual = mavenEventHandler.getMavenFlattenPluginFlattenedPomFilename(mavenProject); // this unit test does not expand Maven variables assertThat(actual).isEqualTo(expected); diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java index 068c6839..ee7449ba 100644 --- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java +++ 
b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java @@ -5,7 +5,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; - import org.apache.maven.execution.ExecutionEvent; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Model; diff --git a/pipeline-maven/pom.xml b/pipeline-maven/pom.xml index 4b238e81..86b44a11 100644 --- a/pipeline-maven/pom.xml +++ b/pipeline-maven/pom.xml @@ -40,48 +40,11 @@ https://github.com/jenkinsci/pipeline-maven-plugin/ - https://github.com/jenkinsci/pipeline-maven-plugin/releases 1333 + https://github.com/jenkinsci/pipeline-maven-plugin/releases - - org.jenkins-ci.plugins - pipeline-maven-api - - - - - org.jenkins-ci.plugins - pipeline-maven-database - ${project.version} - test - - - io.jenkins.plugins - h2-api - ${jenkins-plugin-h2.version} - test - - - io.jenkins.plugins - mysql-api - ${jenkins-plugin-mysql.version} - test - - - com.google.protobuf - protobuf-java - - - - - io.jenkins.plugins - postgresql-api - ${jenkins-plugin-postgresql.version} - test - org.jenkins-ci.main maven-plugin @@ -180,7 +143,6 @@ org.jenkins-ci.plugins jacoco - ${jenkins-plugin-jacoco.version} true @@ -258,6 +220,10 @@ pipeline-build-step true + + org.jenkins-ci.plugins + pipeline-maven-api + org.jenkins-ci.plugins script-security @@ -292,14 +258,14 @@ org.jvnet.hudson.plugins.findbugs library - - xml-apis - xml-apis - xerces xercesImpl + + xml-apis + xml-apis + @@ -313,6 +279,13 @@ slf4j-simple + + ${project.groupId} + pipeline-maven-spy + ${project.version} + provided + + io.jenkins configuration-as-code @@ -323,6 +296,30 @@ test-harness test + + io.jenkins.plugins + h2-api + ${jenkins-plugin-h2.version} + test + + + io.jenkins.plugins + mysql-api + ${jenkins-plugin-mysql.version} + test + + + com.google.protobuf + protobuf-java + + + + + io.jenkins.plugins + postgresql-api + 
${jenkins-plugin-postgresql.version} + test + org.jenkins-ci.plugins git @@ -347,6 +344,14 @@ tests test + + + org.jenkins-ci.plugins + pipeline-maven-database + ${project.version} + test + org.jenkins-ci.plugins pipeline-stage-step @@ -503,9 +508,12 @@ com.spotify dockerfile-maven-plugin + + false + - build-sshd-image + build-first-sshd-image build @@ -560,9 +568,6 @@ - - false - org.codehaus.mojo diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java index a3ebed0c..a6d6aa37 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java @@ -28,6 +28,9 @@ import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardListBoxModel; import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.Extension; import hudson.ExtensionList; import hudson.init.Terminator; @@ -35,6 +38,15 @@ import hudson.security.ACL; import hudson.util.FormValidation; import hudson.util.ListBoxModel; +import java.io.IOException; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.logging.Level; +import java.util.logging.Logger; import jenkins.model.GlobalConfiguration; import jenkins.model.GlobalConfigurationCategory; import jenkins.model.Jenkins; @@ -51,19 +63,6 @@ import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.verb.POST; -import edu.umd.cs.findbugs.annotations.CheckForNull; -import 
edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.logging.Level; -import java.util.logging.Logger; - /** * @author Cyrille Le Clerc */ @@ -71,7 +70,7 @@ @Symbol("pipelineMaven") public class GlobalPipelineMavenConfig extends GlobalConfiguration { - private final static Logger LOGGER = Logger.getLogger(GlobalPipelineMavenConfig.class.getName()); + private static final Logger LOGGER = Logger.getLogger(GlobalPipelineMavenConfig.class.getName()); private transient volatile PipelineMavenPluginDao dao; @@ -110,10 +109,10 @@ public void setDaoClass(String daoClass) { } private Optional findDaoFromExtension(String daoClass) { - return ExtensionList.lookup(PipelineMavenPluginDao.class) - .stream() - .filter(pipelineMavenPluginDao -> StringUtils.equals(pipelineMavenPluginDao.getClass().getName(), daoClass)) - .findFirst(); + return ExtensionList.lookup(PipelineMavenPluginDao.class).stream() + .filter(pipelineMavenPluginDao -> + StringUtils.equals(pipelineMavenPluginDao.getClass().getName(), daoClass)) + .findFirst(); } @Override @@ -227,7 +226,7 @@ public synchronized void setJdbcCredentialsId(String jdbcCredentialsId) { @Override public boolean configure(StaplerRequest req, JSONObject json) throws FormException { - if(!StringUtils.equals(json.getString("daoClass"), daoClass)) { + if (!StringUtils.equals(json.getString("daoClass"), daoClass)) { closeDatasource(); this.dao = null; } @@ -279,16 +278,11 @@ public synchronized PipelineTriggerService getPipelineTriggerService() { @NonNull public Set getTriggerDownstreamBuildsResultsCriteria() { Set result = new HashSet<>(5); - if (this.triggerDownstreamUponResultSuccess) - result.add(Result.SUCCESS); - if (this.triggerDownstreamUponResultUnstable) - result.add(Result.UNSTABLE); - if 
(this.triggerDownstreamUponResultAborted) - result.add(Result.ABORTED); - if (this.triggerDownstreamUponResultNotBuilt) - result.add(Result.NOT_BUILT); - if (this.triggerDownstreamUponResultFailure) - result.add(Result.FAILURE); + if (this.triggerDownstreamUponResultSuccess) result.add(Result.SUCCESS); + if (this.triggerDownstreamUponResultUnstable) result.add(Result.UNSTABLE); + if (this.triggerDownstreamUponResultAborted) result.add(Result.ABORTED); + if (this.triggerDownstreamUponResultNotBuilt) result.add(Result.NOT_BUILT); + if (this.triggerDownstreamUponResultFailure) result.add(Result.FAILURE); return result; } @@ -306,21 +300,19 @@ public ListBoxModel doFillJdbcCredentialsIdItems() { .includeEmptyValue() .withMatching( CredentialsMatchers.always(), - CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, - Jenkins.get(), - ACL.SYSTEM, - Collections.EMPTY_LIST)); + CredentialsProvider.lookupCredentials( + UsernamePasswordCredentials.class, Jenkins.get(), ACL.SYSTEM, Collections.EMPTY_LIST)); } @POST public FormValidation doValidateJdbcConnection( - @QueryParameter String jdbcUrl, - @QueryParameter String properties, - @QueryParameter String jdbcCredentialsId, - @QueryParameter String daoClass) { + @QueryParameter String jdbcUrl, + @QueryParameter String properties, + @QueryParameter String jdbcCredentialsId, + @QueryParameter String daoClass) { Jenkins.get().checkPermission(Jenkins.ADMINISTER); Optional optionalPipelineMavenPluginDao = findDaoFromExtension(daoClass); - if(optionalPipelineMavenPluginDao.isEmpty()) { + if (optionalPipelineMavenPluginDao.isEmpty()) { return FormValidation.ok("OK"); } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java index c793170a..1b51a189 100644 --- 
a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java @@ -3,14 +3,13 @@ import com.cloudbees.hudson.plugins.folder.AbstractFolder; import com.cloudbees.hudson.plugins.folder.AbstractFolderProperty; import com.cloudbees.hudson.plugins.folder.AbstractFolderPropertyDescriptor; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import jenkins.mvn.GlobalSettingsProvider; import jenkins.mvn.SettingsProvider; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; -import edu.umd.cs.findbugs.annotations.NonNull; - /** * Provides a way to override maven configuration at a folder level */ @@ -37,8 +36,7 @@ public class MavenConfigFolderOverrideProperty extends AbstractFolderProperty publishersExceptions) { - super(publishersExceptions.size() + " exceptions occured within the publishers of the withMaven pipeline step:\n" - + publishersExceptions.stream().map(e -> { - StringBuilder builder = new StringBuilder("- "); - builder.append(e.getMessage()); - if (e.getCause() != null) { - builder.append(": ").append(e.getCause().getMessage()); - } - return builder.toString(); - }).collect(joining())); + super(publishersExceptions.size() + + " exceptions occured within the publishers of the withMaven pipeline step:\n" + + publishersExceptions.stream() + .map(e -> { + StringBuilder builder = new StringBuilder("- "); + builder.append(e.getMessage()); + if (e.getCause() != null) { + builder.append(": ").append(e.getCause().getMessage()); + } + return builder.toString(); + }) + .collect(joining())); } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java index b0f86207..966046f2 100644 --- 
a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java @@ -1,26 +1,26 @@ package org.jenkinsci.plugins.pipeline.maven; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.ExtensionPoint; import hudson.model.AbstractDescribableImpl; import hudson.model.Descriptor; -import org.jenkinsci.plugins.workflow.steps.StepContext; -import org.kohsuke.stapler.DataBoundSetter; -import org.w3c.dom.Element; - -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.IOException; import java.io.Serializable; import java.util.logging.Logger; +import org.jenkinsci.plugins.workflow.steps.StepContext; +import org.kohsuke.stapler.DataBoundSetter; +import org.w3c.dom.Element; /** * Experimental interface, likely to change in the future. * * @author Cyrille Le Clerc */ -public abstract class MavenPublisher extends AbstractDescribableImpl implements ExtensionPoint, Comparable, Serializable { +public abstract class MavenPublisher extends AbstractDescribableImpl + implements ExtensionPoint, Comparable, Serializable { - private final static Logger LOGGER = Logger.getLogger(MavenPublisher.class.getName()); + private static final Logger LOGGER = Logger.getLogger(MavenPublisher.class.getName()); private boolean disabled; @@ -39,7 +39,8 @@ public void setDisabled(boolean disabled) { * @throws IOException * @throws InterruptedException */ - public abstract void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException; + public abstract void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException; @Override public DescriptorImpl getDescriptor() { @@ -53,12 +54,11 @@ public int compareTo(MavenPublisher o) { @Override public String toString() { - 
return getClass().getName() + "[" + - "disabled=" + disabled + - ']'; + return getClass().getName() + "[" + "disabled=" + disabled + ']'; } - public static abstract class DescriptorImpl extends Descriptor implements Comparable { + public abstract static class DescriptorImpl extends Descriptor + implements Comparable { /** * @return the ordinal of this reporter to execute publishers in predictable order. The smallest ordinal is executed first. * @see #compareTo(MavenPublisher) @@ -73,8 +73,7 @@ public int ordinal() { * @return name of the marker file. {@code null} if no marker file is defined for this reporter */ @Nullable - abstract public String getSkipFileName(); - + public abstract String getSkipFileName(); @Override public int compareTo(DescriptorImpl o) { @@ -87,6 +86,4 @@ public int compareTo(DescriptorImpl o) { return compare; } } - - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java index f7a7f1c0..be98c58d 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java @@ -1,12 +1,10 @@ package org.jenkinsci.plugins.pipeline.maven; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.DescriptorExtensionList; import hudson.model.Descriptor; import hudson.model.TaskListener; -import jenkins.model.Jenkins; - -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; @@ -15,12 +13,12 @@ import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; +import jenkins.model.Jenkins; /** * @author Cyrille Le Clerc */ public enum MavenPublisherStrategy { - 
IMPLICIT(Messages.publisher_strategy_implicit_description()) { /** *

Build the list of {@link MavenPublisher}s that should be invoked for the build execution of the given {@link TaskListener} @@ -38,7 +36,8 @@ public enum MavenPublisherStrategy { * @param listener */ @NonNull - public List buildPublishersList(@NonNull List configuredPublishers, @NonNull TaskListener listener) { + public List buildPublishersList( + @NonNull List configuredPublishers, @NonNull TaskListener listener) { // configuration passed as parameter of "withMaven(options=[...]){}" // mavenPublisher.descriptor.id -> mavenPublisher @@ -55,23 +54,28 @@ public List buildPublishersList(@NonNull List co Map globallyConfiguredPublishersById = new HashMap<>(); GlobalPipelineMavenConfig globalPipelineMavenConfig = GlobalPipelineMavenConfig.get(); - List globallyConfiguredPublishers = globalPipelineMavenConfig == null ? Collections.emptyList() : globalPipelineMavenConfig.getPublisherOptions(); + List globallyConfiguredPublishers = globalPipelineMavenConfig == null + ? Collections.emptyList() + : globalPipelineMavenConfig.getPublisherOptions(); if (globallyConfiguredPublishers == null) { globallyConfiguredPublishers = Collections.emptyList(); } for (MavenPublisher mavenPublisher : globallyConfiguredPublishers) { - globallyConfiguredPublishersById.put(mavenPublisher.getDescriptor().getId(), mavenPublisher); + globallyConfiguredPublishersById.put( + mavenPublisher.getDescriptor().getId(), mavenPublisher); } - // mavenPublisher.descriptor.id -> mavenPublisher Map defaultPublishersById = new HashMap<>(); - DescriptorExtensionList> descriptorList = Jenkins.get().getDescriptorList(MavenPublisher.class); + DescriptorExtensionList> descriptorList = + Jenkins.get().getDescriptorList(MavenPublisher.class); for (Descriptor descriptor : descriptorList) { try { defaultPublishersById.put(descriptor.getId(), descriptor.clazz.newInstance()); } catch (InstantiationException | IllegalAccessException e) { - PrintWriter error = listener.error("[withMaven] Exception instantiation default config 
for Maven Publisher '" + descriptor.getDisplayName() + "' / " + descriptor.getId() + ": " + e); + PrintWriter error = + listener.error("[withMaven] Exception instantiation default config for Maven Publisher '" + + descriptor.getDisplayName() + "' / " + descriptor.getId() + ": " + e); e.printStackTrace(error); error.close(); LOGGER.log(Level.WARNING, "Exception instantiating " + descriptor.clazz + ": " + e, e); @@ -79,11 +83,16 @@ public List buildPublishersList(@NonNull List co } } - if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] Maven Publishers with configuration provided by the pipeline: " + configuredPublishersById.values()); - listener.getLogger().println("[withMaven] Maven Publishers with configuration defined globally: " + globallyConfiguredPublishersById.values()); - listener.getLogger().println("[withMaven] Maven Publishers with default configuration: " + defaultPublishersById.values()); + listener.getLogger() + .println("[withMaven] Maven Publishers with configuration provided by the pipeline: " + + configuredPublishersById.values()); + listener.getLogger() + .println("[withMaven] Maven Publishers with configuration defined globally: " + + globallyConfiguredPublishersById.values()); + listener.getLogger() + .println("[withMaven] Maven Publishers with default configuration: " + + defaultPublishersById.values()); } // TODO FILTER @@ -105,12 +114,12 @@ public List buildPublishersList(@NonNull List co EXPLICIT(Messages.publisher_strategy_explicit_description()) { @NonNull @Override - public List buildPublishersList - (@NonNull List configuredPublishers, @NonNull TaskListener listener) { + public List buildPublishersList( + @NonNull List configuredPublishers, @NonNull TaskListener listener) { // filter null entries caused by missing plugins List result = new ArrayList<>(); - for(MavenPublisher publisher: configuredPublishers) { + for (MavenPublisher publisher : configuredPublishers) { if (publisher != null) { 
result.add(publisher); } @@ -128,7 +137,11 @@ public List buildPublishersList(@NonNull List co this.description = description; } - public MavenPublisher buildConfiguredMavenPublisher(@Nullable MavenPublisher pipelinePublisher, @Nullable MavenPublisher globallyConfiguredPublisher, @NonNull MavenPublisher defaultPublisher, @NonNull TaskListener listener) { + public MavenPublisher buildConfiguredMavenPublisher( + @Nullable MavenPublisher pipelinePublisher, + @Nullable MavenPublisher globallyConfiguredPublisher, + @NonNull MavenPublisher defaultPublisher, + @NonNull TaskListener listener) { MavenPublisher result; String logMessage; @@ -142,32 +155,37 @@ public MavenPublisher buildConfiguredMavenPublisher(@Nullable MavenPublisher pip } else if (pipelinePublisher != null && globallyConfiguredPublisher == null) { result = pipelinePublisher; logMessage = "pipeline"; - } else if (pipelinePublisher != null && globallyConfiguredPublisher != null) { + } else if (pipelinePublisher != null && globallyConfiguredPublisher != null) { // workaround FindBugs "Bug kind and pattern: NP - NP_NULL_ON_SOME_PATH" // check pipelinePublisher and globallyConfiguredPublisher are non null even if it is useless result = pipelinePublisher; logMessage = "pipeline"; - listener.getLogger().println("[withMaven] WARNING merging publisher configuration defined in the 'Global Tool Configuration' and at the pipeline level is not yet supported." 
+ - " Use pipeline level configuration for '" + result.getDescriptor().getDisplayName() + "'"); -// -// PropertyDescriptor[] propertyDescriptors = PropertyUtils.getPropertyDescriptors(defaultPublisher); -// for(PropertyDescriptor propertyDescriptor: propertyDescriptors) { -// Method readMethod = propertyDescriptor.getReadMethod(); -// Method writeMethod = propertyDescriptor.getWriteMethod(); -// -// Object defaultValue = readMethod.invoke(defaultPublisher); -// Object globallyDefinedValue = readMethod.invoke(globallyConfiguredPublisher); -// Object pipelineValue = readMethod.invoke(pipelinePublisher); -// } + listener.getLogger() + .println( + "[withMaven] WARNING merging publisher configuration defined in the 'Global Tool Configuration' and at the pipeline level is not yet supported." + + " Use pipeline level configuration for '" + + result.getDescriptor().getDisplayName() + "'"); + // + // PropertyDescriptor[] propertyDescriptors = + // PropertyUtils.getPropertyDescriptors(defaultPublisher); + // for(PropertyDescriptor propertyDescriptor: propertyDescriptors) { + // Method readMethod = propertyDescriptor.getReadMethod(); + // Method writeMethod = propertyDescriptor.getWriteMethod(); + // + // Object defaultValue = readMethod.invoke(defaultPublisher); + // Object globallyDefinedValue = readMethod.invoke(globallyConfiguredPublisher); + // Object pipelineValue = readMethod.invoke(pipelinePublisher); + // } } else { throw new IllegalStateException("Should not happen, workaround for Findbugs NP_NULL_ON_SOME_PATH above"); } if (LOGGER.isLoggable(Level.FINE)) - listener.getLogger().println("[withMaven] Use " + logMessage + " defined publisher for '" + result.getDescriptor().getDisplayName() + "'"); + listener.getLogger() + .println("[withMaven] Use " + logMessage + " defined publisher for '" + + result.getDescriptor().getDisplayName() + "'"); return result; - } public String getDescription() { @@ -190,7 +208,8 @@ public String getDescription() { * @param listener */ 
@NonNull - public abstract List buildPublishersList(@NonNull List configuredPublishers, @NonNull TaskListener listener); + public abstract List buildPublishersList( + @NonNull List configuredPublishers, @NonNull TaskListener listener); - private final static Logger LOGGER = Logger.getLogger(MavenPublisherStrategy.class.getName()); + private static final Logger LOGGER = Logger.getLogger(MavenPublisherStrategy.class.getName()); } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java index c8fa4c6c..0bc7ac36 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java @@ -24,23 +24,10 @@ package org.jenkinsci.plugins.pipeline.maven; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.FilePath; import hudson.model.Run; import hudson.model.TaskListener; -import jenkins.model.InterruptedBuildAction; -import org.apache.commons.lang.StringUtils; -import org.jenkinsci.plugins.pipeline.maven.publishers.JenkinsMavenEventSpyLogsPublisher; -import org.jenkinsci.plugins.pipeline.maven.publishers.MavenPipelinePublisherException; -import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; -import org.jenkinsci.plugins.workflow.steps.StepContext; -import org.w3c.dom.Element; -import org.xml.sax.SAXException; - -import edu.umd.cs.findbugs.annotations.NonNull; -import javax.xml.XMLConstants; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; @@ -53,6 +40,18 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; +import javax.xml.XMLConstants; +import 
javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import jenkins.model.InterruptedBuildAction; +import org.apache.commons.lang.StringUtils; +import org.jenkinsci.plugins.pipeline.maven.publishers.JenkinsMavenEventSpyLogsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.MavenPipelinePublisherException; +import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; +import org.jenkinsci.plugins.workflow.steps.StepContext; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; /** * @author Cyrille Le Clerc @@ -63,13 +62,19 @@ public class MavenSpyLogProcessor implements Serializable { private static final Logger LOGGER = Logger.getLogger(MavenSpyLogProcessor.class.getName()); - public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath mavenSpyLogFolder, @NonNull List options, - @NonNull MavenPublisherStrategy publisherStrategy) throws IOException, InterruptedException { + public void processMavenSpyLogs( + @NonNull StepContext context, + @NonNull FilePath mavenSpyLogFolder, + @NonNull List options, + @NonNull MavenPublisherStrategy publisherStrategy) + throws IOException, InterruptedException { long nanosBefore = System.nanoTime(); FilePath[] mavenSpyLogsList = mavenSpyLogFolder.list("maven-spy-*.log"); - LOGGER.log(Level.FINE, "Found {0} maven execution reports in {1}", new Object[]{mavenSpyLogsList.length, mavenSpyLogFolder}); + LOGGER.log(Level.FINE, "Found {0} maven execution reports in {1}", new Object[] { + mavenSpyLogsList.length, mavenSpyLogFolder + }); TaskListener listener = context.get(TaskListener.class); FilePath workspace = context.get(FilePath.class); @@ -106,7 +111,8 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath documentBuilder = dbf.newDocumentBuilder(); - // See https://github.com/jenkinsci/jenkins/blob/jenkins-2.176/core/src/main/java/jenkins/util/xml/XMLUtils.java#L114 + // See 
+ // https://github.com/jenkinsci/jenkins/blob/jenkins-2.176/core/src/main/java/jenkins/util/xml/XMLUtils.java#L114 documentBuilder.setEntityResolver(XmlUtils.RestrictiveEntityResolver.INSTANCE); } catch (ParserConfigurationException e) { throw new IllegalStateException("Failure to create a DocumentBuilder", e); @@ -129,10 +135,12 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath new JenkinsMavenEventSpyLogsPublisher().process(context, mavenSpyLogs); } - Element mavenSpyLogsElt = documentBuilder.parse(mavenSpyLogsInputStream).getDocumentElement(); + Element mavenSpyLogsElt = + documentBuilder.parse(mavenSpyLogsInputStream).getDocumentElement(); - if (LOGGER.isLoggable(Level.FINE)){ - listener.getLogger().println("[withMaven] Maven Publisher Strategy: " + publisherStrategy.getDescription()); + if (LOGGER.isLoggable(Level.FINE)) { + listener.getLogger() + .println("[withMaven] Maven Publisher Strategy: " + publisherStrategy.getDescription()); } List mavenPublishers = publisherStrategy.buildPublishersList(options, listener); List exceptions = new ArrayList<>(); @@ -140,34 +148,52 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath String skipFileName = mavenPublisher.getDescriptor().getSkipFileName(); if (Boolean.TRUE.equals(mavenPublisher.isDisabled())) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] Skip '" + mavenPublisher.getDescriptor().getDisplayName() + "' disabled by configuration"); + listener.getLogger() + .println("[withMaven] Skip '" + + mavenPublisher.getDescriptor().getDisplayName() + + "' disabled by configuration"); } - } else if (StringUtils.isNotEmpty(skipFileName) && workspace.child(skipFileName).exists()) { + } else if (StringUtils.isNotEmpty(skipFileName) + && workspace.child(skipFileName).exists()) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] Skip '" + mavenPublisher.getDescriptor().getDisplayName() + "' 
disabled by marker file '" + skipFileName + "'"); + listener.getLogger() + .println("[withMaven] Skip '" + + mavenPublisher.getDescriptor().getDisplayName() + + "' disabled by marker file '" + skipFileName + "'"); } } else { long nanosBeforePublisher = System.nanoTime(); if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] Run '" + mavenPublisher.getDescriptor().getDisplayName() + "'..."); + listener.getLogger() + .println("[withMaven] Run '" + + mavenPublisher.getDescriptor().getDisplayName() + "'..."); } try { mavenPublisher.process(context, mavenSpyLogsElt); } catch (InterruptedException e) { - listener.error("[withMaven] Processing of Maven build outputs interrupted in " + mavenPublisher.toString() + " after " + - TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS) + "ms."); - Thread.currentThread().interrupt(); // set interrupt flag + listener.error("[withMaven] Processing of Maven build outputs interrupted in " + + mavenPublisher.toString() + " after " + + TimeUnit.MILLISECONDS.convert( + System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS) + + "ms."); + Thread.currentThread().interrupt(); // set interrupt flag throw e; } catch (MavenPipelinePublisherException e) { exceptions.add(e); } catch (Exception e) { - PrintWriter error = listener.error("[withMaven] WARNING Exception executing Maven reporter '" + mavenPublisher.getDescriptor().getDisplayName() + - "' / " + mavenPublisher.getDescriptor().getId() + "." + - " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org "); + PrintWriter error = listener.error( + "[withMaven] WARNING Exception executing Maven reporter '" + + mavenPublisher.getDescriptor().getDisplayName() + "' / " + + mavenPublisher.getDescriptor().getId() + "." 
+ + " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org "); e.printStackTrace(error); - exceptions.add(new MavenPipelinePublisherException(mavenPublisher.getDescriptor().getDisplayName(), "", e)); + exceptions.add(new MavenPipelinePublisherException( + mavenPublisher.getDescriptor().getDisplayName(), "", e)); } finally { - durationInMillisPerPublisher.add(new AbstractMap.SimpleImmutableEntry(mavenPublisher.getDescriptor().getDisplayName(), TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBeforePublisher, TimeUnit.NANOSECONDS))); + durationInMillisPerPublisher.add(new AbstractMap.SimpleImmutableEntry( + mavenPublisher.getDescriptor().getDisplayName(), + TimeUnit.MILLISECONDS.convert( + System.nanoTime() - nanosBeforePublisher, TimeUnit.NANOSECONDS))); } } } @@ -180,43 +206,55 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath Run run = context.get(Run.class); String msg = ""; if (run.getActions(InterruptedBuildAction.class).isEmpty()) { - msg = "[withMaven] WARNING Exception parsing the logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + ", ignore file. " + - " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org "; + msg = "[withMaven] WARNING Exception parsing the logs generated by the Jenkins Maven Event Spy " + + mavenSpyLogs + ", ignore file. 
" + + " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org "; } else { // job has been aborted (see InterruptedBuildAction) - msg = "[withMaven] WARNING logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + " are invalid, probably due to the interruption of the job, ignore file."; + msg = "[withMaven] WARNING logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + + " are invalid, probably due to the interruption of the job, ignore file."; } PrintWriter errorWriter = listener.error(msg); e.printStackTrace(errorWriter); throw new MavenPipelineException(e); } catch (InterruptedException e) { - PrintWriter errorWriter = listener.error("[withMaven] Processing of Maven build outputs interrupted after " + - TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS) + "ms."); + PrintWriter errorWriter = + listener.error("[withMaven] Processing of Maven build outputs interrupted after " + + TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS) + + "ms."); if (LOGGER.isLoggable(Level.FINE)) { e.printStackTrace(errorWriter); } - Thread.currentThread().interrupt(); // set interrupt flag + Thread.currentThread().interrupt(); // set interrupt flag return; } catch (Exception e) { - PrintWriter errorWriter = listener.error("[withMaven] WARNING Exception processing the logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + ", ignore file. " + - " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org "); + PrintWriter errorWriter = listener.error( + "[withMaven] WARNING Exception processing the logs generated by the Jenkins Maven Event Spy " + + mavenSpyLogs + ", ignore file. 
" + + " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org "); e.printStackTrace(errorWriter); throw new MavenPipelineException(e); } finally { if (LOGGER.isLoggable(Level.INFO)) { - listener.getLogger().println("[withMaven] Publishers: " + - durationInMillisPerPublisher.stream().filter(entry -> entry.getValue() > 0). - map(entry -> entry.getKey() + ": " + entry.getValue() + " ms"). - collect(Collectors.joining(", "))); + listener.getLogger() + .println("[withMaven] Publishers: " + + durationInMillisPerPublisher.stream() + .filter(entry -> entry.getValue() > 0) + .map(entry -> entry.getKey() + ": " + entry.getValue() + " ms") + .collect(Collectors.joining(", "))); } } } FilePath[] mavenSpyLogsInterruptedList = mavenSpyLogFolder.list("maven-spy-*.log.tmp"); if (mavenSpyLogsInterruptedList.length > 0) { - listener.getLogger().print("[withMaven] One or multiple Maven executions have been ignored by the " + - "Jenkins Pipeline Maven Plugin because they have been interrupted before completion " + - "(" + mavenSpyLogsInterruptedList.length + "). See "); - listener.hyperlink("https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker", "Pipeline Maven Plugin FAQ"); + listener.getLogger() + .print("[withMaven] One or multiple Maven executions have been ignored by the " + + "Jenkins Pipeline Maven Plugin because they have been interrupted before completion " + + "(" + + mavenSpyLogsInterruptedList.length + "). 
See "); + listener.hyperlink( + "https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker", + "Pipeline Maven Plugin FAQ"); listener.getLogger().println(" for more details."); if (LOGGER.isLoggable(Level.FINE)) { for (FilePath mavenSpyLogsInterruptedLogs : mavenSpyLogsInterruptedList) { @@ -227,26 +265,23 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath } /* - - */ + + */ public static class PluginInvocation { public String groupId, artifactId, version, goal, executionId; public String getId() { - return artifactId + ":" + - goal + " " + - "(" + executionId + ")"; + return artifactId + ":" + goal + " " + "(" + executionId + ")"; } @Override public String toString() { - return "PluginInvocation{" + - groupId + ":" + - artifactId + ":" + - version + "@" + - goal + " " + - " " + executionId + - '}'; + return "PluginInvocation{" + groupId + + ":" + artifactId + + ":" + version + + "@" + goal + + " " + " " + + executionId + '}'; } } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java index 58f9b728..b5fb9d3f 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java @@ -26,14 +26,9 @@ import hudson.Extension; import hudson.model.AdministrativeMonitor; +import java.util.Objects; import jenkins.model.Jenkins; import org.apache.commons.lang.StringUtils; -import org.kohsuke.stapler.StaplerRequest; -import org.kohsuke.stapler.StaplerResponse; -import org.kohsuke.stapler.interceptor.RequirePOST; - -import java.io.IOException; -import java.util.Objects; /** * @@ -45,8 +40,8 @@ public class 
NeededPipelineMavenDatabasePluginAdminMonitor extends Administrativ public boolean isActivated() { String jdbcUrl = Objects.requireNonNull(GlobalPipelineMavenConfig.get()).getJdbcUrl(); return (StringUtils.startsWith(jdbcUrl, "jdbc:h2") - || StringUtils.startsWith(jdbcUrl, "jdbc:mysql") - || StringUtils.startsWith(jdbcUrl, "jdbc:postgresql")) + || StringUtils.startsWith(jdbcUrl, "jdbc:mysql") + || StringUtils.startsWith(jdbcUrl, "jdbc:postgresql")) && Jenkins.get().getPlugin("pipeline-maven-database") == null; } @@ -54,5 +49,4 @@ public boolean isActivated() { public String getDisplayName() { return Messages.admin_monitor_needed_pipeline_maven_database_plugin_description(); } - -} \ No newline at end of file +} diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java index 2e21c0a6..85714aab 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java @@ -38,6 +38,9 @@ import hudson.tasks.Maven; import hudson.tasks.Maven.MavenInstallation; import hudson.util.ListBoxModel; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; import jenkins.model.Jenkins; import jenkins.mvn.GlobalMavenConfig; import jenkins.mvn.SettingsProvider; @@ -55,10 +58,6 @@ import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - /** * Configures maven environment to use within a pipeline job by calling sh mvn or bat mvn. * The selected maven installation will be configured and prepended to the path. 
@@ -79,8 +78,7 @@ public class WithMavenStep extends Step { private Boolean traceability = null; @DataBoundConstructor - public WithMavenStep() { - } + public WithMavenStep() {} public String getTempBinDir() { return tempBinDir; @@ -272,9 +270,10 @@ public ListBoxModel doFillJdkItems(@AncestorInPath Item item) { } return r; } - + @Restricted(NoExternalUse.class) // Only for UI calls - public ListBoxModel doFillMavenSettingsConfigItems(@AncestorInPath Item item, @AncestorInPath ItemGroup context) { + public ListBoxModel doFillMavenSettingsConfigItems( + @AncestorInPath Item item, @AncestorInPath ItemGroup context) { ListBoxModel r = new ListBoxModel(); if (item == null) { return r; // it's empty @@ -288,7 +287,8 @@ public ListBoxModel doFillMavenSettingsConfigItems(@AncestorInPath Item item, @A } @Restricted(NoExternalUse.class) // Only for UI calls - public ListBoxModel doFillGlobalMavenSettingsConfigItems(@AncestorInPath Item item, @AncestorInPath ItemGroup context) { + public ListBoxModel doFillGlobalMavenSettingsConfigItems( + @AncestorInPath Item item, @AncestorInPath ItemGroup context) { ListBoxModel r = new ListBoxModel(); if (item == null) { return r; // it's empty @@ -308,7 +308,7 @@ public ListBoxModel doFillPublisherStrategyItems(@AncestorInPath Item item, @Anc return r; // it's empty } item.checkPermission(Item.EXTENDED_READ); - for(MavenPublisherStrategy publisherStrategy: MavenPublisherStrategy.values()) { + for (MavenPublisherStrategy publisherStrategy : MavenPublisherStrategy.values()) { r.add(publisherStrategy.getDescription(), publisherStrategy.name()); } return r; @@ -320,6 +320,5 @@ public ListBoxModel doFillPublisherStrategyItems(@AncestorInPath Item item, @Anc public DescriptorExtensionList getOptionsDescriptors() { return Jenkins.get().getDescriptorList(MavenPublisher.class); } - } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java 
b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java index 2e8f041b..03c4e06b 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java @@ -24,7 +24,6 @@ package org.jenkinsci.plugins.pipeline.maven; - import org.jenkinsci.plugins.workflow.steps.AbstractStepExecutionImpl; import org.jenkinsci.plugins.workflow.steps.BodyExecutionCallback; import org.jenkinsci.plugins.workflow.steps.StepContext; @@ -50,8 +49,7 @@ public boolean start() { private class WithMavenStepExecutionCallBack extends BodyExecutionCallback.TailCall { private static final long serialVersionUID = 1L; - private WithMavenStepExecutionCallBack() { - } + private WithMavenStepExecutionCallBack() {} @Override protected void finished(StepContext context) { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java index 988cee2b..3fadbdd7 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java @@ -24,25 +24,26 @@ package org.jenkinsci.plugins.pipeline.maven; - import com.cloudbees.hudson.plugins.folder.AbstractFolder; import com.cloudbees.plugins.credentials.Credentials; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.IdCredentials; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; -import com.cloudbees.plugins.credentials.common.UsernameCredentials; import com.cloudbees.plugins.credentials.domains.DomainRequirement; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import 
edu.umd.cs.findbugs.annotations.Nullable; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.AbortException; import hudson.EnvVars; import hudson.ExtensionList; import hudson.FilePath; +import hudson.Functions; import hudson.Launcher; import hudson.Launcher.ProcStarter; import hudson.Proc; import hudson.Util; import hudson.console.ConsoleLogFilter; -import hudson.model.BuildListener; import hudson.model.Computer; import hudson.model.Item; import hudson.model.ItemGroup; @@ -54,6 +55,24 @@ import hudson.slaves.WorkspaceList; import hudson.tasks.Maven; import hudson.tasks.Maven.MavenInstallation; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.security.CodeSource; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.UUID; +import java.util.function.Function; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; import jenkins.model.Jenkins; import jenkins.mvn.DefaultGlobalSettingsProvider; import jenkins.mvn.DefaultSettingsProvider; @@ -84,30 +103,9 @@ import org.jenkinsci.plugins.workflow.steps.StepContext; import org.springframework.util.ClassUtils; -import edu.umd.cs.findbugs.annotations.CheckForNull; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import hudson.Functions; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.security.CodeSource; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.UUID; -import 
java.util.function.Function; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.stream.Collectors; - -@SuppressFBWarnings(value = "SE_TRANSIENT_FIELD_NOT_RESTORED", justification = "Contextual fields used only in start(); no onResume needed") +@SuppressFBWarnings( + value = "SE_TRANSIENT_FIELD_NOT_RESTORED", + justification = "Contextual fields used only in start(); no onResume needed") class WithMavenStepExecution2 extends GeneralNonBlockingStepExecution { private static final long serialVersionUID = 1L; @@ -154,7 +152,8 @@ class WithMavenStepExecution2 extends GeneralNonBlockingStepExecution { build = context.get(Run.class); } - @Override public boolean start() throws Exception { + @Override + public boolean start() throws Exception { run(this::doStart); return false; } @@ -179,16 +178,20 @@ protected boolean doStart() throws Exception { console.trace("[withMaven] Options: " + step.getOptions()); ExtensionList availableMavenPublishers = Jenkins.get().getExtensionList(MavenPublisher.class); - console.trace("[withMaven] Available options: " + availableMavenPublishers.stream().map( - MavenPublisher::toString).collect(Collectors.joining(","))); + console.trace("[withMaven] Available options: " + + availableMavenPublishers.stream() + .map(MavenPublisher::toString) + .collect(Collectors.joining(","))); getComputer(); withContainer = detectWithContainer(); if (withContainer) { - console.trace("[withMaven] IMPORTANT \"withMaven(){...}\" step running within a Docker container. See " ); - console.traceHyperlink("https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker", "Pipeline Maven Plugin FAQ"); + console.trace("[withMaven] IMPORTANT \"withMaven(){...}\" step running within a Docker container. 
See "); + console.traceHyperlink( + "https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker", + "Pipeline Maven Plugin FAQ"); console.trace(" in case of problem."); } @@ -199,23 +202,35 @@ protected boolean doStart() throws Exception { setupMaven(credentials); if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.log(Level.FINE, this.build + " - Track usage and mask password of credentials " + credentials.stream().map(new CredentialsToPrettyString()).collect(Collectors.joining(","))); + LOGGER.log( + Level.FINE, + this.build + " - Track usage and mask password of credentials " + + credentials.stream() + .map(new CredentialsToPrettyString()) + .collect(Collectors.joining(","))); } CredentialsProvider.trackAll(build, new ArrayList<>(credentials)); ConsoleLogFilter originalFilter = getContext().get(ConsoleLogFilter.class); - ConsoleLogFilter maskSecretsFilter = MaskPasswordsConsoleLogFilter.newMaskPasswordsConsoleLogFilter(credentials, getComputer().getDefaultCharset()); - MavenColorizerConsoleLogFilter mavenColorizerFilter = new MavenColorizerConsoleLogFilter(getComputer().getDefaultCharset().name()); + ConsoleLogFilter maskSecretsFilter = MaskPasswordsConsoleLogFilter.newMaskPasswordsConsoleLogFilter( + credentials, getComputer().getDefaultCharset()); + MavenColorizerConsoleLogFilter mavenColorizerFilter = new MavenColorizerConsoleLogFilter( + getComputer().getDefaultCharset().name()); ConsoleLogFilter newFilter = BodyInvoker.mergeConsoleLogFilters( - BodyInvoker.mergeConsoleLogFilters(originalFilter, maskSecretsFilter), - mavenColorizerFilter); + BodyInvoker.mergeConsoleLogFilters(originalFilter, maskSecretsFilter), mavenColorizerFilter); - EnvironmentExpander envEx = EnvironmentExpander.merge(getContext().get(EnvironmentExpander.class), new ExpanderImpl(envOverride)); + EnvironmentExpander envEx = + EnvironmentExpander.merge(getContext().get(EnvironmentExpander.class), new ExpanderImpl(envOverride)); 
LOGGER.log(Level.FINEST, "envOverride: {0}", envOverride); // JENKINS-40484 - getContext().newBodyInvoker().withContexts(envEx, newFilter).withCallback(new WithMavenStepExecutionCallBack(tempBinDir, step.getOptions(), step.getPublisherStrategy())).start(); + getContext() + .newBodyInvoker() + .withContexts(envEx, newFilter) + .withCallback( + new WithMavenStepExecutionCallBack(tempBinDir, step.getOptions(), step.getPublisherStrategy())) + .start(); return false; } @@ -247,7 +262,8 @@ private boolean detectWithContainer() { if (launcherClassName.contains("org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator")) { LOGGER.log(Level.FINE, "Step running within Kubernetes withContainer(): {1}", launcherClassName); return false; - } if (launcherClassName.contains("WithContainerStep")) { + } + if (launcherClassName.contains("WithContainerStep")) { LOGGER.log(Level.FINE, "Step running within docker.image(): {1}", launcherClassName); return true; } else if (launcherClassName.contains("ContainerExecDecorator")) { @@ -272,9 +288,10 @@ private void setupJDK() throws AbortException, IOException, InterruptedException if (withContainer) { // see #detectWithContainer() LOGGER.log(Level.FINE, "Ignoring JDK installation parameter: {0}", jdkInstallationName); - console.println("WARNING: \"withMaven(){...}\" step running within a container," + - " tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. " + - "You have specified a JDK installation \"" + jdkInstallationName + "\", which will be ignored."); + console.println("WARNING: \"withMaven(){...}\" step running within a container," + + " tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. 
" + + "You have specified a JDK installation \"" + + jdkInstallationName + "\", which will be ignored."); return; } @@ -282,15 +299,16 @@ private void setupJDK() throws AbortException, IOException, InterruptedException JDK jdk = Jenkins.get().getJDK(jdkInstallationName); if (jdk == null) { - throw new AbortException("Could not find the JDK installation: " + jdkInstallationName + ". Make sure it is configured on the Global Tool Configuration page"); + throw new AbortException("Could not find the JDK installation: " + jdkInstallationName + + ". Make sure it is configured on the Global Tool Configuration page"); } Node node = getComputer().getNode(); if (node == null) { - throw new AbortException("Could not obtain the Node for the computer: " + getComputer().getName()); + throw new AbortException("Could not obtain the Node for the computer: " + + getComputer().getName()); } jdk = jdk.forNode(node, listener).forEnvironment(env); jdk.buildEnvVars(envOverride); - } /** @@ -299,18 +317,23 @@ private void setupJDK() throws AbortException, IOException, InterruptedException * @throws InterruptedException */ private void setupMaven(@NonNull Collection credentials) throws IOException, InterruptedException { - // Temp dir with the wrapper that will be prepended to the path and the temporary files used by withMaven (settings files...) + // Temp dir with the wrapper that will be prepended to the path and the temporary files used by withMaven + // (settings files...) 
if (step.getTempBinDir() != null && !step.getTempBinDir().isEmpty()) { String expandedTargetLocation = step.getTempBinDir(); try { expandedTargetLocation = TokenMacro.expandAll(build, ws, listener, expandedTargetLocation); } catch (MacroEvaluationException e) { - listener.getLogger().println("[ERROR] failed to expand variables in target location '" + expandedTargetLocation + "' : " + e.getMessage()); + listener.getLogger() + .println("[ERROR] failed to expand variables in target location '" + expandedTargetLocation + + "' : " + e.getMessage()); } tempBinDir = new FilePath(ws, expandedTargetLocation); } if (tempBinDir == null) { - tempBinDir = tempDir(ws).child("withMaven" + Util.getDigestOf(UUID.randomUUID().toString()).substring(0, 8)); + tempBinDir = tempDir(ws) + .child("withMaven" + + Util.getDigestOf(UUID.randomUUID().toString()).substring(0, 8)); } tempBinDir.mkdirs(); envOverride.put("MVN_CMD_DIR", tempBinDir.getRemote()); @@ -332,8 +355,8 @@ private void setupMaven(@NonNull Collection credentials) throws IOE if (StringUtils.isNotEmpty(javaToolsOptions)) { javaToolsOptions += " "; } - javaToolsOptions += "-Dmaven.ext.class.path=\"" + mavenSpyJarPath.getRemote() + "\" " + - "-Dorg.jenkinsci.plugins.pipeline.maven.reportsFolder=\"" + this.tempBinDir.getRemote() + "\" "; + javaToolsOptions += "-Dmaven.ext.class.path=\"" + mavenSpyJarPath.getRemote() + "\" " + + "-Dorg.jenkinsci.plugins.pipeline.maven.reportsFolder=\"" + this.tempBinDir.getRemote() + "\" "; envOverride.put("JAVA_TOOL_OPTIONS", javaToolsOptions); // @@ -343,18 +366,24 @@ private void setupMaven(@NonNull Collection credentials) throws IOE mavenConfig.append("--batch-mode "); ifTraceabilityEnabled(() -> mavenConfig.append("--show-version ")); if (StringUtils.isNotEmpty(settingsFilePath)) { - // JENKINS-57324 escape '%' as '%%'. 
See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping - if (!isUnix) settingsFilePath=settingsFilePath.replace("%", "%%"); + // JENKINS-57324 escape '%' as '%%'. See + // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping + if (!isUnix) settingsFilePath = settingsFilePath.replace("%", "%%"); mavenConfig.append("--settings \"").append(settingsFilePath).append("\" "); } if (StringUtils.isNotEmpty(globalSettingsFilePath)) { - // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping - if (!isUnix) globalSettingsFilePath=globalSettingsFilePath.replace("%", "%%"); - mavenConfig.append("--global-settings \"").append(globalSettingsFilePath).append("\" "); + // JENKINS-57324 escape '%' as '%%'. See + // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping + if (!isUnix) globalSettingsFilePath = globalSettingsFilePath.replace("%", "%%"); + mavenConfig + .append("--global-settings \"") + .append(globalSettingsFilePath) + .append("\" "); } if (StringUtils.isNotEmpty(mavenLocalRepo)) { - // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping - if (!isUnix) mavenLocalRepo=mavenLocalRepo.replace("%", "%%"); + // JENKINS-57324 escape '%' as '%%'. 
See + // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping + if (!isUnix) mavenLocalRepo = mavenLocalRepo.replace("%", "%%"); mavenConfig.append("\"-Dmaven.repo.local=").append(mavenLocalRepo).append("\" "); } @@ -384,12 +413,12 @@ private void setupMaven(@NonNull Collection credentials) throws IOE String content = generateMavenWrapperScriptContent(mvnExec, mavenConfig.toString()); // ADD MAVEN WRAPPER SCRIPT PARENT DIRECTORY TO PATH - // WARNING MUST BE INVOKED AFTER obtainMavenExec(), THERE SEEM TO BE A BUG IN ENVIRONMENT VARIABLE HANDLING IN obtainMavenExec() + // WARNING MUST BE INVOKED AFTER obtainMavenExec(), THERE SEEM TO BE A BUG IN ENVIRONMENT VARIABLE HANDLING + // IN obtainMavenExec() envOverride.put("PATH+MAVEN", tempBinDir.getRemote()); createWrapperScript(tempBinDir, mvnExec.getName(), content); } - } private FilePath setupMavenSpy() throws IOException, InterruptedException { @@ -407,18 +436,26 @@ private FilePath setupMavenSpy() throws IOException, InterruptedException { // Don't use Thread.currentThread().getContextClassLoader() as it doesn't show the resources of the plugin Class clazz = WithMavenStepExecution2.class; ClassLoader classLoader = clazz.getClassLoader(); - LOGGER.log(Level.FINE, "Load " + embeddedMavenSpyJarPath + " using classloader " + classLoader.getClass() + ": " + classLoader); + LOGGER.log( + Level.FINE, + "Load " + embeddedMavenSpyJarPath + " using classloader " + classLoader.getClass() + ": " + + classLoader); in = classLoader.getResourceAsStream(embeddedMavenSpyJarPath); if (in == null) { CodeSource codeSource = clazz.getProtectionDomain().getCodeSource(); - String msg = "Embedded maven spy jar not found at " + embeddedMavenSpyJarPath + " in the pipeline-maven-plugin classpath. " + - "Maven Spy Jar URL can be defined with the system property: '" + MAVEN_SPY_JAR_URL + "'" + - "Classloader " + classLoader.getClass() + ": " + classLoader + ". 
" + - "Class " + clazz.getName() + " loaded from " + (codeSource == null ? "#unknown#" : codeSource.getLocation()); + String msg = "Embedded maven spy jar not found at " + embeddedMavenSpyJarPath + + " in the pipeline-maven-plugin classpath. " + + "Maven Spy Jar URL can be defined with the system property: '" + + MAVEN_SPY_JAR_URL + "'" + "Classloader " + + classLoader.getClass() + ": " + classLoader + ". " + "Class " + + clazz.getName() + " loaded from " + + (codeSource == null ? "#unknown#" : codeSource.getLocation()); throw new IllegalStateException(msg); } } else { - LOGGER.log(Level.FINE, "Load maven spy jar provided by system property '" + MAVEN_SPY_JAR_URL + "': " + mavenSpyJarUrl); + LOGGER.log( + Level.FINE, + "Load maven spy jar provided by system property '" + MAVEN_SPY_JAR_URL + "': " + mavenSpyJarUrl); in = new URL(mavenSpyJarUrl).openStream(); } @@ -447,14 +484,17 @@ private String obtainMavenExec() throws IOException, InterruptedException { consoleMessage.append(" using Maven installation provided by the build agent"); } else if (withContainer) { console.println( - "[withMaven] WARNING: Specified Maven '" + mavenInstallationName + "' cannot be installed, will be ignored. " + - "Step running within a container, tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. "); - LOGGER.log(Level.FINE, "Running in docker-pipeline, ignore Maven Installation parameter: {0}", mavenInstallationName); + "[withMaven] WARNING: Specified Maven '" + mavenInstallationName + + "' cannot be installed, will be ignored. " + + "Step running within a container, tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. 
"); + LOGGER.log( + Level.FINE, + "Running in docker-pipeline, ignore Maven Installation parameter: {0}", + mavenInstallationName); } else { return obtainMvnExecutableFromMavenInstallation(mavenInstallationName); } - // in case there are no installations available we fallback to the OS maven installation // first we try MAVEN_HOME and M2_HOME LOGGER.fine("Searching for Maven through MAVEN_HOME and M2_HOME environment variables..."); @@ -466,14 +506,20 @@ private String obtainMavenExec() throws IOException, InterruptedException { if (mavenHome == null) { mavenHome = readFromProcess("printenv", M2_HOME); if (StringUtils.isNotEmpty(mavenHome)) { - consoleMessage.append(" with the environment variable M2_HOME=").append(mavenHome); + consoleMessage + .append(" with the environment variable M2_HOME=") + .append(mavenHome); } } else { - consoleMessage.append(" with the environment variable MAVEN_HOME=").append(mavenHome); + consoleMessage + .append(" with the environment variable MAVEN_HOME=") + .append(mavenHome); } if (mavenHome == null) { - LOGGER.log(Level.FINE, "NO maven installation discovered on docker container through MAVEN_HOME and M2_HOME environment variables"); + LOGGER.log( + Level.FINE, + "NO maven installation discovered on docker container through MAVEN_HOME and M2_HOME environment variables"); mvnExecPath = null; } else { LOGGER.log(Level.FINE, "Found maven installation on {0}", mavenHome); @@ -487,13 +533,19 @@ private String obtainMavenExec() throws IOException, InterruptedException { if (mavenHome == null) { mavenHome = env.get(M2_HOME); if (StringUtils.isNotEmpty(mavenHome)) { - consoleMessage.append(" with the environment variable M2_HOME=").append(mavenHome); + consoleMessage + .append(" with the environment variable M2_HOME=") + .append(mavenHome); } } else { - consoleMessage.append(" with the environment variable MAVEN_HOME=").append(mavenHome); + consoleMessage + .append(" with the environment variable MAVEN_HOME=") + .append(mavenHome); } if 
(mavenHome == null) { - LOGGER.log(Level.FINE, "NO maven installation discovered on build agent through MAVEN_HOME and M2_HOME environment variables"); + LOGGER.log( + Level.FINE, + "NO maven installation discovered on build agent through MAVEN_HOME and M2_HOME environment variables"); mvnExecPath = null; } else { LOGGER.log(Level.FINE, "Found maven installation on {0}", mavenHome); @@ -507,7 +559,8 @@ private String obtainMavenExec() throws IOException, InterruptedException { // if at this point mvnExecPath is still null try to use which/where command to find a maven executable if (mvnExecPath == null) { if (LOGGER.isLoggable(Level.FINE)) { - console.trace("[withMaven] No Maven Installation or MAVEN_HOME found, looking for mvn executable by using which/where command"); + console.trace( + "[withMaven] No Maven Installation or MAVEN_HOME found, looking for mvn executable by using which/where command"); } if (Boolean.TRUE.equals(getComputer().isUnix())) { mvnExecPath = readFromProcess("/bin/sh", "-c", "which mvn"); @@ -522,11 +575,14 @@ private String obtainMavenExec() throws IOException, InterruptedException { String mvnwScript = isUnix ? 
"mvnw" : "mvnw.cmd"; boolean mvnwScriptExists = ws.child(mvnwScript).exists(); if (mvnwScriptExists) { - consoleMessage = new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step " + - "and not found on the build agent but '" + mvnwScript + "' script found in the workspace."); + consoleMessage = + new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step " + + "and not found on the build agent but '" + mvnwScript + + "' script found in the workspace."); } else { - consoleMessage = new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step " + - "and not found on the build agent"); + consoleMessage = + new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step " + + "and not found on the build agent"); } } else { consoleMessage.append(" with executable ").append(mvnExecPath); @@ -539,13 +595,16 @@ private String obtainMavenExec() throws IOException, InterruptedException { return mvnExecPath; } - private String obtainMvnExecutableFromMavenInstallation(String mavenInstallationName) throws IOException, InterruptedException { + private String obtainMvnExecutableFromMavenInstallation(String mavenInstallationName) + throws IOException, InterruptedException { MavenInstallation mavenInstallation = null; for (MavenInstallation i : getMavenInstallations()) { if (mavenInstallationName.equals(i.getName())) { mavenInstallation = i; - LOGGER.log(Level.FINE, "Found maven installation {0} with installation home {1}", new Object[]{mavenInstallation.getName(), mavenInstallation.getHome()}); + LOGGER.log(Level.FINE, "Found maven installation {0} with installation home {1}", new Object[] { + mavenInstallation.getName(), mavenInstallation.getHome() + }); break; } } @@ -554,7 +613,8 @@ private String obtainMvnExecutableFromMavenInstallation(String mavenInstallation } Node node = getComputer().getNode(); if (node == null) { - throw new AbortException("Could not 
obtain the Node for the computer: " + getComputer().getName()); + throw new AbortException("Could not obtain the Node for the computer: " + + getComputer().getName()); } mavenInstallation = mavenInstallation.forNode(node, listener).forEnvironment(env); mavenInstallation.buildEnvVars(envOverride); @@ -578,12 +638,15 @@ private String readFromProcess(String... args) throws InterruptedException { Proc p = launcher.launch(ps.cmds(args).stdout(baos)); int exitCode = p.join(); if (exitCode == 0) { - return baos.toString(getComputer().getDefaultCharset().name()).replaceAll("[\t\r\n]+", " ").trim(); + return baos.toString(getComputer().getDefaultCharset().name()) + .replaceAll("[\t\r\n]+", " ") + .trim(); } else { return null; } } catch (IOException e) { - e.printStackTrace(console.format("Error executing command '%s' : %s%n", Arrays.toString(args), e.getMessage())); + e.printStackTrace( + console.format("Error executing command '%s' : %s%n", Arrays.toString(args), e.getMessage())); } return null; } @@ -596,7 +659,8 @@ private String readFromProcess(String... 
args) throws InterruptedException { * @return wrapper script content * @throws AbortException when problems creating content */ - private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @NonNull String mavenConfig) throws AbortException { + private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @NonNull String mavenConfig) + throws AbortException { boolean isUnix = Boolean.TRUE.equals(getComputer().isUnix()); @@ -605,16 +669,29 @@ private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @Non if (isUnix) { // Linux, Unix, MacOSX String lineSep = "\n"; script.append("#!/bin/sh -e").append(lineSep); - ifTraceabilityEnabled(() -> script.append("echo ----- withMaven Wrapper script -----").append(lineSep)); - script.append("\"").append(mvnExec.getRemote()).append("\" ").append(mavenConfig).append(" \"$@\"").append(lineSep); + ifTraceabilityEnabled(() -> + script.append("echo ----- withMaven Wrapper script -----").append(lineSep)); + script.append("\"") + .append(mvnExec.getRemote()) + .append("\" ") + .append(mavenConfig) + .append(" \"$@\"") + .append(lineSep); } else { // Windows String lineSep = "\r\n"; script.append("@echo off").append(lineSep); - ifTraceabilityEnabled(() -> script.append("echo ----- withMaven Wrapper script -----").append(lineSep)); - // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping + ifTraceabilityEnabled(() -> + script.append("echo ----- withMaven Wrapper script -----").append(lineSep)); + // JENKINS-57324 escape '%' as '%%'. 
See + // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping mavenConfig = mavenConfig.replace("%", "%%"); - script.append("\"").append(mvnExec.getRemote()).append("\" ").append(mavenConfig).append(" %*").append(lineSep); + script.append("\"") + .append(mvnExec.getRemote()) + .append("\" ") + .append(mavenConfig) + .append(" %*") + .append(lineSep); } LOGGER.log(Level.FINER, "Generated Maven wrapper script: \n{0}", script); @@ -631,7 +708,8 @@ private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @Non * @throws InterruptedException when processing remote calls * @throws IOException when reading files */ - private FilePath createWrapperScript(FilePath tempBinDir, String name, String content) throws IOException, InterruptedException { + private FilePath createWrapperScript(FilePath tempBinDir, String name, String content) + throws IOException, InterruptedException { FilePath scriptFile = tempBinDir.child(name); envOverride.put(MVN_CMD, scriptFile.getRemote()); @@ -664,7 +742,9 @@ private String setupMavenLocalRepo() throws IOException, InterruptedException { expandedMavenLocalRepo = repoPath.getRemote(); } } - LOGGER.log(Level.FINEST, "setupMavenLocalRepo({0}): {1}", new Object[]{step.getMavenLocalRepo(), expandedMavenLocalRepo}); + LOGGER.log(Level.FINEST, "setupMavenLocalRepo({0}): {1}", new Object[] { + step.getMavenLocalRepo(), expandedMavenLocalRepo + }); return expandedMavenLocalRepo; } @@ -679,13 +759,16 @@ private String setupMavenLocalRepo() throws IOException, InterruptedException { * @throws IOException when reading files */ @Nullable - private String setupSettingFile(@NonNull Collection credentials) throws IOException, InterruptedException { + private String setupSettingFile(@NonNull Collection credentials) + throws IOException, InterruptedException { final FilePath settingsDest = tempBinDir.child("settings.xml"); // Settings from Config File Provider if (StringUtils.isNotEmpty(step.getMavenSettingsConfig())) { 
if (LOGGER.isLoggable(Level.FINE)) { - console.formatTrace("[withMaven] using Maven settings provided by the Jenkins Managed Configuration File '%s' %n", step.getMavenSettingsConfig()); + console.formatTrace( + "[withMaven] using Maven settings provided by the Jenkins Managed Configuration File '%s' %n", + step.getMavenSettingsConfig()); } settingsFromConfig(step.getMavenSettingsConfig(), settingsDest, credentials); envOverride.put("MVN_SETTINGS", settingsDest.getRemote()); @@ -700,8 +783,11 @@ private String setupSettingFile(@NonNull Collection credentials) th if ((settings = ws.child(settingsPath)).exists()) { // settings file residing on the agent if (LOGGER.isLoggable(Level.FINE)) { - console.formatTrace("[withMaven] using Maven settings provided on the build agent '%s' %n", settingsPath); - LOGGER.log(Level.FINE, "Copying maven settings file from build agent {0} to {1}", new Object[] { settings, settingsDest }); + console.formatTrace( + "[withMaven] using Maven settings provided on the build agent '%s' %n", settingsPath); + LOGGER.log(Level.FINE, "Copying maven settings file from build agent {0} to {1}", new Object[] { + settings, settingsDest + }); } settings.copyTo(settingsDest); envOverride.put("MVN_SETTINGS", settingsDest.getRemote()); @@ -714,17 +800,21 @@ private String setupSettingFile(@NonNull Collection credentials) th SettingsProvider settingsProvider; MavenConfigFolderOverrideProperty overrideProperty = getMavenConfigOverrideProperty(); - StringBuilder mavenSettingsLog=new StringBuilder(); + StringBuilder mavenSettingsLog = new StringBuilder(); if (overrideProperty != null && overrideProperty.getSettings() != null) { // Settings overridden by a folder property - if(LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("[withMaven] using overridden Maven settings by folder '").append(overrideProperty.getOwner().getDisplayName()).append("'. 
"); + if (LOGGER.isLoggable(Level.FINE)) { + mavenSettingsLog + .append("[withMaven] using overridden Maven settings by folder '") + .append(overrideProperty.getOwner().getDisplayName()) + .append("'. "); } settingsProvider = overrideProperty.getSettings(); } else { if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("[withMaven] using Maven settings provided by the Jenkins global configuration. "); + mavenSettingsLog.append( + "[withMaven] using Maven settings provided by the Jenkins global configuration. "); } // Settings provided by the global maven configuration settingsProvider = GlobalMavenConfig.get().getSettingsProvider(); @@ -733,7 +823,10 @@ private String setupSettingFile(@NonNull Collection credentials) th if (settingsProvider instanceof MvnSettingsProvider) { MvnSettingsProvider mvnSettingsProvider = (MvnSettingsProvider) settingsProvider; if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("Config File Provider maven settings file '").append(mvnSettingsProvider.getSettingsConfigId()).append("'"); + mavenSettingsLog + .append("Config File Provider maven settings file '") + .append(mvnSettingsProvider.getSettingsConfigId()) + .append("'"); console.trace(mavenSettingsLog); } settingsFromConfig(mvnSettingsProvider.getSettingsConfigId(), settingsDest, credentials); @@ -748,12 +841,16 @@ private String setupSettingFile(@NonNull Collection credentials) th settings.copyTo(settingsDest); envOverride.put("MVN_SETTINGS", settingsDest.getRemote()); if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("Maven settings on the build agent'").append(settingsPath).append("'"); + mavenSettingsLog + .append("Maven settings on the build agent'") + .append(settingsPath) + .append("'"); console.trace(mavenSettingsLog); } return settingsDest.getRemote(); } else { - throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings + "' on the build agent"); + throw new AbortException("Could not find file 
provided by the Jenkins global configuration '" + settings + + "' on the build agent"); } } else if (settingsProvider instanceof DefaultSettingsProvider) { @@ -763,7 +860,8 @@ private String setupSettingFile(@NonNull Collection credentials) th console.trace(mavenSettingsLog); } } else if (settingsProvider == null) { - // should not happen according to the source code of jenkins.mvn.MavenConfig.getSettingsProvider() in jenkins-core 2.7 + // should not happen according to the source code of jenkins.mvn.MavenConfig.getSettingsProvider() in + // jenkins-core 2.7 // do nothing if (LOGGER.isLoggable(Level.FINE)) { mavenSettingsLog.append("Maven settings are null. NO settings will be defined."); @@ -782,9 +880,12 @@ private MavenConfigFolderOverrideProperty getMavenConfigOverrideProperty() { // Iterate until we find an override or until we reach the top. We need it to be an item to be able to do // getParent, AbstractFolder which has the properties is also an Item - for (ItemGroup group = job.getParent(); group instanceof Item && !(group instanceof Jenkins); group = ((Item) group).getParent()) { + for (ItemGroup group = job.getParent(); + group instanceof Item && !(group instanceof Jenkins); + group = ((Item) group).getParent()) { if (group instanceof AbstractFolder) { - MavenConfigFolderOverrideProperty mavenConfigProperty = ((AbstractFolder) group).getProperties().get(MavenConfigFolderOverrideProperty.class); + MavenConfigFolderOverrideProperty mavenConfigProperty = + ((AbstractFolder) group).getProperties().get(MavenConfigFolderOverrideProperty.class); if (mavenConfigProperty != null && mavenConfigProperty.isOverride()) { return mavenConfigProperty; } @@ -804,13 +905,16 @@ private MavenConfigFolderOverrideProperty getMavenConfigOverrideProperty() { * @throws IOException when reading files */ @Nullable - private String setupGlobalSettingFile(@NonNull Collection credentials) throws IOException, InterruptedException { + private String setupGlobalSettingFile(@NonNull 
Collection credentials) + throws IOException, InterruptedException { final FilePath settingsDest = tempBinDir.child("globalSettings.xml"); // Global settings from Config File Provider if (StringUtils.isNotEmpty(step.getGlobalMavenSettingsConfig())) { if (LOGGER.isLoggable(Level.FINE)) { - console.formatTrace("[withMaven] using Maven global settings provided by the Jenkins Managed Configuration File '%s' %n", step.getGlobalMavenSettingsConfig()); + console.formatTrace( + "[withMaven] using Maven global settings provided by the Jenkins Managed Configuration File '%s' %n", + step.getGlobalMavenSettingsConfig()); } globalSettingsFromConfig(step.getGlobalMavenSettingsConfig(), settingsDest, credentials); envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote()); @@ -824,8 +928,13 @@ private String setupGlobalSettingFile(@NonNull Collection credentia if ((settings = ws.child(settingsPath)).exists()) { // Global settings file residing on the agent if (LOGGER.isLoggable(Level.FINE)) { - console.formatTrace("[withMaven] using Maven global settings provided on the build agent '%s' %n", settingsPath); - LOGGER.log(Level.FINE, "Copying maven global settings file from build agent {0} to {1}", new Object[] { settings, settingsDest }); + console.formatTrace( + "[withMaven] using Maven global settings provided on the build agent '%s' %n", + settingsPath); + LOGGER.log( + Level.FINE, + "Copying maven global settings file from build agent {0} to {1}", + new Object[] {settings, settingsDest}); } settings.copyTo(settingsDest); envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote()); @@ -842,14 +951,18 @@ private String setupGlobalSettingFile(@NonNull Collection credentia StringBuilder mavenSettingsLog = new StringBuilder(); if (overrideProperty == null || overrideProperty.getGlobalSettings() == null) { if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("[withMaven] using Maven global settings provided by the Jenkins global configuration. 
"); + mavenSettingsLog.append( + "[withMaven] using Maven global settings provided by the Jenkins global configuration. "); } // Settings provided by the global maven configuration globalSettingsProvider = GlobalMavenConfig.get().getGlobalSettingsProvider(); } else { // Settings overridden by a folder property if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("[withMaven] using overridden Maven global settings by folder '").append(overrideProperty.getOwner().getDisplayName()).append("'. "); + mavenSettingsLog + .append("[withMaven] using overridden Maven global settings by folder '") + .append(overrideProperty.getOwner().getDisplayName()) + .append("'. "); } globalSettingsProvider = overrideProperty.getGlobalSettings(); } @@ -857,7 +970,10 @@ private String setupGlobalSettingFile(@NonNull Collection credentia if (globalSettingsProvider instanceof MvnGlobalSettingsProvider) { MvnGlobalSettingsProvider mvnGlobalSettingsProvider = (MvnGlobalSettingsProvider) globalSettingsProvider; if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("Config File Provider maven global settings file '").append(mvnGlobalSettingsProvider.getSettingsConfigId()).append("'"); + mavenSettingsLog + .append("Config File Provider maven global settings file '") + .append(mvnGlobalSettingsProvider.getSettingsConfigId()) + .append("'"); } globalSettingsFromConfig(mvnGlobalSettingsProvider.getSettingsConfigId(), settingsDest, credentials); envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote()); @@ -866,13 +982,17 @@ private String setupGlobalSettingFile(@NonNull Collection credentia } return settingsDest.getRemote(); } else if (globalSettingsProvider instanceof FilePathGlobalSettingsProvider) { - FilePathGlobalSettingsProvider filePathGlobalSettingsProvider = (FilePathGlobalSettingsProvider) globalSettingsProvider; + FilePathGlobalSettingsProvider filePathGlobalSettingsProvider = + (FilePathGlobalSettingsProvider) globalSettingsProvider; String settingsPath = 
filePathGlobalSettingsProvider.getPath(); FilePath settings; if ((settings = ws.child(settingsPath)).exists()) { // Global settings file residing on the agent if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("Maven global settings on the build agent '").append(settingsPath).append("'"); + mavenSettingsLog + .append("Maven global settings on the build agent '") + .append(settingsPath) + .append("'"); } settings.copyTo(settingsDest); envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote()); @@ -881,16 +1001,19 @@ private String setupGlobalSettingFile(@NonNull Collection credentia } return settingsDest.getRemote(); } else { - throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings + "' on the build agent"); + throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings + + "' on the build agent"); } } else if (globalSettingsProvider instanceof DefaultGlobalSettingsProvider) { // do nothing if (LOGGER.isLoggable(Level.FINE)) { - mavenSettingsLog.append("Maven global settings defined by 'DefaultSettingsProvider', NOT overriding it."); + mavenSettingsLog.append( + "Maven global settings defined by 'DefaultSettingsProvider', NOT overriding it."); console.trace(mavenSettingsLog); } } else if (globalSettingsProvider == null) { - // should not happen according to the source code of jenkins.mvn.GlobalMavenConfig.getGlobalSettingsProvider() in jenkins-core 2.7 + // should not happen according to the source code of + // jenkins.mvn.GlobalMavenConfig.getGlobalSettingsProvider() in jenkins-core 2.7 // do nothing if (LOGGER.isLoggable(Level.FINE)) { mavenSettingsLog.append("Maven global settings are null. 
NO settings will be defined."); @@ -913,47 +1036,66 @@ private String setupGlobalSettingFile(@NonNull Collection credentia * @return the {@link FilePath} to the settings file * @throws AbortException in case of error */ - private void settingsFromConfig(String mavenSettingsConfigId, FilePath mavenSettingsFile, @NonNull Collection credentials) throws AbortException { + private void settingsFromConfig( + String mavenSettingsConfigId, FilePath mavenSettingsFile, @NonNull Collection credentials) + throws AbortException { Config c = ConfigFiles.getByIdOrNull(build, mavenSettingsConfigId); if (c == null) { - throw new AbortException("Could not find the Maven settings.xml config file id:" + mavenSettingsConfigId + ". Make sure it exists on Managed Files"); + throw new AbortException("Could not find the Maven settings.xml config file id:" + mavenSettingsConfigId + + ". Make sure it exists on Managed Files"); } if (StringUtils.isBlank(c.content)) { - throw new AbortException("Could not create Maven settings.xml config file id:" + mavenSettingsConfigId + ". Content of the file is empty"); + throw new AbortException("Could not create Maven settings.xml config file id:" + mavenSettingsConfigId + + ". 
Content of the file is empty"); } MavenSettingsConfig mavenSettingsConfig; if (c instanceof MavenSettingsConfig) { mavenSettingsConfig = (MavenSettingsConfig) c; } else { - mavenSettingsConfig = new MavenSettingsConfig(c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null); + mavenSettingsConfig = new MavenSettingsConfig( + c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null); } try { - final Map resolvedCredentialsByMavenServerId = resolveCredentials(mavenSettingsConfig.getServerCredentialMappings(), "Maven settings"); + final Map resolvedCredentialsByMavenServerId = + resolveCredentials(mavenSettingsConfig.getServerCredentialMappings(), "Maven settings"); String mavenSettingsFileContent; if (resolvedCredentialsByMavenServerId.isEmpty()) { mavenSettingsFileContent = mavenSettingsConfig.content; if (LOGGER.isLoggable(Level.FINE)) { - console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id + "' with NO Maven servers credentials provided by Jenkins"); + console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id + + "' with NO Maven servers credentials provided by Jenkins"); } } else { credentials.addAll(resolvedCredentialsByMavenServerId.values()); List tempFiles = new ArrayList<>(); - mavenSettingsFileContent = CredentialsHelper.fillAuthentication(mavenSettingsConfig.content, mavenSettingsConfig.isReplaceAll, resolvedCredentialsByMavenServerId, tempBinDir, tempFiles); + mavenSettingsFileContent = CredentialsHelper.fillAuthentication( + mavenSettingsConfig.content, + mavenSettingsConfig.isReplaceAll, + resolvedCredentialsByMavenServerId, + tempBinDir, + tempFiles); if (LOGGER.isLoggable(Level.FINE)) { - console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id + "' with Maven servers credentials provided by Jenkins " + - "(replaceAll: " + mavenSettingsConfig.isReplaceAll + "): " + - resolvedCredentialsByMavenServerId.entrySet().stream().map(new 
MavenServerToCredentialsMappingToStringFunction()).sorted().collect(Collectors.joining(", "))); + console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id + + "' with Maven servers credentials provided by Jenkins " + "(replaceAll: " + + mavenSettingsConfig.isReplaceAll + "): " + + resolvedCredentialsByMavenServerId.entrySet().stream() + .map(new MavenServerToCredentialsMappingToStringFunction()) + .sorted() + .collect(Collectors.joining(", "))); } } - mavenSettingsFile.write(mavenSettingsFileContent, getComputer().getDefaultCharset().name()); + mavenSettingsFile.write( + mavenSettingsFileContent, getComputer().getDefaultCharset().name()); } catch (Exception e) { - throw new IllegalStateException("Exception injecting Maven settings.xml " + mavenSettingsConfig.id + - " during the build: " + build + ": " + e.getMessage(), e); + throw new IllegalStateException( + "Exception injecting Maven settings.xml " + mavenSettingsConfig.id + " during the build: " + build + + ": " + e.getMessage(), + e); } } @@ -967,48 +1109,66 @@ private void settingsFromConfig(String mavenSettingsConfigId, FilePath mavenSett * @return the {@link FilePath} to the settings file * @throws AbortException in case of error */ - private void globalSettingsFromConfig(String mavenGlobalSettingsConfigId, FilePath mavenGlobalSettingsFile, Collection credentials) throws AbortException { + private void globalSettingsFromConfig( + String mavenGlobalSettingsConfigId, FilePath mavenGlobalSettingsFile, Collection credentials) + throws AbortException { Config c = ConfigFiles.getByIdOrNull(build, mavenGlobalSettingsConfigId); if (c == null) { - throw new AbortException("Could not find the Maven global settings.xml config file id:" + mavenGlobalSettingsFile + ". Make sure it exists on Managed Files"); + throw new AbortException("Could not find the Maven global settings.xml config file id:" + + mavenGlobalSettingsFile + ". 
Make sure it exists on Managed Files"); } if (StringUtils.isBlank(c.content)) { - throw new AbortException("Could not create Maven global settings.xml config file id:" + mavenGlobalSettingsFile + ". Content of the file is empty"); + throw new AbortException("Could not create Maven global settings.xml config file id:" + + mavenGlobalSettingsFile + ". Content of the file is empty"); } GlobalMavenSettingsConfig mavenGlobalSettingsConfig; if (c instanceof GlobalMavenSettingsConfig) { mavenGlobalSettingsConfig = (GlobalMavenSettingsConfig) c; } else { - mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig(c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null); + mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig( + c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null); } try { - final Map resolvedCredentialsByMavenServerId = resolveCredentials(mavenGlobalSettingsConfig.getServerCredentialMappings(), " Global Maven settings"); + final Map resolvedCredentialsByMavenServerId = resolveCredentials( + mavenGlobalSettingsConfig.getServerCredentialMappings(), " Global Maven settings"); String mavenGlobalSettingsFileContent; if (resolvedCredentialsByMavenServerId.isEmpty()) { mavenGlobalSettingsFileContent = mavenGlobalSettingsConfig.content; - console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id + "' with NO Maven servers credentials provided by Jenkins"); + console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id + + "' with NO Maven servers credentials provided by Jenkins"); } else { credentials.addAll(resolvedCredentialsByMavenServerId.values()); List tempFiles = new ArrayList<>(); - mavenGlobalSettingsFileContent = CredentialsHelper.fillAuthentication(mavenGlobalSettingsConfig.content, mavenGlobalSettingsConfig.isReplaceAll, resolvedCredentialsByMavenServerId, tempBinDir, tempFiles); - console.trace("[withMaven] using Maven global 
settings.xml '" + mavenGlobalSettingsConfig.id + "' with Maven servers credentials provided by Jenkins " + - "(replaceAll: " + mavenGlobalSettingsConfig.isReplaceAll + "): " + - resolvedCredentialsByMavenServerId.entrySet().stream().map(new MavenServerToCredentialsMappingToStringFunction()).sorted().collect(Collectors.joining(", "))); - + mavenGlobalSettingsFileContent = CredentialsHelper.fillAuthentication( + mavenGlobalSettingsConfig.content, + mavenGlobalSettingsConfig.isReplaceAll, + resolvedCredentialsByMavenServerId, + tempBinDir, + tempFiles); + console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id + + "' with Maven servers credentials provided by Jenkins " + "(replaceAll: " + + mavenGlobalSettingsConfig.isReplaceAll + "): " + + resolvedCredentialsByMavenServerId.entrySet().stream() + .map(new MavenServerToCredentialsMappingToStringFunction()) + .sorted() + .collect(Collectors.joining(", "))); } - - mavenGlobalSettingsFile.write(mavenGlobalSettingsFileContent, getComputer().getDefaultCharset().name()); - LOGGER.log(Level.FINE, "Created global config file {0}", new Object[]{mavenGlobalSettingsFile}); + mavenGlobalSettingsFile.write( + mavenGlobalSettingsFileContent, + getComputer().getDefaultCharset().name()); + LOGGER.log(Level.FINE, "Created global config file {0}", new Object[] {mavenGlobalSettingsFile}); } catch (Exception e) { - throw new IllegalStateException("Exception injecting Maven settings.xml " + mavenGlobalSettingsConfig.id + - " during the build: " + build + ": " + e.getMessage(), e); + throw new IllegalStateException( + "Exception injecting Maven settings.xml " + mavenGlobalSettingsConfig.id + " during the build: " + + build + ": " + e.getMessage(), + e); } } @@ -1019,8 +1179,10 @@ private void globalSettingsFromConfig(String mavenGlobalSettingsConfigId, FilePa * @return credentials by Maven server Id */ @NonNull - public Map resolveCredentials(@Nullable final List serverCredentialMappings, String 
logMessagePrefix) { - // CredentialsHelper.removeMavenServerDefinitions() requires a Map implementation that supports `null` values. `HashMap` supports `null` values, `TreeMap` doesn't + public Map resolveCredentials( + @Nullable final List serverCredentialMappings, String logMessagePrefix) { + // CredentialsHelper.removeMavenServerDefinitions() requires a Map implementation that supports `null` values. + // `HashMap` supports `null` values, `TreeMap` doesn't // https://github.com/jenkinsci/config-file-provider-plugin/blob/config-file-provider-2.16.4/src/main/java/org/jenkinsci/plugins/configfiles/maven/security/CredentialsHelper.java#L252 Map mavenServerIdToCredentials = new HashMap<>(); if (serverCredentialMappings == null) { @@ -1029,9 +1191,15 @@ public Map resolveCredentials(@Nullable fin List unresolvedServerCredentialsMappings = new ArrayList<>(); for (ServerCredentialMapping serverCredentialMapping : serverCredentialMappings) { - List domainRequirements = StringUtils.isBlank(serverCredentialMapping.getServerId()) ? Collections.emptyList(): Collections.singletonList(new MavenServerIdRequirement(serverCredentialMapping.getServerId())); + List domainRequirements = StringUtils.isBlank(serverCredentialMapping.getServerId()) + ? 
Collections.emptyList() + : Collections.singletonList(new MavenServerIdRequirement(serverCredentialMapping.getServerId())); @Nullable - final StandardUsernameCredentials credentials = CredentialsProvider.findCredentialById(serverCredentialMapping.getCredentialsId(), StandardUsernameCredentials.class, build, domainRequirements); + final StandardUsernameCredentials credentials = CredentialsProvider.findCredentialById( + serverCredentialMapping.getCredentialsId(), + StandardUsernameCredentials.class, + build, + domainRequirements); if (credentials == null) { unresolvedServerCredentialsMappings.add(serverCredentialMapping); @@ -1044,8 +1212,11 @@ public Map resolveCredentials(@Nullable fin * we prefer to print a warning message rather than failing the build with an AbortException if some credentials are NOT found for backward compatibility reasons. * The behaviour of o.j.p.configfiles.m.s.CredentialsHelper.resolveCredentials(model.Run, List, TaskListener)` is to just print a warning message */ - console.println("[withMaven] WARNING " + logMessagePrefix + " - Silently skip Maven server Ids with missing associated Jenkins credentials: " + - unresolvedServerCredentialsMappings.stream().map(new ServerCredentialMappingToStringFunction()).collect(Collectors.joining(", "))); + console.println("[withMaven] WARNING " + logMessagePrefix + + " - Silently skip Maven server Ids with missing associated Jenkins credentials: " + + unresolvedServerCredentialsMappings.stream() + .map(new ServerCredentialMappingToStringFunction()) + .collect(Collectors.joining(", "))); } return mavenServerIdToCredentials; } @@ -1069,7 +1240,7 @@ private static final class ExpanderImpl extends EnvironmentExpander { private final Map overrides; private ExpanderImpl(EnvVars overrides) { - LOGGER.log(Level.FINEST, "ExpanderImpl(overrides: {0})", new Object[]{overrides}); + LOGGER.log(Level.FINEST, "ExpanderImpl(overrides: {0})", new Object[] {overrides}); this.overrides = new HashMap<>(); for (Entry entry : 
overrides.entrySet()) { this.overrides.put(entry.getKey(), entry.getValue()); @@ -1078,9 +1249,11 @@ private ExpanderImpl(EnvVars overrides) { @Override public void expand(EnvVars env) throws IOException, InterruptedException { - LOGGER.log(Level.FINEST, "ExpanderImpl.expand - env before expand: {0}", new Object[]{env}); // JENKINS-40484 + LOGGER.log( + Level.FINEST, "ExpanderImpl.expand - env before expand: {0}", new Object[] {env}); // JENKINS-40484 env.overrideAll(overrides); - LOGGER.log(Level.FINEST, "ExpanderImpl.expand - env after expand: {0}", new Object[]{env}); // JENKINS-40484 + LOGGER.log( + Level.FINEST, "ExpanderImpl.expand - env after expand: {0}", new Object[] {env}); // JENKINS-40484 } } @@ -1099,8 +1272,10 @@ private class WithMavenStepExecutionCallBack extends TailCall { private final MavenSpyLogProcessor mavenSpyLogProcessor = new MavenSpyLogProcessor(); - private WithMavenStepExecutionCallBack(@NonNull FilePath tempBinDir, @NonNull List options, - @NonNull MavenPublisherStrategy mavenPublisherStrategy) { + private WithMavenStepExecutionCallBack( + @NonNull FilePath tempBinDir, + @NonNull List options, + @NonNull MavenPublisherStrategy mavenPublisherStrategy) { this.tempBinDirPath = tempBinDir.getRemote(); this.options = options; this.mavenPublisherStrategy = mavenPublisherStrategy; @@ -1179,7 +1354,7 @@ private Computer getComputer() throws AbortException { LOGGER.log(Level.FINE, "Computer: {0}", computer.getName()); try { LOGGER.log(Level.FINE, "Env: {0}", computer.getEnvironment()); - } catch (IOException | InterruptedException e) {// ignored + } catch (IOException | InterruptedException e) { // ignored } } return computer; @@ -1198,32 +1373,31 @@ private static FilePath tempDir(FilePath ws) { private static class ServerCredentialMappingToStringFunction implements Function { @Override public String apply(ServerCredentialMapping mapping) { - return "[mavenServerId: " + mapping.getServerId() + ", jenkinsCredentials: " + 
mapping.getCredentialsId() + "]"; + return "[mavenServerId: " + mapping.getServerId() + ", jenkinsCredentials: " + mapping.getCredentialsId() + + "]"; } } /** * ToString of the mapping mavenServerId -> Credentials */ - private static class MavenServerToCredentialsMappingToStringFunction implements Function, String> { + private static class MavenServerToCredentialsMappingToStringFunction + implements Function, String> { @Override public String apply(@Nullable Entry entry) { - if (entry == null) - return null; + if (entry == null) return null; String mavenServerId = entry.getKey(); StandardUsernameCredentials credentials = entry.getValue(); - return "[" + - "mavenServerId: '" + mavenServerId + "', " + - "jenkinsCredentials: '" + credentials.getId() + "'" + - "]"; + return "[" + "mavenServerId: '" + + mavenServerId + "', " + "jenkinsCredentials: '" + + credentials.getId() + "'" + "]"; } } private static class CredentialsToPrettyString implements Function { @Override public String apply(@javax.annotation.Nullable Credentials credentials) { - if (credentials == null) - return "null"; + if (credentials == null) return "null"; String result = ClassUtils.getShortName(credentials.getClass()) + "["; if (credentials instanceof IdCredentials) { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java index 62b19f77..cb58021e 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java @@ -1,13 +1,12 @@ package org.jenkinsci.plugins.pipeline.maven.cause; -import hudson.model.Cause; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; 
+import hudson.model.Cause; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; /** * @author Cyrille Le Clerc @@ -18,8 +17,7 @@ public abstract class MavenDependencyAbstractCause extends Cause implements Mave private List omittedPipelineFullNames; - public MavenDependencyAbstractCause() { - } + public MavenDependencyAbstractCause() {} public MavenDependencyAbstractCause(@Nullable List mavenArtifacts) { this.mavenArtifacts = mavenArtifacts; @@ -39,7 +37,6 @@ public void setMavenArtifacts(@NonNull List mavenArtifacts) { this.mavenArtifacts = mavenArtifacts; } - @NonNull @Override public List getOmittedPipelineFullNames() { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java index e127e082..3e2bb52c 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java @@ -1,10 +1,9 @@ package org.jenkinsci.plugins.pipeline.maven.cause; -import hudson.model.Job; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; - import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.model.Job; import java.util.List; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; /** * @author Cyrille Le Clerc @@ -27,5 +26,4 @@ public interface MavenDependencyCause { void setOmittedPipelineFullNames(List omittedPipelineFullNames); String getMavenArtifactsDescription(); - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java index 6e2744be..51abf1f6 100644 --- 
a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java @@ -1,14 +1,13 @@ package org.jenkinsci.plugins.pipeline.maven.cause; import com.google.common.base.Preconditions; -import hudson.model.Cause; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; - import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.model.Cause; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; /** * @author Cyrille Le Clerc @@ -24,8 +23,11 @@ public static List isSameCause(MavenDependencyCause newMavenCause return Collections.emptyList(); } - List newCauseArtifacts = Preconditions.checkNotNull(newMavenCause.getMavenArtifacts(), "newMavenCause.mavenArtifacts should not be null"); - List oldCauseArtifacts = Preconditions.checkNotNull(((MavenDependencyCause) oldMavenCause).getMavenArtifacts(), "oldMavenCause.mavenArtifacts should not be null"); + List newCauseArtifacts = Preconditions.checkNotNull( + newMavenCause.getMavenArtifacts(), "newMavenCause.mavenArtifacts should not be null"); + List oldCauseArtifacts = Preconditions.checkNotNull( + ((MavenDependencyCause) oldMavenCause).getMavenArtifacts(), + "oldMavenCause.mavenArtifacts should not be null"); List matchingArtifacts = new ArrayList<>(); for (MavenArtifact newCauseArtifact : newCauseArtifacts) { @@ -33,12 +35,12 @@ public static List isSameCause(MavenDependencyCause newMavenCause // snapshot without exact version (aka base version), cannot search for same cause } else { for (MavenArtifact oldCauseArtifact : oldCauseArtifacts) { - if (Objects.equals(newCauseArtifact.getGroupId(), oldCauseArtifact.getGroupId()) && - Objects.equals(newCauseArtifact.getArtifactId(), oldCauseArtifact.getArtifactId()) && - Objects.equals(newCauseArtifact.getVersion(), 
oldCauseArtifact.getVersion()) && - Objects.equals(newCauseArtifact.getBaseVersion(), oldCauseArtifact.getBaseVersion()) && - Objects.equals(newCauseArtifact.getClassifier(), oldCauseArtifact.getClassifier()) && - Objects.equals(newCauseArtifact.getType(), oldCauseArtifact.getType())) { + if (Objects.equals(newCauseArtifact.getGroupId(), oldCauseArtifact.getGroupId()) + && Objects.equals(newCauseArtifact.getArtifactId(), oldCauseArtifact.getArtifactId()) + && Objects.equals(newCauseArtifact.getVersion(), oldCauseArtifact.getVersion()) + && Objects.equals(newCauseArtifact.getBaseVersion(), oldCauseArtifact.getBaseVersion()) + && Objects.equals(newCauseArtifact.getClassifier(), oldCauseArtifact.getClassifier()) + && Objects.equals(newCauseArtifact.getType(), oldCauseArtifact.getType())) { matchingArtifacts.add(newCauseArtifact); } } @@ -51,7 +53,7 @@ public static List isSameCause(MavenDependencyCause newMavenCause public static List isSameCause(MavenDependencyCause newMavenCause, List oldMavenCauses) { List matchingArtifacts = new ArrayList<>(); - for (Cause oldMavenCause:oldMavenCauses) { + for (Cause oldMavenCause : oldMavenCauses) { matchingArtifacts.addAll(isSameCause(newMavenCause, oldMavenCause)); } return matchingArtifacts; diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java index 9e7cc72a..6f0d0c59 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java @@ -3,11 +3,10 @@ import hudson.console.ModelHyperlinkNote; import hudson.model.TaskListener; import hudson.model.User; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; - import java.util.Arrays; import java.util.Collections; import java.util.List; +import 
org.jenkinsci.plugins.pipeline.maven.MavenArtifact; /** * @author Cyrille Le Clerc @@ -38,8 +37,8 @@ public String getShortDescription() { @Override public void print(TaskListener listener) { - listener.getLogger().println( - "Started from command line by " + ModelHyperlinkNote.encodeTo("/user/" + startedBy, startedBy) + " for maven artifacts " + getMavenArtifactsDescription()); + listener.getLogger() + .println("Started from command line by " + ModelHyperlinkNote.encodeTo("/user/" + startedBy, startedBy) + + " for maven artifacts " + getMavenArtifactsDescription()); } - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java index 7692935c..42170a06 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java @@ -1,13 +1,11 @@ package org.jenkinsci.plugins.pipeline.maven.cause; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.console.ModelHyperlinkNote; import hudson.model.Cause; import hudson.model.Run; import hudson.model.TaskListener; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; - -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -15,6 +13,7 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; public class MavenDependencyUpstreamCause extends Cause.UpstreamCause implements MavenDependencyCause { private List mavenArtifacts; @@ -26,15 +25,20 @@ public MavenDependencyUpstreamCause(Run up, @NonNull MavenArtifact... 
mave this.mavenArtifacts = Arrays.asList(mavenArtifact); } - public MavenDependencyUpstreamCause(Run up, @Nullable Collection mavenArtifacts, @Nullable Collection omittedPipelineFullNames) { + public MavenDependencyUpstreamCause( + Run up, + @Nullable Collection mavenArtifacts, + @Nullable Collection omittedPipelineFullNames) { super(up); this.mavenArtifacts = mavenArtifacts == null ? Collections.emptyList() : new ArrayList<>(mavenArtifacts); - this.omittedPipelineFullNames = omittedPipelineFullNames == null ? Collections.emptyList() : new ArrayList<>(omittedPipelineFullNames); + this.omittedPipelineFullNames = + omittedPipelineFullNames == null ? Collections.emptyList() : new ArrayList<>(omittedPipelineFullNames); } @Override public String getShortDescription() { - return "Started by upstream build \"" + getUpstreamProject() + "\" #" + getUpstreamBuild() + " generating Maven artifacts: " + getMavenArtifactsDescription(); + return "Started by upstream build \"" + getUpstreamProject() + "\" #" + getUpstreamBuild() + + " generating Maven artifacts: " + getMavenArtifactsDescription(); } /** @@ -72,12 +76,18 @@ private void print(TaskListener listener, int depth) { Run upstreamRun = getUpstreamRun(); if (upstreamRun == null) { - listener.getLogger().println("Started by upstream build " + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl(), getUpstreamProject()) + - "\" #" + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl() + getUpstreamBuild(), Integer.toString(getUpstreamBuild())) + - " generating Maven artifact: " + getMavenArtifactsDescription()); + listener.getLogger() + .println("Started by upstream build " + + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl(), getUpstreamProject()) + "\" #" + + ModelHyperlinkNote.encodeTo( + '/' + getUpstreamUrl() + getUpstreamBuild(), Integer.toString(getUpstreamBuild())) + + " generating Maven artifact: " + + getMavenArtifactsDescription()); } else { - listener.getLogger().println("Started by upstream build " + - 
ModelHyperlinkNote.encodeTo('/' + upstreamRun.getUrl(), upstreamRun.getFullDisplayName()) + " generating Maven artifacts: " + getMavenArtifactsDescription()); + listener.getLogger() + .println("Started by upstream build " + + ModelHyperlinkNote.encodeTo('/' + upstreamRun.getUrl(), upstreamRun.getFullDisplayName()) + + " generating Maven artifacts: " + getMavenArtifactsDescription()); } if (getUpstreamCauses() != null && !getUpstreamCauses().isEmpty()) { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java index 66ce7ca5..f00e6ebb 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java @@ -1,17 +1,17 @@ package org.jenkinsci.plugins.pipeline.maven.cause; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; import java.util.Objects; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; /** * @author Cyrille Le Clerc */ public class OtherMavenDependencyCause extends MavenDependencyAbstractCause { final String shortDescription; + public OtherMavenDependencyCause(@NonNull String shortDescription) { super(); this.shortDescription = Objects.requireNonNull(shortDescription); diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java index 13ef9608..9c8f0af1 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java +++ 
b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java @@ -2,6 +2,7 @@ import hudson.Extension; import hudson.cli.CLICommand; +import java.util.Collection; import jenkins.model.Jenkins; import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCliCause; @@ -9,8 +10,6 @@ import org.jenkinsci.plugins.pipeline.maven.service.ServiceLoggerImpl; import org.kohsuke.args4j.Option; -import java.util.Collection; - /** * @author Cyrille Le Clerc */ @@ -18,12 +17,25 @@ public class TriggerDownstreamPipelinesCommand extends CLICommand { @Option(name = "--groupId", aliases = "-g", usage = "Group ID", required = true) public String groupId; + @Option(name = "--artifactId", aliases = "-a", usage = "Artifact ID", required = true) public String artifactId; - @Option(name = "--version", aliases = "-v", usage = "Artifact version (e.g. '1.0-SNAPSHOT' is just built locally or '1.0-20100529-1213' when a SNAPSHOT artifact is deployed to a Maven repository or '1.0' for a released version", required = true) + + @Option( + name = "--version", + aliases = "-v", + usage = + "Artifact version (e.g. '1.0-SNAPSHOT' is just built locally or '1.0-20100529-1213' when a SNAPSHOT artifact is deployed to a Maven repository or '1.0' for a released version", + required = true) public String version; - @Option(name = "--base-version", aliases = "-bv", usage = "Artifact base version (e.g. '1.0-SNAPSHOT'). The base version is different from the '--version' that provides the timestamped version number when uploading snapshots to Maven repository") + + @Option( + name = "--base-version", + aliases = "-bv", + usage = + "Artifact base version (e.g. '1.0-SNAPSHOT'). 
The base version is different from the '--version' that provides the timestamped version number when uploading snapshots to Maven repository") public String baseVersion; + @Option(name = "--type", aliases = "-t", usage = "Artifact type", required = true) public String type; @@ -32,16 +44,24 @@ public String getShortDescription() { return "Triggers the downstream pipelines of the given Maven artifact based on their Maven dependencies"; } - @Override protected int run() throws Exception { /* * @Inject does NOT work to inject GlobalPipelineMavenConfig in the TriggerDownstreamPipelinesCommand instance, use static code :-( */ - PipelineTriggerService pipelineTriggerService = GlobalPipelineMavenConfig.get().getPipelineTriggerService(); + PipelineTriggerService pipelineTriggerService = + GlobalPipelineMavenConfig.get().getPipelineTriggerService(); - MavenDependencyCliCause cause = new MavenDependencyCliCause(Jenkins.getAuthentication().getName()); - Collection triggeredPipelines = pipelineTriggerService.triggerDownstreamPipelines(groupId, artifactId, baseVersion, version, type, cause, new ServiceLoggerImpl(this.stdout, this.stderr, null)); + MavenDependencyCliCause cause = + new MavenDependencyCliCause(Jenkins.getAuthentication().getName()); + Collection triggeredPipelines = pipelineTriggerService.triggerDownstreamPipelines( + groupId, + artifactId, + baseVersion, + version, + type, + cause, + new ServiceLoggerImpl(this.stdout, this.stderr, null)); stdout.println(triggeredPipelines); return 0; } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java index d69a7228..cff4a94e 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java +++ 
b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java @@ -4,12 +4,10 @@ import com.cloudbees.plugins.credentials.Credentials; import com.cloudbees.plugins.credentials.common.PasswordCredentials; import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials; - +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.console.ConsoleLogFilter; import hudson.model.Run; import hudson.util.Secret; - -import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.OutputStream; import java.io.Serializable; @@ -27,23 +25,26 @@ */ public class MaskPasswordsConsoleLogFilter extends ConsoleLogFilter implements Serializable { private static final long serialVersionUID = 1; - private final static Logger LOGGER = Logger.getLogger(MaskPasswordsConsoleLogFilter.class.getName()); + private static final Logger LOGGER = Logger.getLogger(MaskPasswordsConsoleLogFilter.class.getName()); private final Secret secretsAsRegexp; private final String charsetName; public MaskPasswordsConsoleLogFilter(@NonNull Collection secrets, @NonNull String charsetName) { - this.secretsAsRegexp = Secret.fromString(SecretPatterns.getAggregateSecretPattern(secrets).toString()); + this.secretsAsRegexp = Secret.fromString( + SecretPatterns.getAggregateSecretPattern(secrets).toString()); this.charsetName = charsetName; } @Override public OutputStream decorateLogger(Run build, final OutputStream logger) throws IOException, InterruptedException { - return new SecretPatterns.MaskingOutputStream(logger, () -> Pattern.compile(secretsAsRegexp.getPlainText()), charsetName); + return new SecretPatterns.MaskingOutputStream( + logger, () -> Pattern.compile(secretsAsRegexp.getPlainText()), charsetName); } @NonNull - public static MaskPasswordsConsoleLogFilter newMaskPasswordsConsoleLogFilter(@NonNull Iterable credentials, @NonNull Charset charset){ + public static MaskPasswordsConsoleLogFilter 
newMaskPasswordsConsoleLogFilter( + @NonNull Iterable credentials, @NonNull Charset charset) { Collection secrets = toString(credentials); return new MaskPasswordsConsoleLogFilter(secrets, charset.name()); } @@ -69,7 +70,9 @@ protected static Collection toString(@NonNull Iterable cred } // omit the private key, there } else { - LOGGER.log(Level.FINE, "Skip masking of unsupported credentials type {0}: {1}", new Object[]{creds.getClass(), creds.getDescriptor().getDisplayName()}); + LOGGER.log(Level.FINE, "Skip masking of unsupported credentials type {0}: {1}", new Object[] { + creds.getClass(), creds.getDescriptor().getDisplayName() + }); } } return result; diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java index 4c8840a5..d1c5fe7f 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java @@ -2,7 +2,6 @@ import hudson.console.ConsoleLogFilter; import hudson.model.Run; - import java.io.IOException; import java.io.OutputStream; import java.io.Serializable; @@ -28,8 +27,7 @@ private Object readResolve() { } @Override - public OutputStream decorateLogger(Run run, final OutputStream logger) - throws IOException, InterruptedException { + public OutputStream decorateLogger(Run run, final OutputStream logger) throws IOException, InterruptedException { return new MavenConsoleAnnotator(logger, Charset.forName(charset), notes); } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java index ddf5d80f..98b3accf 100644 --- 
a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java @@ -28,8 +28,6 @@ import hudson.tasks._maven.MavenErrorNote; import hudson.tasks._maven.MavenMojoNote; import hudson.tasks._maven.MavenWarningNote; -import jenkins.util.JenkinsJVM; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -37,6 +35,7 @@ import java.nio.charset.Charset; import java.util.regex.Matcher; import java.util.stream.Stream; +import jenkins.util.JenkinsJVM; // adapted from version in hudson.tasks._maven @@ -47,15 +46,17 @@ class MavenConsoleAnnotator extends LineTransformationOutputStream { static byte[][] createNotes() { JenkinsJVM.checkJenkinsJVM(); - return Stream.of(new MavenMojoNote(), new Maven3MojoNote(), new MavenWarningNote(), new MavenErrorNote()).map(note -> { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try { - note.encodeTo(baos); - } catch (IOException x) { // should be impossible - throw new RuntimeException(x); - } - return baos.toByteArray(); - }).toArray(byte[][]::new); + return Stream.of(new MavenMojoNote(), new Maven3MojoNote(), new MavenWarningNote(), new MavenErrorNote()) + .map(note -> { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try { + note.encodeTo(baos); + } catch (IOException x) { // should be impossible + throw new RuntimeException(x); + } + return baos.toByteArray(); + }) + .toArray(byte[][]::new); } private final OutputStream out; @@ -96,7 +97,7 @@ protected void eol(byte[] b, int len) throws IOException { out.write(notes[3]); } - out.write(b,0,len); + out.write(b, 0, len); } @Override @@ -109,5 +110,4 @@ public void close() throws IOException { super.close(); out.close(); } - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java 
b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java index b6ea8d22..1697bf19 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java @@ -4,12 +4,11 @@ import static java.util.stream.StreamSupport.stream; import static org.jenkinsci.plugins.pipeline.maven.WithMavenStep.DescriptorImpl.FUNCTION_NAME; -import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner; -import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner; - import hudson.model.Run; import hudson.model.TaskListener; import hudson.model.listeners.RunListener; +import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner; +import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner; public abstract class AbstractWorkflowRunListener extends RunListener> { @@ -17,13 +16,16 @@ protected boolean shouldRun(Run run, TaskListener listener) { if (!(run instanceof FlowExecutionOwner.Executable)) { return false; } - + return ofNullable(((FlowExecutionOwner.Executable) run).asFlowExecutionOwner()) .map(owner -> { try { return owner.get(); } catch (Exception ex) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to introspect build steps: " + ex.toString()); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Failure to introspect build steps: " + + ex.toString()); return null; } }) @@ -33,7 +35,8 @@ protected boolean shouldRun(Run run, TaskListener listener) { }) .map(scanner -> scanner.spliterator()) .map(iterator -> stream(iterator, false)) - .flatMap(stream -> stream.filter(n -> FUNCTION_NAME.equals(n.getDisplayFunctionName())).findAny()) + .flatMap(stream -> stream.filter(n -> FUNCTION_NAME.equals(n.getDisplayFunctionName())) + .findAny()) .isPresent(); } } diff --git 
a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java index 9f9a93bf..d9c4d5ae 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java @@ -2,6 +2,9 @@ import static org.jenkinsci.plugins.pipeline.maven.dao.MonitoringPipelineMavenPluginDaoDecorator.registerCacheStatsSupplier; +import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.model.Item; +import hudson.model.Run; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -9,19 +12,13 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import java.util.logging.Logger; - -import edu.umd.cs.findbugs.annotations.NonNull; - import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.dao.CacheStats; -import hudson.model.Item; -import hudson.model.Run; - public class DaoHelper { - private final static Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName()); + private static final Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName()); private static final AtomicInteger GET_GENERATED_ARTIFACTS_HITS = new AtomicInteger(); private static final AtomicInteger GET_GENERATED_ARTIFACTS_MISSES = new AtomicInteger(); @@ -30,8 +27,10 @@ public class DaoHelper { private static final AtomicInteger LIST_DOWNSTREAM_JOBS_MISSES = new AtomicInteger(); static { - registerCacheStatsSupplier(() -> new CacheStats("getGeneratedArtifacts", GET_GENERATED_ARTIFACTS_HITS.get(), GET_GENERATED_ARTIFACTS_MISSES.get())); - registerCacheStatsSupplier(() -> new CacheStats("listDownstreamJobsByArtifact", LIST_DOWNSTREAM_JOBS_HITS.get(), 
LIST_DOWNSTREAM_JOBS_MISSES.get())); + registerCacheStatsSupplier(() -> new CacheStats( + "getGeneratedArtifacts", GET_GENERATED_ARTIFACTS_HITS.get(), GET_GENERATED_ARTIFACTS_MISSES.get())); + registerCacheStatsSupplier(() -> new CacheStats( + "listDownstreamJobsByArtifact", LIST_DOWNSTREAM_JOBS_HITS.get(), LIST_DOWNSTREAM_JOBS_MISSES.get())); } private GlobalPipelineMavenConfig globalPipelineMavenConfig; @@ -57,17 +56,21 @@ public DaoHelper(GlobalPipelineMavenConfig globalPipelineMavenConfig) { List getGeneratedArtifacts(@NonNull String jobFullName, int buildNumber) { String key = jobFullName + '#' + buildNumber; - LOGGER.log(Level.FINER, "calling getGeneratedArtifacts {0} {1}, cache size: {2}", - new Object[] { jobFullName, buildNumber, generatedArtifactsCache.size() }); + LOGGER.log(Level.FINER, "calling getGeneratedArtifacts {0} {1}, cache size: {2}", new Object[] { + jobFullName, buildNumber, generatedArtifactsCache.size() + }); if (generatedArtifactsCache.containsKey(key)) { - LOGGER.log(Level.FINER, "cache hit for getGeneratedArtifacts {0} {1}", new Object[] { jobFullName, buildNumber }); + LOGGER.log( + Level.FINER, "cache hit for getGeneratedArtifacts {0} {1}", new Object[] {jobFullName, buildNumber + }); GET_GENERATED_ARTIFACTS_HITS.incrementAndGet(); } else { GET_GENERATED_ARTIFACTS_MISSES.incrementAndGet(); } - return generatedArtifactsCache.computeIfAbsent(key, k -> globalPipelineMavenConfig.getDao().getGeneratedArtifacts(jobFullName, buildNumber)); + return generatedArtifactsCache.computeIfAbsent( + key, k -> globalPipelineMavenConfig.getDao().getGeneratedArtifacts(jobFullName, buildNumber)); } /** @@ -87,11 +90,14 @@ List getGeneratedArtifacts(@NonNull String jobFullName, int build Map> listDownstreamJobsByArtifact(String jobFullName, int buildNumber) { String key = jobFullName + '#' + buildNumber; if (downstreamJobsByArtifact.containsKey(key)) { - LOGGER.log(Level.FINER, "cache hit for listDownstreamJobsByArtifact {0} {1}", new Object[] { 
jobFullName, buildNumber }); + LOGGER.log(Level.FINER, "cache hit for listDownstreamJobsByArtifact {0} {1}", new Object[] { + jobFullName, buildNumber + }); LIST_DOWNSTREAM_JOBS_HITS.incrementAndGet(); } else { LIST_DOWNSTREAM_JOBS_MISSES.incrementAndGet(); } - return downstreamJobsByArtifact.computeIfAbsent(key, k -> globalPipelineMavenConfig.getDao().listDownstreamJobsByArtifact(jobFullName, buildNumber)); + return downstreamJobsByArtifact.computeIfAbsent( + key, k -> globalPipelineMavenConfig.getDao().listDownstreamJobsByArtifact(jobFullName, buildNumber)); } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java index 7af24b2e..110ae6f9 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java @@ -4,14 +4,13 @@ import hudson.model.Item; import hudson.model.ItemGroup; import hudson.model.listeners.ItemListener; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.inject.Inject; import jenkins.model.Jenkins; import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.workflow.flow.BlockableResume; -import javax.inject.Inject; -import java.util.logging.Level; -import java.util.logging.Logger; - /** * Maintains the database in sync with the jobs and builds. 
* @@ -19,7 +18,7 @@ */ @Extension public class DatabaseSyncItemListener extends ItemListener { - private final static Logger LOGGER = Logger.getLogger(DatabaseSyncItemListener.class.getName()); + private static final Logger LOGGER = Logger.getLogger(DatabaseSyncItemListener.class.getName()); @Inject public GlobalPipelineMavenConfig globalPipelineMavenConfig; @@ -30,14 +29,14 @@ public void onDeleted(Item item) { LOGGER.log(Level.FINE, "onDeleted({0})", item); globalPipelineMavenConfig.getDao().deleteJob(item.getFullName()); } else { - LOGGER.log(Level.FINE, "Ignore onDeleted({0})", new Object[]{item}); + LOGGER.log(Level.FINE, "Ignore onDeleted({0})", new Object[] {item}); } } @Override public void onRenamed(Item item, String oldName, String newName) { if (item instanceof BlockableResume) { - LOGGER.log(Level.FINE, "onRenamed({0}, {1}, {2})", new Object[]{item, oldName, newName}); + LOGGER.log(Level.FINE, "onRenamed({0}, {1}, {2})", new Object[] {item, oldName, newName}); String oldFullName; ItemGroup parent = item.getParent(); @@ -49,18 +48,19 @@ public void onRenamed(Item item, String oldName, String newName) { String newFullName = item.getFullName(); globalPipelineMavenConfig.getDao().renameJob(oldFullName, newFullName); } else { - LOGGER.log(Level.FINE, "Ignore onRenamed({0}, {1}, {2})", new Object[]{item, oldName, newName}); + LOGGER.log(Level.FINE, "Ignore onRenamed({0}, {1}, {2})", new Object[] {item, oldName, newName}); } } @Override public void onLocationChanged(Item item, String oldFullName, String newFullName) { if (item instanceof BlockableResume) { - LOGGER.log(Level.FINE, "onLocationChanged({0}, {1}, {2})", new Object[]{item, oldFullName, newFullName}); + LOGGER.log(Level.FINE, "onLocationChanged({0}, {1}, {2})", new Object[] {item, oldFullName, newFullName}); globalPipelineMavenConfig.getDao().renameJob(oldFullName, newFullName); } else { - LOGGER.log(Level.FINE, "Ignore onLocationChanged({0}, {1}, {2})", new Object[]{item, oldFullName, 
newFullName}); + LOGGER.log( + Level.FINE, "Ignore onLocationChanged({0}, {1}, {2})", new Object[] {item, oldFullName, newFullName + }); } } - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java index a45846f2..6a07276c 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java @@ -1,14 +1,13 @@ package org.jenkinsci.plugins.pipeline.maven.listeners; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.model.Cause; import hudson.model.Result; import hudson.model.Run; import hudson.model.TaskListener; -import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; - -import edu.umd.cs.findbugs.annotations.NonNull; import javax.inject.Inject; +import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; /** * @author Cyrille Le Clerc @@ -28,16 +27,21 @@ public void onDeleted(Run run) { public void onInitialize(Run run) { super.onInitialize(run); - for (Cause cause: run.getCauses()) { + for (Cause cause : run.getCauses()) { if (cause instanceof Cause.UpstreamCause) { Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) cause; String upstreamJobName = upstreamCause.getUpstreamProject(); int upstreamBuildNumber = upstreamCause.getUpstreamBuild(); - globalPipelineMavenConfig.getDao().recordBuildUpstreamCause(upstreamJobName, upstreamBuildNumber, run.getParent().getFullName(), run.getNumber()); + globalPipelineMavenConfig + .getDao() + .recordBuildUpstreamCause( + upstreamJobName, + upstreamBuildNumber, + run.getParent().getFullName(), + run.getNumber()); } } - } /* @@ -56,11 +60,15 @@ public void onCompleted(Run workflowRun, @NonNull TaskListener listener) { if (result == 
null) { result = Result.SUCCESS; // FIXME more elegant handling } - globalPipelineMavenConfig.getDao().updateBuildOnCompletion( - workflowRun.getParent().getFullName(), - workflowRun.getNumber(), - result.ordinal, - workflowRun.getStartTimeInMillis(), - Math.max(System.currentTimeMillis() - workflowRun.getStartTimeInMillis(), 0)); // @see HUDSON-5844 + globalPipelineMavenConfig + .getDao() + .updateBuildOnCompletion( + workflowRun.getParent().getFullName(), + workflowRun.getNumber(), + result.ordinal, + workflowRun.getStartTimeInMillis(), + Math.max( + System.currentTimeMillis() - workflowRun.getStartTimeInMillis(), + 0)); // @see HUDSON-5844 } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java index 4292b723..9e2ec145 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java @@ -1,29 +1,15 @@ package org.jenkinsci.plugins.pipeline.maven.listeners; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.console.ModelHyperlinkNote; import hudson.model.Cause; import hudson.model.CauseAction; import hudson.model.Job; import hudson.model.Queue; +import hudson.model.Queue.Task; import hudson.model.Run; import hudson.model.TaskListener; -import hudson.model.Queue.Task; -import jenkins.model.Jenkins; -import jenkins.model.ParameterizedJobMixIn; -import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyAbstractCause; -import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCause; -import 
org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCauseHelper; -import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyUpstreamCause; -import org.jenkinsci.plugins.pipeline.maven.cause.OtherMavenDependencyCause; -import org.jenkinsci.plugins.pipeline.maven.dao.UpstreamMemory; -import org.jenkinsci.plugins.pipeline.maven.trigger.WorkflowJobDependencyTrigger; - -import edu.umd.cs.findbugs.annotations.NonNull; -import javax.inject.Inject; - import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -39,6 +25,18 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; +import javax.inject.Inject; +import jenkins.model.Jenkins; +import jenkins.model.ParameterizedJobMixIn; +import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyAbstractCause; +import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCause; +import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCauseHelper; +import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyUpstreamCause; +import org.jenkinsci.plugins.pipeline.maven.cause.OtherMavenDependencyCause; +import org.jenkinsci.plugins.pipeline.maven.dao.UpstreamMemory; +import org.jenkinsci.plugins.pipeline.maven.trigger.WorkflowJobDependencyTrigger; /** * Trigger downstream pipelines. 
@@ -48,81 +46,115 @@ @Extension public class DownstreamPipelineTriggerRunListener extends AbstractWorkflowRunListener { - private final static Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName()); + private static final Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName()); @Inject public GlobalPipelineMavenConfig globalPipelineMavenConfig; @Override public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) { - LOGGER.log(Level.FINER, "onCompleted({0})", new Object[]{upstreamBuild}); + LOGGER.log(Level.FINER, "onCompleted({0})", new Object[] {upstreamBuild}); UpstreamMemory upstreamMemory = new UpstreamMemory(); DaoHelper daoHelper = new DaoHelper(globalPipelineMavenConfig); if (!shouldRun(upstreamBuild, listener)) { - LOGGER.log(Level.FINE, "Skipping downstream pipeline triggering for {0} as withMaven step not found.", - new Object[]{upstreamBuild}); + LOGGER.log( + Level.FINE, + "Skipping downstream pipeline triggering for {0} as withMaven step not found.", + new Object[] {upstreamBuild}); return; } long startTimeInNanos = System.nanoTime(); - if(LOGGER.isLoggable(Level.FINER)) { + if (LOGGER.isLoggable(Level.FINER)) { listener.getLogger().println("[withMaven] pipelineGraphPublisher - triggerDownstreamPipelines"); } - if (!globalPipelineMavenConfig.getTriggerDownstreamBuildsResultsCriteria().contains(upstreamBuild.getResult())) { - Map> omittedPipelineFullNamesAndCauses = new HashMap<>(); - for (Cause cause: upstreamBuild.getCauses()) { + if (!globalPipelineMavenConfig + .getTriggerDownstreamBuildsResultsCriteria() + .contains(upstreamBuild.getResult())) { + Map> omittedPipelineFullNamesAndCauses = new HashMap<>(); + for (Cause cause : upstreamBuild.getCauses()) { if (cause instanceof MavenDependencyCause) { MavenDependencyCause mavenDependencyCause = (MavenDependencyCause) cause; - for (String omittedPipelineFullName: mavenDependencyCause.getOmittedPipelineFullNames()) { - 
omittedPipelineFullNamesAndCauses.computeIfAbsent(omittedPipelineFullName, p-> new ArrayList<>()).add(mavenDependencyCause); + for (String omittedPipelineFullName : mavenDependencyCause.getOmittedPipelineFullNames()) { + omittedPipelineFullNamesAndCauses + .computeIfAbsent(omittedPipelineFullName, p -> new ArrayList<>()) + .add(mavenDependencyCause); } } } if (omittedPipelineFullNamesAndCauses.isEmpty()) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] Skip triggering downstream jobs for upstream build with ignored result status " + upstreamBuild + ": " + upstreamBuild.getResult()); + listener.getLogger() + .println( + "[withMaven] Skip triggering downstream jobs for upstream build with ignored result status " + + upstreamBuild + ": " + upstreamBuild.getResult()); } } else { - for (Map.Entry> entry: omittedPipelineFullNamesAndCauses.entrySet()) { + for (Map.Entry> entry : + omittedPipelineFullNamesAndCauses.entrySet()) { Job omittedPipeline = Jenkins.get().getItemByFullName(entry.getKey(), Job.class); if (omittedPipeline == null) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: " + entry.getKey() + " not resolved"); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: " + + entry.getKey() + " not resolved"); continue; } List omittedPipelineTriggerCauses = new ArrayList<>(); - for (MavenDependencyCause cause: entry.getValue()) { + for (MavenDependencyCause cause : entry.getValue()) { if (cause instanceof MavenDependencyUpstreamCause) { - MavenDependencyUpstreamCause mavenDependencyUpstreamCause = (MavenDependencyUpstreamCause) cause; - Run upstreamRun = mavenDependencyUpstreamCause.getUpstreamRun() == null ? 
upstreamBuild: mavenDependencyUpstreamCause.getUpstreamRun(); - omittedPipelineTriggerCauses.add(new MavenDependencyUpstreamCause(upstreamRun, mavenDependencyUpstreamCause.getMavenArtifacts(), Collections.emptyList())); + MavenDependencyUpstreamCause mavenDependencyUpstreamCause = + (MavenDependencyUpstreamCause) cause; + Run upstreamRun = mavenDependencyUpstreamCause.getUpstreamRun() == null + ? upstreamBuild + : mavenDependencyUpstreamCause.getUpstreamRun(); + omittedPipelineTriggerCauses.add(new MavenDependencyUpstreamCause( + upstreamRun, + mavenDependencyUpstreamCause.getMavenArtifacts(), + Collections.emptyList())); } else if (cause instanceof MavenDependencyAbstractCause) { try { - MavenDependencyCause mavenDependencyCause = ((MavenDependencyAbstractCause)cause).clone(); + MavenDependencyCause mavenDependencyCause = + ((MavenDependencyAbstractCause) cause).clone(); mavenDependencyCause.setOmittedPipelineFullNames(Collections.emptyList()); omittedPipelineTriggerCauses.add((Cause) mavenDependencyCause); } catch (CloneNotSupportedException e) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to clone pipeline cause " + cause + " : " + e); - omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause(((MavenDependencyAbstractCause) cause).getShortDescription())); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Failure to clone pipeline cause " + + cause + " : " + e); + omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause( + ((MavenDependencyAbstractCause) cause).getShortDescription())); } } else { - omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause(((MavenDependencyAbstractCause) cause).getShortDescription())); + omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause( + ((MavenDependencyAbstractCause) cause).getShortDescription())); } } // TODO deduplicate pipeline triggers // See 
jenkins.triggers.ReverseBuildTrigger.RunListenerImpl.onCompleted(Run, TaskListener) - Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2(omittedPipeline, -1, new CauseAction(omittedPipelineTriggerCauses)); + Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2( + omittedPipeline, -1, new CauseAction(omittedPipelineTriggerCauses)); if (queuedItem == null) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to trigger omitted pipeline " + ModelHyperlinkNote.encodeTo(omittedPipeline) + " due to causes " + - omittedPipelineTriggerCauses + ", invocation rejected."); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Failure to trigger omitted pipeline " + + ModelHyperlinkNote.encodeTo(omittedPipeline) + " due to causes " + + omittedPipelineTriggerCauses + ", invocation rejected."); } else { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(omittedPipeline) + " despite build result " + - upstreamBuild.getResult() + " for the upstream causes: " + omittedPipelineTriggerCauses.stream().map( - Cause::getShortDescription).collect(Collectors.joining(", "))); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline " + + ModelHyperlinkNote.encodeTo(omittedPipeline) + + " despite build result " + upstreamBuild.getResult() + + " for the upstream causes: " + + omittedPipelineTriggerCauses.stream() + .map(Cause::getShortDescription) + .collect(Collectors.joining(", "))); } } } @@ -130,9 +162,14 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) } try { - this.globalPipelineMavenConfig.getPipelineTriggerService().checkNoInfiniteLoopOfUpstreamCause(upstreamBuild); + this.globalPipelineMavenConfig + .getPipelineTriggerService() + .checkNoInfiniteLoopOfUpstreamCause(upstreamBuild); } catch 
(IllegalStateException e) { - listener.getLogger().println("[withMaven] WARNING abort infinite build trigger loop. Please consider opening a Jira issue: " + e.getMessage()); + listener.getLogger() + .println( + "[withMaven] WARNING abort infinite build trigger loop. Please consider opening a Jira issue: " + + e.getMessage()); return; } @@ -140,15 +177,19 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) String upstreamPipelineFullName = upstreamPipeline.getFullName(); int upstreamBuildNumber = upstreamBuild.getNumber(); - Map> downstreamPipelinesByArtifact = globalPipelineMavenConfig.getDao().listDownstreamJobsByArtifact(upstreamPipelineFullName, upstreamBuildNumber); - LOGGER.log(Level.FINER, "got downstreamPipelinesByArtifact for project {0} and build #{1}: {2}", new Object[]{upstreamPipelineFullName, upstreamBuildNumber, downstreamPipelinesByArtifact}); + Map> downstreamPipelinesByArtifact = globalPipelineMavenConfig + .getDao() + .listDownstreamJobsByArtifact(upstreamPipelineFullName, upstreamBuildNumber); + LOGGER.log(Level.FINER, "got downstreamPipelinesByArtifact for project {0} and build #{1}: {2}", new Object[] { + upstreamPipelineFullName, upstreamBuildNumber, downstreamPipelinesByArtifact + }); Map> jobsToTrigger = new TreeMap<>(); - Map> omittedPipelineTriggersByPipelineFullname = new HashMap<>(); + Map> omittedPipelineTriggersByPipelineFullname = new HashMap<>(); List rejectedPipelines = new ArrayList<>(); // build the list of pipelines to trigger - for (Map.Entry> entry: downstreamPipelinesByArtifact.entrySet()) { + for (Map.Entry> entry : downstreamPipelinesByArtifact.entrySet()) { MavenArtifact mavenArtifact = entry.getKey(); SortedSet downstreamPipelines = entry.getValue(); @@ -158,13 +199,23 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) if (jobsToTrigger.containsKey(downstreamPipelineFullName)) { // downstream pipeline has already been added to the list of pipelines to trigger, - // we 
have already verified that it's meeting requirements (not an infinite loop, authorized by security, not excessive triggering, buildable...) + // we have already verified that it's meeting requirements (not an infinite loop, authorized by + // security, not excessive triggering, buildable...) if (LOGGER.isLoggable(Level.FINEST)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip eligibility check of pipeline " + downstreamPipelineFullName + " for artifact " + mavenArtifact.getShortDescription() + ", eligibility already confirmed"); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Skip eligibility check of pipeline " + + downstreamPipelineFullName + " for artifact " + + mavenArtifact.getShortDescription() + + ", eligibility already confirmed"); } Set mavenArtifacts = jobsToTrigger.get(downstreamPipelineFullName); if (mavenArtifacts == null) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Invalid state, no artifacts found for pipeline '" + downstreamPipelineFullName + "' while evaluating " + mavenArtifact.getShortDescription()); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Invalid state, no artifacts found for pipeline '" + + downstreamPipelineFullName + "' while evaluating " + + mavenArtifact.getShortDescription()); } else { mavenArtifacts.add(mavenArtifact); } @@ -178,43 +229,71 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) } if (rejectedPipelines.contains(downstreamPipelineFullName)) { - LOGGER.log(Level.FINE, "Downstream pipeline {0} already checked", - new Object[]{downstreamPipelineFullName}); + LOGGER.log(Level.FINE, "Downstream pipeline {0} already checked", new Object[] { + downstreamPipelineFullName + }); continue; } - final Job downstreamPipeline = Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class); + final Job downstreamPipeline = + 
Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class); if (downstreamPipeline == null || downstreamPipeline.getLastBuild() == null) { - LOGGER.log(Level.FINE, "Downstream pipeline {0} or downstream pipeline last build not found from upstream build {1}. Database synchronization issue or security restriction?", - new Object[]{downstreamPipelineFullName, upstreamBuild.getFullDisplayName(), Jenkins.getAuthentication()}); + LOGGER.log( + Level.FINE, + "Downstream pipeline {0} or downstream pipeline last build not found from upstream build {1}. Database synchronization issue or security restriction?", + new Object[] { + downstreamPipelineFullName, + upstreamBuild.getFullDisplayName(), + Jenkins.getAuthentication() + }); rejectedPipelines.add(downstreamPipelineFullName); continue; } int downstreamBuildNumber = downstreamPipeline.getLastBuild().getNumber(); - List downstreamPipelineGeneratedArtifacts = daoHelper.getGeneratedArtifacts(downstreamPipelineFullName, downstreamBuildNumber); + List downstreamPipelineGeneratedArtifacts = + daoHelper.getGeneratedArtifacts(downstreamPipelineFullName, downstreamBuildNumber); if (LOGGER.isLoggable(Level.FINEST)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Pipeline " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " evaluated for because it has a dependency on " + mavenArtifact + " generates " + downstreamPipelineGeneratedArtifacts); - } + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Pipeline " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + + " evaluated for because it has a dependency on " + mavenArtifact + " generates " + + downstreamPipelineGeneratedArtifacts); + } for (MavenArtifact downstreamPipelineGeneratedArtifact : downstreamPipelineGeneratedArtifacts) { - if (Objects.equals(mavenArtifact.getGroupId(), downstreamPipelineGeneratedArtifact.getGroupId()) && - Objects.equals(mavenArtifact.getArtifactId(), 
downstreamPipelineGeneratedArtifact.getArtifactId())) { + if (Objects.equals(mavenArtifact.getGroupId(), downstreamPipelineGeneratedArtifact.getGroupId()) + && Objects.equals( + mavenArtifact.getArtifactId(), + downstreamPipelineGeneratedArtifact.getArtifactId())) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " for " + mavenArtifact + " because it generates artifact with same groupId:artifactId " + downstreamPipelineGeneratedArtifact); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " for " + mavenArtifact + + " because it generates artifact with same groupId:artifactId " + + downstreamPipelineGeneratedArtifact); } continue downstreamPipelinesLoop; } } - Map> downstreamDownstreamPipelinesByArtifact = daoHelper.listDownstreamJobsByArtifact(downstreamPipelineFullName, downstreamBuildNumber); - for (Map.Entry> entry2 : downstreamDownstreamPipelinesByArtifact.entrySet()) { + Map> downstreamDownstreamPipelinesByArtifact = + daoHelper.listDownstreamJobsByArtifact(downstreamPipelineFullName, downstreamBuildNumber); + for (Map.Entry> entry2 : + downstreamDownstreamPipelinesByArtifact.entrySet()) { SortedSet downstreamDownstreamPipelines = entry2.getValue(); if (downstreamDownstreamPipelines.contains(upstreamPipelineFullName)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Infinite loop detected: skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " " + - " (dependency: " + mavenArtifact.getShortDescription() + ") because it is itself triggering this pipeline " + - ModelHyperlinkNote.encodeTo(upstreamPipeline) + " (dependency: " + entry2.getKey().getShortDescription() + ")"); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - 
Infinite loop detected: skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " " + + " (dependency: " + + mavenArtifact.getShortDescription() + + ") because it is itself triggering this pipeline " + + ModelHyperlinkNote.encodeTo(upstreamPipeline) + + " (dependency: " + + entry2.getKey().getShortDescription() + ")"); // prevent infinite loop continue downstreamPipelinesLoop; } @@ -222,22 +301,36 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) // Avoid excessive triggering // See #46313 - Map transitiveUpstreamPipelines = globalPipelineMavenConfig.getDao().listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber, upstreamMemory); + Map transitiveUpstreamPipelines = globalPipelineMavenConfig + .getDao() + .listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber, upstreamMemory); if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Found transitive upstream pipelines for " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - ": " + transitiveUpstreamPipelines.keySet().stream().collect(Collectors.joining(","))); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Found transitive upstream pipelines for " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + ": " + + transitiveUpstreamPipelines.keySet().stream() + .collect(Collectors.joining(","))); } - // If a job is running in this moment we get an empty list. We use the last successful build in this case + // If a job is running in this moment we get an empty list. 
We use the last successful build in this + // case if (transitiveUpstreamPipelines != null && transitiveUpstreamPipelines.isEmpty()) { Job job = Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class); if (job != null) { Run lastSuccessfulBuild = job.getLastSuccessfulBuild(); if (lastSuccessfulBuild != null) { - transitiveUpstreamPipelines = globalPipelineMavenConfig.getDao().listTransitiveUpstreamJobs(downstreamPipelineFullName, lastSuccessfulBuild.number, upstreamMemory); + transitiveUpstreamPipelines = globalPipelineMavenConfig + .getDao() + .listTransitiveUpstreamJobs( + downstreamPipelineFullName, lastSuccessfulBuild.number, upstreamMemory); if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Updated transitive upstream pipelines for " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - ": " + transitiveUpstreamPipelines.keySet().stream().collect(Collectors.joining(","))); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Updated transitive upstream pipelines for " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + ": " + + transitiveUpstreamPipelines.keySet().stream() + .collect(Collectors.joining(","))); } } } @@ -246,128 +339,195 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) for (String transitiveUpstreamPipelineName : transitiveUpstreamPipelines.keySet()) { // Skip if one of the downstream's upstream is already building or in queue // Then it will get triggered anyway by that upstream, we don't need to trigger it again - Job transitiveUpstreamPipeline = Jenkins.get().getItemByFullName(transitiveUpstreamPipelineName, Job.class); + Job transitiveUpstreamPipeline = + Jenkins.get().getItemByFullName(transitiveUpstreamPipelineName, Job.class); if (transitiveUpstreamPipeline == null) { // security: not allowed to view this transitive upstream pipeline, continue to loop if (LOGGER.isLoggable(Level.FINER)) 
{ - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - " because transitive pipeline " + transitiveUpstreamPipelineName + " is unaccessible"); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + + " because transitive pipeline " + transitiveUpstreamPipelineName + + " is unaccessible"); } continue; } else if (transitiveUpstreamPipeline.getFullName().equals(upstreamPipeline.getFullName())) { - // this upstream pipeline of the current downstreamPipeline is the upstream pipeline itself, continue to loop + // this upstream pipeline of the current downstreamPipeline is the upstream pipeline itself, + // continue to loop if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - " because transitive pipeline " + transitiveUpstreamPipelineName + " is the current one"); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + + " because transitive pipeline " + transitiveUpstreamPipelineName + + " is the current one"); } continue; } else if (transitiveUpstreamPipeline.isBuilding()) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - " because it has a dependency already building: " + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline)); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + + " because it has a dependency already building: " + + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline)); continue 
downstreamPipelinesLoop; } else if (isInQueue(transitiveUpstreamPipeline)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - " because it has a dependency already building or in queue: " + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline)); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + + " because it has a dependency already building or in queue: " + + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline)); continue downstreamPipelinesLoop; } else if (downstreamPipelines.contains(transitiveUpstreamPipelineName)) { - // Skip if this downstream pipeline will be triggered by another one of our downstream pipelines - // That's the case when one of the downstream's transitive upstream is our own downstream - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + - " because it has a dependency on a pipeline that will be triggered by this build: " + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline)); - omittedPipelineTriggersByPipelineFullname.computeIfAbsent(transitiveUpstreamPipelineName, p -> new TreeSet<>()).add(downstreamPipelineFullName); + // Skip if this downstream pipeline will be triggered by another one of our downstream pipelines + // That's the case when one of the downstream's transitive upstream is our own downstream + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamPipeline) + + " because it has a dependency on a pipeline that will be triggered by this build: " + + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline)); + omittedPipelineTriggersByPipelineFullname + .computeIfAbsent(transitiveUpstreamPipelineName, p -> new TreeSet<>()) + 
.add(downstreamPipelineFullName); continue downstreamPipelinesLoop; } } - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - checked transitive upstreams for: " + downstreamPipelineFullName + " build: " + - downstreamBuildNumber + " result: " + String.join(",", transitiveUpstreamPipelines.keySet())); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - checked transitive upstreams for: " + + downstreamPipelineFullName + " build: " + downstreamBuildNumber + " result: " + + String.join(",", transitiveUpstreamPipelines.keySet())); if (!downstreamPipeline.isBuildable()) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of non buildable (disabled: " + - ((ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline).isDisabled() + ", isHoldOffBuildUntilSave: " + - downstreamPipeline.isHoldOffBuildUntilSave() + ") downstream pipeline " + downstreamPipeline.getFullName() + - " from upstream build " + upstreamBuild.getFullDisplayName()); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of non buildable (disabled: " + + ((ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline) + .isDisabled() + + ", isHoldOffBuildUntilSave: " + + downstreamPipeline.isHoldOffBuildUntilSave() + + ") downstream pipeline " + downstreamPipeline.getFullName() + + " from upstream build " + + upstreamBuild.getFullDisplayName()); } rejectedPipelines.add(downstreamPipelineFullName); continue; } - WorkflowJobDependencyTrigger downstreamPipelineTrigger = this.globalPipelineMavenConfig.getPipelineTriggerService().getWorkflowJobDependencyTrigger((ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline); + WorkflowJobDependencyTrigger downstreamPipelineTrigger = this.globalPipelineMavenConfig + .getPipelineTriggerService() + .getWorkflowJobDependencyTrigger( + 
(ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline); if (downstreamPipelineTrigger == null) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of downstream pipeline " + downstreamPipeline.getFullName() + - " from upstream build " + upstreamBuild.getFullDisplayName() + ": dependency trigger not configured"); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of downstream pipeline " + + downstreamPipeline.getFullName() + " from upstream build " + + upstreamBuild.getFullDisplayName() + + ": dependency trigger not configured"); rejectedPipelines.add(downstreamPipelineFullName); continue; } - boolean downstreamVisibleByUpstreamBuildAuth = this.globalPipelineMavenConfig.getPipelineTriggerService().isDownstreamVisibleByUpstreamBuildAuth(downstreamPipeline); - boolean upstreamVisibleByDownstreamBuildAuth = this.globalPipelineMavenConfig.getPipelineTriggerService().isUpstreamBuildVisibleByDownstreamBuildAuth(upstreamPipeline, downstreamPipeline); + boolean downstreamVisibleByUpstreamBuildAuth = this.globalPipelineMavenConfig + .getPipelineTriggerService() + .isDownstreamVisibleByUpstreamBuildAuth(downstreamPipeline); + boolean upstreamVisibleByDownstreamBuildAuth = this.globalPipelineMavenConfig + .getPipelineTriggerService() + .isUpstreamBuildVisibleByDownstreamBuildAuth(upstreamPipeline, downstreamPipeline); if (LOGGER.isLoggable(Level.FINER)) { - LOGGER.log(Level.FINER, - "upstreamPipeline (" + upstreamPipelineFullName + ", visibleByDownstreamBuildAuth: " + upstreamVisibleByDownstreamBuildAuth + "), " + - " downstreamPipeline (" + downstreamPipeline.getFullName() + ", visibleByUpstreamBuildAuth: " + downstreamVisibleByUpstreamBuildAuth + "), " + - "upstreamBuildAuth: " + Jenkins.getAuthentication()); + LOGGER.log( + Level.FINER, + "upstreamPipeline (" + upstreamPipelineFullName + ", visibleByDownstreamBuildAuth: " + + upstreamVisibleByDownstreamBuildAuth + 
"), " + " downstreamPipeline (" + + downstreamPipeline.getFullName() + ", visibleByUpstreamBuildAuth: " + + downstreamVisibleByUpstreamBuildAuth + "), " + "upstreamBuildAuth: " + + Jenkins.getAuthentication()); } if (downstreamVisibleByUpstreamBuildAuth && upstreamVisibleByDownstreamBuildAuth) { - Set mavenArtifactsCausingTheTrigger = jobsToTrigger.computeIfAbsent(downstreamPipelineFullName, k -> new TreeSet<>()); - if(mavenArtifactsCausingTheTrigger.contains(mavenArtifact)) { + Set mavenArtifactsCausingTheTrigger = + jobsToTrigger.computeIfAbsent(downstreamPipelineFullName, k -> new TreeSet<>()); + if (mavenArtifactsCausingTheTrigger.contains(mavenArtifact)) { // TODO display warning } else { mavenArtifactsCausingTheTrigger.add(mavenArtifact); } } else { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of " + downstreamPipeline.getFullName() + " by " + - upstreamBuild.getFullDisplayName() + ": downstreamVisibleByUpstreamBuildAuth: " + downstreamVisibleByUpstreamBuildAuth + - ", upstreamVisibleByDownstreamBuildAuth: " + upstreamVisibleByDownstreamBuildAuth); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of " + + downstreamPipeline.getFullName() + " by " + upstreamBuild.getFullDisplayName() + + ": downstreamVisibleByUpstreamBuildAuth: " + + downstreamVisibleByUpstreamBuildAuth + + ", upstreamVisibleByDownstreamBuildAuth: " + + upstreamVisibleByDownstreamBuildAuth); } } } } - // note: we could verify that the upstreamBuild.getCauses().getOmittedPipelineFullNames are listed in jobsToTrigger + // note: we could verify that the upstreamBuild.getCauses().getOmittedPipelineFullNames are listed in + // jobsToTrigger // trigger the pipelines triggerPipelinesLoop: - for (Map.Entry> entry: jobsToTrigger.entrySet()) { + for (Map.Entry> entry : jobsToTrigger.entrySet()) { String downstreamJobFullName = entry.getKey(); Job downstreamJob 
= Jenkins.get().getItemByFullName(downstreamJobFullName, Job.class); if (downstreamJob == null) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: " + downstreamJobFullName + " not resolved"); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: " + + downstreamJobFullName + " not resolved"); continue; } Set mavenArtifacts = entry.getValue(); // See jenkins.triggers.ReverseBuildTrigger.RunListenerImpl.onCompleted(Run, TaskListener) - MavenDependencyUpstreamCause cause = new MavenDependencyUpstreamCause(upstreamBuild, mavenArtifacts, omittedPipelineTriggersByPipelineFullname.get(downstreamJobFullName)); + MavenDependencyUpstreamCause cause = new MavenDependencyUpstreamCause( + upstreamBuild, + mavenArtifacts, + omittedPipelineTriggersByPipelineFullname.get(downstreamJobFullName)); Run downstreamJobLastBuild = downstreamJob.getLastBuild(); if (downstreamJobLastBuild == null) { // should never happen, we need at least one build to know the dependencies // trigger downstream pipeline anyway } else { - List matchingMavenDependencies = MavenDependencyCauseHelper.isSameCause(cause, downstreamJobLastBuild.getCauses()); + List matchingMavenDependencies = + MavenDependencyCauseHelper.isSameCause(cause, downstreamJobLastBuild.getCauses()); if (matchingMavenDependencies.isEmpty()) { - for (Map.Entry> omittedPipeline : omittedPipelineTriggersByPipelineFullname.entrySet()) { + for (Map.Entry> omittedPipeline : + omittedPipelineTriggersByPipelineFullname.entrySet()) { if (omittedPipeline.getValue().contains(downstreamJobFullName)) { Job transitiveDownstreamJob = Jenkins.get().getItemByFullName(entry.getKey(), Job.class); - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " - + "downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + "because it will be triggered by transitive downstream " + 
ModelHyperlinkNote.encodeTo(transitiveDownstreamJob)); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + "downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + + "because it will be triggered by transitive downstream " + + ModelHyperlinkNote.encodeTo(transitiveDownstreamJob)); continue triggerPipelinesLoop; // don't trigger downstream pipeline } } // trigger downstream pipeline } else { downstreamJobLastBuild.addAction(new CauseAction(cause)); - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + " as it was already triggered for Maven dependencies: " + - matchingMavenDependencies.stream().map(mavenDependency -> mavenDependency == null ? null : mavenDependency.getShortDescription()).collect(Collectors.joining(", "))); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline " + + ModelHyperlinkNote.encodeTo(downstreamJob) + + " as it was already triggered for Maven dependencies: " + + matchingMavenDependencies.stream() + .map(mavenDependency -> mavenDependency == null + ? 
null + : mavenDependency.getShortDescription()) + .collect(Collectors.joining(", "))); try { downstreamJobLastBuild.save(); } catch (IOException e) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to update build " + downstreamJobLastBuild.getFullDisplayName() + ": " + e.toString()); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Failure to update build " + + downstreamJobLastBuild.getFullDisplayName() + ": " + e.toString()); } continue; // don't trigger downstream pipeline } @@ -376,9 +536,12 @@ public void onCompleted(Run upstreamBuild, @NonNull TaskListener listener) scheduleBuild(downstreamJob, cause, listener); } - long durationInMillis = TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTimeInNanos, TimeUnit.NANOSECONDS); + long durationInMillis = + TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTimeInNanos, TimeUnit.NANOSECONDS); if (durationInMillis > TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS) || LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - completed in " + durationInMillis + " ms"); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - completed in " + durationInMillis + + " ms"); } } @@ -387,24 +550,29 @@ private void scheduleBuild(Job downstreamJob, MavenDependencyUpstreamCause cause // by locking in hudson.model.Queue.schedule2() if (isInQueue(downstreamJob)) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamJob) + - " because it is already in the queue"); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + ModelHyperlinkNote.encodeTo(downstreamJob) + " because it is already in the queue"); } else { Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2(downstreamJob, -1, new CauseAction(cause)); 
String dependenciesMessage = cause.getMavenArtifactsDescription(); if (queuedItem == null) { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + " due to dependencies on " + - dependenciesMessage + ", invocation rejected."); + listener.getLogger() + .println( + "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline " + + ModelHyperlinkNote.encodeTo(downstreamJob) + " due to dependencies on " + + dependenciesMessage + ", invocation rejected."); } else { - listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + "#" + downstreamJob.getNextBuildNumber() + " due to dependency on " + - dependenciesMessage + " ..."); + listener.getLogger() + .println("[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline " + + ModelHyperlinkNote.encodeTo(downstreamJob) + "#" + downstreamJob.getNextBuildNumber() + + " due to dependency on " + dependenciesMessage + " ..."); } } } private boolean isInQueue(Job job) { - // isInQueue returns always false in WorkflowJob ! - return job instanceof Task && Jenkins.get().getQueue().contains((Task)job); + // isInQueue returns always false in WorkflowJob ! 
+ return job instanceof Task && Jenkins.get().getQueue().contains((Task) job); } - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java index 019a027d..c543525c 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java @@ -35,7 +35,6 @@ public abstract class AbstractHealthAwarePublisher extends MavenPublisher { */ private String thresholdLimit = DEFAULT_PRIORITY_THRESHOLD_LIMIT; - public String getHealthy() { return healthy; } @@ -76,27 +75,24 @@ protected void setHealthAwarePublisherAttributes(Object healthAwarePublisher) { @Override public String toString() { - return getClass().getName() + "[" + - "disabled='" + isDisabled() + '\'' + - ", healthy='" + healthy + '\'' + - ", unHealthy='" + unHealthy + '\'' + - ", thresholdLimit='" + thresholdLimit + '\'' + - ']'; + return getClass().getName() + "[" + "disabled='" + + isDisabled() + '\'' + ", healthy='" + + healthy + '\'' + ", unHealthy='" + + unHealthy + '\'' + ", thresholdLimit='" + + thresholdLimit + '\'' + ']'; } /** * Required by org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher/health.jelly */ - public static abstract class DescriptorImpl extends MavenPublisher.DescriptorImpl { - - } - + public abstract static class DescriptorImpl extends MavenPublisher.DescriptorImpl {} /** * @author Cyrille Le Clerc */ static class Helper { - protected static void setHealthAwarePublisherAttributes(Object healthAwarePublisherAsObject, AbstractHealthAwarePublisher abstractHealthAwarePublisher) { + protected static void setHealthAwarePublisherAttributes( + Object healthAwarePublisherAsObject, AbstractHealthAwarePublisher 
abstractHealthAwarePublisher) { if (healthAwarePublisherAsObject instanceof HealthAwarePublisher) { HealthAwarePublisher healthAwarePublisher = (HealthAwarePublisher) healthAwarePublisherAsObject; healthAwarePublisher.setHealthy(abstractHealthAwarePublisher.getHealthy()); @@ -105,4 +101,4 @@ protected static void setHealthAwarePublisherAttributes(Object healthAwarePublis } } } -} \ No newline at end of file +} diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java index 66ed4cce..0c91815d 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java @@ -18,6 +18,7 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; import htmlpublisher.HtmlPublisher; import htmlpublisher.HtmlPublisherTarget; import hudson.Extension; @@ -25,15 +26,6 @@ import hudson.model.Run; import hudson.model.StreamBuildListener; import hudson.model.TaskListener; -import org.jenkinsci.Symbol; -import org.jenkinsci.plugins.pipeline.maven.MavenPublisher; -import org.jenkinsci.plugins.pipeline.maven.Messages; -import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; -import org.jenkinsci.plugins.workflow.steps.StepContext; -import org.kohsuke.stapler.DataBoundConstructor; -import org.w3c.dom.Element; - -import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; @@ -45,6 +37,13 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; +import org.jenkinsci.Symbol; +import org.jenkinsci.plugins.pipeline.maven.MavenPublisher; +import org.jenkinsci.plugins.pipeline.maven.Messages; +import 
org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; +import org.jenkinsci.plugins.workflow.steps.StepContext; +import org.kohsuke.stapler.DataBoundConstructor; +import org.w3c.dom.Element; /** * @author Cyrille Le Clerc @@ -60,23 +59,21 @@ public class ConcordionTestsPublisher extends MavenPublisher { private static final long serialVersionUID = 1L; @DataBoundConstructor - public ConcordionTestsPublisher() { - - } + public ConcordionTestsPublisher() {} /* - - - - - - ${project.build.directory}/failsafe-reports - - target/concordion-reports - - - - */ + + + + + + ${project.build.directory}/failsafe-reports + + target/concordion-reports + + + + */ @Override public void process(@NonNull final StepContext context, @NonNull final Element mavenSpyLogsElt) throws IOException, InterruptedException { @@ -91,12 +88,15 @@ public void process(@NonNull final StepContext context, @NonNull final Element m final Run run = context.get(Run.class); Set concordionOutputDirPatterns = new HashSet<>(); - concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, GROUP_ID, SUREFIRE_ID, SUREFIRE_GOAL, "MojoSucceeded", "MojoFailed"))); - concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, GROUP_ID, FAILSAFE_ID, FAILSAFE_GOAL, "MojoSucceeded", "MojoFailed"))); + concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, GROUP_ID, SUREFIRE_ID, SUREFIRE_GOAL, "MojoSucceeded", "MojoFailed"))); + concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, GROUP_ID, FAILSAFE_ID, FAILSAFE_GOAL, "MojoSucceeded", "MojoFailed"))); if (concordionOutputDirPatterns.isEmpty()) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] concordionPublisher - No concordion output dir pattern given, skip."); + listener.getLogger() + 
.println("[withMaven] concordionPublisher - No concordion output dir pattern given, skip."); } return; } @@ -107,22 +107,23 @@ public void process(@NonNull final StepContext context, @NonNull final Element m } if (paths.isEmpty()) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println( - "[withMaven] concordionPublisher - Did not found any Concordion reports directory, skip."); + listener.getLogger() + .println( + "[withMaven] concordionPublisher - Did not found any Concordion reports directory, skip."); } return; } - listener.getLogger().println( - "[withMaven] concordionPublisher - Found " + paths.size() + " file(s) in Concordion reports directory."); - + listener.getLogger() + .println("[withMaven] concordionPublisher - Found " + paths.size() + + " file(s) in Concordion reports directory."); try { Class.forName("htmlpublisher.HtmlPublisher"); } catch (final ClassNotFoundException e) { listener.getLogger().print("[withMaven] concordionPublisher - Jenkins "); - listener.hyperlink("https://wiki.jenkins.io/display/JENKINS/HTML+Publisher+Plugin", - "HTML Publisher Plugin"); + listener.hyperlink( + "https://wiki.jenkins.io/display/JENKINS/HTML+Publisher+Plugin", "HTML Publisher Plugin"); listener.getLogger().println(" not found, do not archive concordion reports."); return; } @@ -132,14 +133,15 @@ public void process(@NonNull final StepContext context, @NonNull final Element m files.add(XmlUtils.getPathInWorkspace(path.getRemote(), workspace)); } - final HtmlPublisherTarget target = new HtmlPublisherTarget("Concordion reports", ".", - XmlUtils.join(files, ","), true, true, true); + final HtmlPublisherTarget target = + new HtmlPublisherTarget("Concordion reports", ".", XmlUtils.join(files, ","), true, true, true); try { - listener.getLogger().println( - "[withMaven] concordionPublisher - Publishing HTML reports named \"" + target.getReportName() + - "\" with the following files: " + target.getReportFiles()); - HtmlPublisher.publishReports(run, workspace, 
listener, Collections.singletonList(target), HtmlPublisher.class); + listener.getLogger() + .println("[withMaven] concordionPublisher - Publishing HTML reports named \"" + + target.getReportName() + "\" with the following files: " + target.getReportFiles()); + HtmlPublisher.publishReports( + run, workspace, listener, Collections.singletonList(target), HtmlPublisher.class); } catch (final Exception e) { listener.error("[withMaven] concordionPublisher - exception archiving Concordion reports: " + e); LOGGER.log(Level.WARNING, "Exception processing Concordion reports archiving", e); @@ -151,7 +153,8 @@ public void process(@NonNull final StepContext context, @NonNull final Element m private Collection findConcordionOutputDirPatterns(@NonNull List elements) { List result = new ArrayList<>(); for (Element element : elements) { - Element envVars = XmlUtils.getUniqueChildElementOrNull(XmlUtils.getUniqueChildElement(element, "plugin"), "systemPropertyVariables"); + Element envVars = XmlUtils.getUniqueChildElementOrNull( + XmlUtils.getUniqueChildElement(element, "plugin"), "systemPropertyVariables"); if (envVars != null) { Element concordionOutputDir = XmlUtils.getUniqueChildElementOrNull(envVars, "concordion.output.dir"); if (concordionOutputDir != null) { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java index 1256f891..e471a9c0 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java @@ -23,12 +23,23 @@ */ package org.jenkinsci.plugins.pipeline.maven.publishers; +import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies; + +import 
edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.FilePath; import hudson.model.FingerprintMap; import hudson.model.Run; import hudson.model.TaskListener; import hudson.tasks.Fingerprinter; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.logging.Level; +import java.util.logging.Logger; import jenkins.model.Jenkins; import org.apache.commons.lang.StringUtils; import org.jenkinsci.Symbol; @@ -40,18 +51,6 @@ import org.kohsuke.stapler.DataBoundSetter; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; -import java.util.logging.Level; -import java.util.logging.Logger; - -import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies; - /** * Fingerprint the dependencies of the maven project. 
* @@ -81,19 +80,16 @@ public DependenciesFingerprintPublisher() { protected Set getIncludedScopes() { Set includedScopes = new TreeSet<>(); - if (includeScopeCompile) - includedScopes.add("compile"); - if (includeScopeRuntime) - includedScopes.add("runtime"); - if (includeScopeProvided) - includedScopes.add("provided"); - if (includeScopeTest) - includedScopes.add("test"); + if (includeScopeCompile) includedScopes.add("compile"); + if (includeScopeRuntime) includedScopes.add("runtime"); + if (includeScopeProvided) includedScopes.add("provided"); + if (includeScopeTest) includedScopes.add("test"); return includedScopes; } @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { Run run = context.get(Run.class); TaskListener listener = context.get(TaskListener.class); @@ -102,9 +98,11 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE List dependencies = listDependencies(mavenSpyLogsElt, LOGGER); if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] dependenciesFingerprintPublisher - filter: " + - "versions[snapshot: " + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "], " + - "scopes:" + getIncludedScopes()); + listener.getLogger() + .println("[withMaven] dependenciesFingerprintPublisher - filter: " + "versions[snapshot: " + + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "], " + + "scopes:" + + getIncludedScopes()); } Map artifactsToFingerPrint = new HashMap<>(); // artifactPathInFingerprintZone -> artifactMd5 @@ -112,21 +110,24 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE if (dependency.isSnapshot()) { if (!includeSnapshotVersions) { if (LOGGER.isLoggable(Level.FINER)) { - 
listener.getLogger().println("[withMaven] Skip fingerprinting snapshot dependency: " + dependency); + listener.getLogger() + .println("[withMaven] Skip fingerprinting snapshot dependency: " + dependency); } continue; } } else { if (!includeReleaseVersions) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] Skip fingerprinting release dependency: " + dependency); + listener.getLogger() + .println("[withMaven] Skip fingerprinting release dependency: " + dependency); } continue; } } if (!getIncludedScopes().contains(dependency.getScope())) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] Skip fingerprinting dependency with ignored scope: " + dependency); + listener.getLogger() + .println("[withMaven] Skip fingerprinting dependency with ignored scope: " + dependency); } continue; } @@ -134,7 +135,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE try { if (StringUtils.isEmpty(dependency.getFile())) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] Can't fingerprint maven dependency with no file attached: " + dependency); + listener.getLogger() + .println("[withMaven] Can't fingerprint maven dependency with no file attached: " + + dependency); } continue; } @@ -144,28 +147,32 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE if (!(dependency.getFile().endsWith("." 
+ dependency.getExtension()))) { if (dependencyFilePath.isDirectory()) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] Skip fingerprinting of maven dependency of type directory " + dependency); + listener.getLogger() + .println("[withMaven] Skip fingerprinting of maven dependency of type directory " + + dependency); } continue; } } String dependencyMavenRepoStyleFilePath = - dependency.getGroupId().replace('.', '/') + "/" + - dependency.getArtifactId() + "/" + - dependency.getBaseVersion() + "/" + - dependency.getFileNameWithBaseVersion(); - + dependency.getGroupId().replace('.', '/') + "/" + dependency.getArtifactId() + + "/" + dependency.getBaseVersion() + + "/" + dependency.getFileNameWithBaseVersion(); if (dependencyFilePath.exists()) { - // the subsequent call to digest could test the existence but we don't want to prematurely optimize performances + // the subsequent call to digest could test the existence but we don't want to prematurely optimize + // performances if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] Fingerprint dependency " + dependencyMavenRepoStyleFilePath); + listener.getLogger() + .println("[withMaven] Fingerprint dependency " + dependencyMavenRepoStyleFilePath); } String artifactDigest = dependencyFilePath.digest(); artifactsToFingerPrint.put(dependencyMavenRepoStyleFilePath, artifactDigest); } else { - listener.getLogger().println("[withMaven] FAILURE to fingerprint " + dependencyMavenRepoStyleFilePath + ", file not found"); + listener.getLogger() + .println("[withMaven] FAILURE to fingerprint " + dependencyMavenRepoStyleFilePath + + ", file not found"); } } catch (IOException | RuntimeException e) { @@ -181,7 +188,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE for (Map.Entry artifactToFingerprint : artifactsToFingerPrint.entrySet()) { String artifactPathInFingerprintZone = artifactToFingerprint.getKey(); String artifactMd5 = 
artifactToFingerprint.getValue(); - fingerprintMap.getOrCreate(null, artifactPathInFingerprintZone, artifactMd5).addFor(run); + fingerprintMap + .getOrCreate(null, artifactPathInFingerprintZone, artifactMd5) + .addFor(run); } // add action @@ -195,11 +204,10 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE @Override public String toString() { - return getClass().getName() + "[" + - "disabled=" + isDisabled() + ", " + - "scopes=" + getIncludedScopes() + ", " + - "versions={snapshot:" + isIncludeSnapshotVersions() + ", release:" + isIncludeReleaseVersions() + "}" + - ']'; + return getClass().getName() + "[" + "disabled=" + + isDisabled() + ", " + "scopes=" + + getIncludedScopes() + ", " + "versions={snapshot:" + + isIncludeSnapshotVersions() + ", release:" + isIncludeReleaseVersions() + "}" + ']'; } public boolean isIncludeSnapshotVersions() { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java index 5abf5f37..a507cb9b 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java @@ -1,19 +1,17 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; -import org.apache.commons.lang.StringUtils; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; -import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; -import org.w3c.dom.Element; - import edu.umd.cs.findbugs.annotations.NonNull; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; +import org.apache.commons.lang.StringUtils; +import 
org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; +import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; +import org.w3c.dom.Element; /** * List dependencies from the spy log. @@ -27,30 +25,30 @@ public class DependenciesLister { * @return list of {@link MavenArtifact} */ @NonNull - public static List listDependencies(final Element mavenSpyLogs, - final Logger logger) { + public static List listDependencies(final Element mavenSpyLogs, final Logger logger) { final Set result = new HashSet<>(); - for (final Element dependencyResolutionResult : XmlUtils.getChildrenElements(mavenSpyLogs, - "DependencyResolutionResult")) { - final Element resolvedDependenciesElt = XmlUtils.getUniqueChildElementOrNull( - dependencyResolutionResult, "resolvedDependencies"); + for (final Element dependencyResolutionResult : + XmlUtils.getChildrenElements(mavenSpyLogs, "DependencyResolutionResult")) { + final Element resolvedDependenciesElt = + XmlUtils.getUniqueChildElementOrNull(dependencyResolutionResult, "resolvedDependencies"); if (resolvedDependenciesElt == null) { continue; } - for (final Element dependencyElt : XmlUtils.getChildrenElements(resolvedDependenciesElt, - "dependency")) { - final MavenDependency dependencyArtifact = XmlUtils.newMavenDependency( - dependencyElt); + for (final Element dependencyElt : XmlUtils.getChildrenElements(resolvedDependenciesElt, "dependency")) { + final MavenDependency dependencyArtifact = XmlUtils.newMavenDependency(dependencyElt); final Element fileElt = XmlUtils.getUniqueChildElementOrNull(dependencyElt, "file"); - if (fileElt == null || fileElt.getTextContent() == null + if (fileElt == null + || fileElt.getTextContent() == null || fileElt.getTextContent().isEmpty()) { - logger.log(Level.WARNING, "listDependencies: no associated file found for " - + dependencyArtifact + " in " + XmlUtils.toString(dependencyElt)); + logger.log( + Level.WARNING, + "listDependencies: no associated 
file found for " + dependencyArtifact + " in " + + XmlUtils.toString(dependencyElt)); } else { dependencyArtifact.setFile(StringUtils.trim(fileElt.getTextContent())); } @@ -67,15 +65,13 @@ public static List listDependencies(final Element mavenSpyLogs, * @return list of {@link MavenArtifact} */ @NonNull - public static List listParentProjects(final Element mavenSpyLogs, - final Logger logger) { + public static List listParentProjects(final Element mavenSpyLogs, final Logger logger) { final Set result = new HashSet<>(); - for (final Element dependencyResolutionResult : XmlUtils.getExecutionEvents(mavenSpyLogs, - "ProjectStarted")) { - final Element parentProjectElt = XmlUtils.getUniqueChildElementOrNull( - dependencyResolutionResult, "parentProject"); + for (final Element dependencyResolutionResult : XmlUtils.getExecutionEvents(mavenSpyLogs, "ProjectStarted")) { + final Element parentProjectElt = + XmlUtils.getUniqueChildElementOrNull(dependencyResolutionResult, "parentProject"); if (parentProjectElt == null) { continue; @@ -93,5 +89,4 @@ public static List listParentProjects(final Element mavenSpyLogs, return new ArrayList<>(result); } - } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java index 6f30bada..4a3850ea 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java @@ -24,6 +24,7 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; @@ -31,6 +32,11 @@ import hudson.model.StreamBuildListener; import hudson.model.TaskListener; import hudson.plugins.findbugs.FindBugsPublisher; +import 
java.io.IOException; +import java.io.OutputStream; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.MavenSpyLogProcessor; @@ -40,13 +46,6 @@ import org.kohsuke.stapler.DataBoundConstructor; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import java.io.OutputStream; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; - /** * @author Cyrille Le Clerc */ @@ -56,9 +55,7 @@ public class FindbugsAnalysisPublisher extends AbstractHealthAwarePublisher { private static final long serialVersionUID = 1L; @DataBoundConstructor - public FindbugsAnalysisPublisher() { - - } + public FindbugsAnalysisPublisher() {} /* @@ -161,7 +158,8 @@ public FindbugsAnalysisPublisher() { */ @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { TaskListener listener = context.get(TaskListener.class); if (listener == null) { @@ -177,11 +175,19 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } catch (ClassNotFoundException e) { listener.getLogger().print("[withMaven] Jenkins "); listener.hyperlink("https://wiki.jenkins-ci.org/display/JENKINS/FindBugs+Plugin", "FindBugs Plugin"); - listener.getLogger().println(" not found, don't display org.codehaus.mojo:findbugs-maven-plugin:findbugs results in pipeline screen."); + listener.getLogger() + .println( + " not found, don't display org.codehaus.mojo:findbugs-maven-plugin:findbugs results in pipeline screen."); return; } - List findbugsEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, "org.codehaus.mojo", 
"findbugs-maven-plugin", "findbugs", "MojoSucceeded", "MojoFailed"); + List findbugsEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, + "org.codehaus.mojo", + "findbugs-maven-plugin", + "findbugs", + "MojoSucceeded", + "MojoFailed"); if (findbugsEvents.isEmpty()) { LOGGER.log(Level.FINE, "No org.codehaus.mojo:findbugs-maven-plugin:findbugs execution found"); @@ -197,14 +203,18 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE MavenSpyLogProcessor.PluginInvocation pluginInvocation = XmlUtils.newPluginInvocation(pluginElt); if (xmlOutputDirectoryElt == null) { - listener.getLogger().println("[withMaven] No element found for in " + XmlUtils.toString(findBugsTestEvent)); + listener.getLogger() + .println("[withMaven] No element found for in " + + XmlUtils.toString(findBugsTestEvent)); continue; } String xmlOutputDirectory = xmlOutputDirectoryElt.getTextContent().trim(); if (xmlOutputDirectory.contains("${project.build.directory}")) { String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt); if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) { - listener.getLogger().println("[withMaven] '${project.build.directory}' found for in " + XmlUtils.toString(findBugsTestEvent)); + listener.getLogger() + .println("[withMaven] '${project.build.directory}' found for in " + + XmlUtils.toString(findBugsTestEvent)); continue; } @@ -213,7 +223,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } else if (xmlOutputDirectory.contains("${basedir}")) { String baseDir = projectElt.getAttribute("baseDir"); if (baseDir.isEmpty()) { - listener.getLogger().println("[withMaven] '${basedir}' found for in " + XmlUtils.toString(findBugsTestEvent)); + listener.getLogger() + .println("[withMaven] '${basedir}' found for in " + + XmlUtils.toString(findBugsTestEvent)); continue; } @@ -223,8 +235,10 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE 
xmlOutputDirectory = XmlUtils.getPathInWorkspace(xmlOutputDirectory, workspace); String findBugsResultsFile = xmlOutputDirectory + "/findbugsXml.xml"; - listener.getLogger().println("[withMaven] findbugsPublisher - Archive FindBugs analysis results for Maven artifact " + mavenArtifact.toString() + " generated by " + - pluginInvocation + ": " + findBugsResultsFile); + listener.getLogger() + .println("[withMaven] findbugsPublisher - Archive FindBugs analysis results for Maven artifact " + + mavenArtifact.toString() + " generated by " + pluginInvocation + ": " + + findBugsResultsFile); FindBugsPublisher findBugsPublisher = new FindBugsPublisher(); findBugsPublisher.setPattern(findBugsResultsFile); @@ -234,11 +248,15 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE try { findBugsPublisher.perform(run, workspace, launcher, listener); } catch (Exception e) { - listener.error("[withMaven] findbugsPublisher - exception archiving FindBugs results for Maven artifact " + mavenArtifact.toString() + " generated by " + - pluginInvocation + ": " + e); + listener.error( + "[withMaven] findbugsPublisher - exception archiving FindBugs results for Maven artifact " + + mavenArtifact.toString() + " generated by " + pluginInvocation + ": " + e); LOGGER.log(Level.WARNING, "Exception processing " + XmlUtils.toString(findBugsTestEvent), e); - throw new MavenPipelinePublisherException("findbugsPublisher", - "archiving FindBugs results for Maven artifact " + mavenArtifact.getId() + " generated by " + pluginInvocation.getId(), e); + throw new MavenPipelinePublisherException( + "findbugsPublisher", + "archiving FindBugs results for Maven artifact " + mavenArtifact.getId() + " generated by " + + pluginInvocation.getId(), + e); } } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsPublisher.java 
b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsPublisher.java index a76a99a4..1f258f86 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsPublisher.java @@ -1,5 +1,14 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.Extension; +import hudson.FilePath; +import hudson.Launcher; +import hudson.model.FingerprintMap; +import hudson.model.Run; +import hudson.model.StreamBuildListener; +import hudson.model.TaskListener; +import hudson.tasks.Fingerprinter; import java.io.IOException; import java.io.OutputStream; import java.util.HashMap; @@ -8,9 +17,9 @@ import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; - -import edu.umd.cs.findbugs.annotations.NonNull; - +import jenkins.model.ArtifactManager; +import jenkins.model.Jenkins; +import jenkins.util.BuildListenerAdapter; import org.apache.commons.lang.StringUtils; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; @@ -22,18 +31,6 @@ import org.kohsuke.stapler.DataBoundSetter; import org.w3c.dom.Element; -import hudson.Extension; -import hudson.FilePath; -import hudson.Launcher; -import hudson.model.FingerprintMap; -import hudson.model.Run; -import hudson.model.StreamBuildListener; -import hudson.model.TaskListener; -import hudson.tasks.Fingerprinter; -import jenkins.model.ArtifactManager; -import jenkins.model.Jenkins; -import jenkins.util.BuildListenerAdapter; - /** * @author Cyrille Le Clerc */ @@ -47,14 +44,12 @@ public class GeneratedArtifactsPublisher extends MavenPublisher { private boolean fingerprintFilesDisabled = false; - @DataBoundConstructor - public GeneratedArtifactsPublisher() { - - } + public GeneratedArtifactsPublisher() {} @Override - public 
void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { Run run = context.get(Run.class); ArtifactManager artifactManager = run.pickArtifactManager(); @@ -67,95 +62,118 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE FilePath workspace = context.get(FilePath.class); List join = XmlUtils.listGeneratedArtifacts(mavenSpyLogsElt, true); - Map artifactsToArchive = new HashMap<>(); // artifactPathInArchiveZone -> artifactPathInWorkspace + Map artifactsToArchive = + new HashMap<>(); // artifactPathInArchiveZone -> artifactPathInWorkspace Map artifactsToFingerPrint = new HashMap<>(); // artifactPathInArchiveZone -> artifactMd5 for (MavenArtifact mavenArtifact : join) { try { if (StringUtils.isEmpty(mavenArtifact.getFile())) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] artifactsPublisher - Can't archive maven artifact with no file attached: " + mavenArtifact); + listener.getLogger() + .println( + "[withMaven] artifactsPublisher - Can't archive maven artifact with no file attached: " + + mavenArtifact); } continue; } else if (!(mavenArtifact.getFile().endsWith("." 
+ mavenArtifact.getExtension()))) { - FilePath mavenGeneratedArtifact = workspace.child(XmlUtils.getPathInWorkspace(mavenArtifact.getFile(), workspace)); + FilePath mavenGeneratedArtifact = + workspace.child(XmlUtils.getPathInWorkspace(mavenArtifact.getFile(), workspace)); if (mavenGeneratedArtifact.isDirectory()) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] artifactsPublisher - Skip archiving for generated maven artifact of type directory (it's likely to be target/classes, see JENKINS-43714) " + mavenArtifact); + listener.getLogger() + .println( + "[withMaven] artifactsPublisher - Skip archiving for generated maven artifact of type directory (it's likely to be target/classes, see JENKINS-43714) " + + mavenArtifact); } continue; } } String artifactPathInArchiveZone = - mavenArtifact.getGroupId().replace(".", "/") + "/" + - mavenArtifact.getArtifactId() + "/" + - mavenArtifact.getBaseVersion() + "/" + - mavenArtifact.getFileNameWithBaseVersion(); + mavenArtifact.getGroupId().replace(".", "/") + "/" + mavenArtifact.getArtifactId() + + "/" + mavenArtifact.getBaseVersion() + + "/" + mavenArtifact.getFileNameWithBaseVersion(); - String artifactPathInWorkspace = XmlUtils.getPathInWorkspace(mavenArtifact.getFile(), workspace).replace('\\', '/'); + String artifactPathInWorkspace = XmlUtils.getPathInWorkspace(mavenArtifact.getFile(), workspace) + .replace('\\', '/'); if (StringUtils.isEmpty(artifactPathInWorkspace)) { - listener.error("[withMaven] artifactsPublisher - Invalid path in the workspace (" + workspace.getRemote() + ") for artifact " + mavenArtifact); - } else if (Objects.equals(artifactPathInArchiveZone, mavenArtifact.getFile())) { // troubleshoot JENKINS-44088 - listener.error("[withMaven] artifactsPublisher - Failed to relativize '" + mavenArtifact.getFile() + "' in workspace '" + workspace.getRemote() + "'"); + listener.error("[withMaven] artifactsPublisher - Invalid path in the workspace (" + + workspace.getRemote() + ") 
for artifact " + mavenArtifact); + } else if (Objects.equals( + artifactPathInArchiveZone, mavenArtifact.getFile())) { // troubleshoot JENKINS-44088 + listener.error("[withMaven] artifactsPublisher - Failed to relativize '" + mavenArtifact.getFile() + + "' in workspace '" + workspace.getRemote() + "'"); } else { FilePath artifactFilePath = new FilePath(workspace, artifactPathInWorkspace); if (artifactFilePath.exists()) { - // the subsequent call to digest could test the existence but we don't want to prematurely optimize performances - listener.getLogger().println("[withMaven] artifactsPublisher - Archive artifact " + artifactPathInWorkspace + " under " + artifactPathInArchiveZone); + // the subsequent call to digest could test the existence but we don't want to prematurely + // optimize performances + listener.getLogger() + .println("[withMaven] artifactsPublisher - Archive artifact " + artifactPathInWorkspace + + " under " + artifactPathInArchiveZone); artifactsToArchive.put(artifactPathInArchiveZone, artifactPathInWorkspace); if (!fingerprintFilesDisabled) { - String artifactDigest = artifactFilePath.digest(); - artifactsToFingerPrint.put(artifactPathInArchiveZone, artifactDigest); + String artifactDigest = artifactFilePath.digest(); + artifactsToFingerPrint.put(artifactPathInArchiveZone, artifactDigest); } } else { - listener.getLogger().println("[withMaven] artifactsPublisher - FAILURE to archive " + artifactPathInWorkspace + " under " + artifactPathInArchiveZone + ", file not found in workspace " + workspace); + listener.getLogger() + .println("[withMaven] artifactsPublisher - FAILURE to archive " + + artifactPathInWorkspace + " under " + artifactPathInArchiveZone + + ", file not found in workspace " + workspace); } } } catch (IOException | RuntimeException e) { - listener.error("[withMaven] artifactsPublisher - WARNING: Exception archiving and fingerprinting " + mavenArtifact + ", skip archiving of the artifacts"); + listener.error("[withMaven] 
artifactsPublisher - WARNING: Exception archiving and fingerprinting " + + mavenArtifact + ", skip archiving of the artifacts"); e.printStackTrace(listener.getLogger()); listener.getLogger().flush(); } } if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] artifactsPublisher - Archive and fingerprint artifacts " + artifactsToArchive + " located in workspace " + workspace.getRemote()); + listener.getLogger() + .println("[withMaven] artifactsPublisher - Archive and fingerprint artifacts " + artifactsToArchive + + " located in workspace " + workspace.getRemote()); } // ARCHIVE GENERATED MAVEN ARTIFACT // see org.jenkinsci.plugins.workflow.steps.ArtifactArchiverStepExecution#run if (!archiveFilesDisabled) { - try { - artifactManager.archive(workspace, launcher, new BuildListenerAdapter(listener), artifactsToArchive); - } catch (IOException e) { - throw new IOException("Exception archiving " + artifactsToArchive, e); - } catch (RuntimeException e) { - throw new RuntimeException("Exception archiving " + artifactsToArchive, e); - } + try { + artifactManager.archive(workspace, launcher, new BuildListenerAdapter(listener), artifactsToArchive); + } catch (IOException e) { + throw new IOException("Exception archiving " + artifactsToArchive, e); + } catch (RuntimeException e) { + throw new RuntimeException("Exception archiving " + artifactsToArchive, e); + } } // FINGERPRINT GENERATED MAVEN ARTIFACT if (!fingerprintFilesDisabled) { - synchronized (this) { // to avoid exceptions when creating folders under Jenkins home - FingerprintMap fingerprintMap = Jenkins.get().getFingerprintMap(); - for (Map.Entry artifactToFingerprint : artifactsToFingerPrint.entrySet()) { - String artifactPathInArchiveZone = artifactToFingerprint.getKey(); - String artifactMd5 = artifactToFingerprint.getValue(); - fingerprintMap.getOrCreate(run, artifactPathInArchiveZone, artifactMd5).addFor(run); - } - } - - // add action - Fingerprinter.FingerprintAction fingerprintAction = 
run.getAction(Fingerprinter.FingerprintAction.class); - if (fingerprintAction == null) { - run.addAction(new Fingerprinter.FingerprintAction(run, artifactsToFingerPrint)); - } else { - fingerprintAction.add(artifactsToFingerPrint); - } + synchronized (this) { // to avoid exceptions when creating folders under Jenkins home + FingerprintMap fingerprintMap = Jenkins.get().getFingerprintMap(); + for (Map.Entry artifactToFingerprint : artifactsToFingerPrint.entrySet()) { + String artifactPathInArchiveZone = artifactToFingerprint.getKey(); + String artifactMd5 = artifactToFingerprint.getValue(); + fingerprintMap + .getOrCreate(run, artifactPathInArchiveZone, artifactMd5) + .addFor(run); + } + } + + // add action + Fingerprinter.FingerprintAction fingerprintAction = run.getAction(Fingerprinter.FingerprintAction.class); + if (fingerprintAction == null) { + run.addAction(new Fingerprinter.FingerprintAction(run, artifactsToFingerPrint)); + } else { + fingerprintAction.add(artifactsToFingerPrint); + } } } @Symbol("artifactsPublisher") - @Extension public static class DescriptorImpl extends MavenPublisher.DescriptorImpl { + @Extension + public static class DescriptorImpl extends MavenPublisher.DescriptorImpl { @NonNull @Override public String getDisplayName() { @@ -167,7 +185,6 @@ public int ordinal() { return 1; } - @NonNull @Override public String getSkipFileName() { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisher.java index d785a645..d1563762 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisher.java @@ -24,6 +24,8 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; +import 
edu.umd.cs.findbugs.annotations.Nullable; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; @@ -31,7 +33,12 @@ import hudson.model.StreamBuildListener; import hudson.model.TaskListener; import hudson.tasks.junit.JUnitResultArchiver; - +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.maveninvoker.MavenInvokerRecorder; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; @@ -45,15 +52,6 @@ import org.kohsuke.stapler.DataBoundConstructor; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; - public class InvokerRunsPublisher extends MavenPublisher { private static final Logger LOGGER = Logger.getLogger(InvokerRunsPublisher.class.getName()); protected static final String GROUP_ID = "org.apache.maven.plugins"; @@ -64,24 +62,23 @@ public class InvokerRunsPublisher extends MavenPublisher { private static final long serialVersionUID = 1L; @DataBoundConstructor - public InvokerRunsPublisher() { - - } + public InvokerRunsPublisher() {} /* - - - - - - ${invoker.projectsDirectory} - /var/lib/jenkins/workspace/ncjira-maven-plugin-pipeline/target/it - ${invoker.reportsDirectory} - - - */ + + + + + + ${invoker.projectsDirectory} + /var/lib/jenkins/workspace/ncjira-maven-plugin-pipeline/target/it + ${invoker.reportsDirectory} + + + */ @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { TaskListener 
listener = context.get(TaskListener.class); if (listener == null) { @@ -90,14 +87,17 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } List events = new ArrayList<>(); - events.addAll(XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, GROUP_ID, ARTIFACT_ID, RUN_GOAL, "MojoSucceeded", "MojoFailed")); - events.addAll(XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, GROUP_ID, ARTIFACT_ID, INTEGRATION_TEST_GOAL, "MojoSucceeded", "MojoFailed")); + events.addAll(XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, GROUP_ID, ARTIFACT_ID, RUN_GOAL, "MojoSucceeded", "MojoFailed")); + events.addAll(XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, GROUP_ID, ARTIFACT_ID, INTEGRATION_TEST_GOAL, "MojoSucceeded", "MojoFailed")); if (events.isEmpty()) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] invokerPublisher - " + - "No " + GROUP_ID + ":" + ARTIFACT_ID + ":" + RUN_GOAL + - " or " + GROUP_ID + ":" + ARTIFACT_ID + ":" + INTEGRATION_TEST_GOAL + " execution found"); + listener.getLogger() + .println("[withMaven] invokerPublisher - " + "No " + + GROUP_ID + ":" + ARTIFACT_ID + ":" + RUN_GOAL + " or " + + GROUP_ID + ":" + ARTIFACT_ID + ":" + INTEGRATION_TEST_GOAL + " execution found"); } return; } @@ -107,7 +107,11 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE events.forEach(e -> { Element pluginElt = XmlUtils.getUniqueChildElement(e, "plugin"); Element writeJunitReportElt = XmlUtils.getUniqueChildElementOrNull(pluginElt, "writeJunitReport"); - if (writeJunitReportElt != null && writeJunitReportElt.getTextContent() != null && "true".equalsIgnoreCase(writeJunitReportElt.getTextContent().trim())) { + if (writeJunitReportElt != null + && writeJunitReportElt.getTextContent() != null + && "true" + .equalsIgnoreCase( + writeJunitReportElt.getTextContent().trim())) { junitLikeEvents.add(e); } else { oldEvents.add(e); @@ -118,7 +122,8 @@ public void process(@NonNull 
StepContext context, @NonNull Element mavenSpyLogsE manageOldEvents(context, listener, oldEvents); } - private void manageEvents(StepContext context, TaskListener listener, List testEvents) throws IOException, InterruptedException { + private void manageEvents(StepContext context, TaskListener listener, List testEvents) + throws IOException, InterruptedException { if (testEvents.isEmpty()) { return; } @@ -128,8 +133,10 @@ private void manageEvents(StepContext context, TaskListener listener, List element found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] invokerPublisher - No element found for in " + + XmlUtils.toString(testEvent)); continue; } - String reportsDirectory = expandAndRelativize(reportsDirectoryElt, "reportsDirectory", testEvent, projectElt, workspace, listener); + String reportsDirectory = expandAndRelativize( + reportsDirectoryElt, "reportsDirectory", testEvent, projectElt, workspace, listener); String testResults = reportsDirectory + fileSeparatorOnAgent + "TEST-*.xml"; - listener.getLogger().println("[withMaven] invokerPublisher - Archive test results for Maven artifact " + mavenArtifact.getId() + " generated by " + - pluginInvocation.getId() + ": " + testResults); + listener.getLogger() + .println("[withMaven] invokerPublisher - Archive test results for Maven artifact " + + mavenArtifact.getId() + " generated by " + pluginInvocation.getId() + ": " + testResults); if (testResultsList.contains(testResults)) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] invokerPublisher - Ignore already added testResults " + testResults); + listener.getLogger() + .println("[withMaven] invokerPublisher - Ignore already added testResults " + testResults); } } else { testResultsList.add(testResults); @@ -167,13 +179,16 @@ private void manageEvents(StepContext context, TaskListener listener, List testEvents) throws IOException, InterruptedException { + private void 
manageOldEvents(StepContext context, TaskListener listener, List testEvents) + throws IOException, InterruptedException { if (testEvents.isEmpty()) { return; } listener.getLogger().print("[withMaven] invokerPublisher - Found invoker tests without the "); - listener.hyperlink("https://maven.apache.org/plugins/maven-invoker-plugin/run-mojo.html#writeJunitReport", "writeJunitReport parameter"); + listener.hyperlink( + "https://maven.apache.org/plugins/maven-invoker-plugin/run-mojo.html#writeJunitReport", + "writeJunitReport parameter"); listener.getLogger().print(" set. According to the "); listener.hyperlink("https://github.com/jenkinsci/maven-invoker-plugin", "Maven Invoker Plugin"); listener.getLogger().println(" plugin, this is now the way to go."); @@ -183,14 +198,17 @@ private void manageOldEvents(StepContext context, TaskListener listener, List testEvents) throws IOException, InterruptedException { + private void executeReporterForOldEvents(StepContext context, TaskListener listener, List testEvents) + throws IOException, InterruptedException { FilePath workspace = context.get(FilePath.class); final String fileSeparatorOnAgent = XmlUtils.getFileSeparatorOnRemote(workspace); Run run = context.get(Run.class); @@ -206,34 +224,55 @@ private void executeReporterForOldEvents(StepContext context, TaskListener liste MavenArtifact mavenArtifact = XmlUtils.newMavenArtifact(projectElt); MavenSpyLogProcessor.PluginInvocation pluginInvocation = XmlUtils.newPluginInvocation(pluginElt); - String reportsDirectory = expandAndRelativize(reportsDirectoryElt, "reportsDirectory", testEvent, projectElt, workspace, listener); - String projectsDirectory = expandAndRelativize(projectsDirectoryElt, "projectsDirectory", testEvent, projectElt, workspace, listener); - String cloneProjectsTo = expandAndRelativize(cloneProjectsToElt, "cloneProjectsTo", testEvent, projectElt, workspace, listener); - if (reportsDirectory == null || projectsDirectory == null ) { - 
listener.getLogger().println("[withMaven] invokerPublisher - Missing or element for in " + XmlUtils.toString(testEvent)); + String reportsDirectory = expandAndRelativize( + reportsDirectoryElt, "reportsDirectory", testEvent, projectElt, workspace, listener); + String projectsDirectory = expandAndRelativize( + projectsDirectoryElt, "projectsDirectory", testEvent, projectElt, workspace, listener); + String cloneProjectsTo = expandAndRelativize( + cloneProjectsToElt, "cloneProjectsTo", testEvent, projectElt, workspace, listener); + if (reportsDirectory == null || projectsDirectory == null) { + listener.getLogger() + .println( + "[withMaven] invokerPublisher - Missing or element for in " + + XmlUtils.toString(testEvent)); continue; } String testResults = reportsDirectory + fileSeparatorOnAgent + "*.xml"; - listener.getLogger().println("[withMaven] invokerPublisher - Archive invoker results for Maven artifact " + mavenArtifact.getId() + " generated by " + - pluginInvocation.getId() + ": " + testResults); - MavenInvokerRecorder archiver = new MavenInvokerRecorder("**/" + reportsDirectory + "/BUILD*.xml", "**/" + (cloneProjectsTo != null ? cloneProjectsTo : projectsDirectory)); + listener.getLogger() + .println("[withMaven] invokerPublisher - Archive invoker results for Maven artifact " + + mavenArtifact.getId() + " generated by " + pluginInvocation.getId() + ": " + testResults); + MavenInvokerRecorder archiver = new MavenInvokerRecorder( + "**/" + reportsDirectory + "/BUILD*.xml", + "**/" + (cloneProjectsTo != null ? 
cloneProjectsTo : projectsDirectory)); try { archiver.perform(run, workspace, launcher, listener, node); } catch (Exception e) { - listener.error("[withMaven] invokerPublisher - exception archiving Invoker runs for Maven artifact " + mavenArtifact.getId() + " generated by " + pluginInvocation.getId() + ": " + e); + listener.error("[withMaven] invokerPublisher - exception archiving Invoker runs for Maven artifact " + + mavenArtifact.getId() + " generated by " + pluginInvocation.getId() + ": " + e); LOGGER.log(Level.WARNING, "Exception processing " + XmlUtils.toString(testEvent), e); - throw new MavenPipelinePublisherException("invokerPublisher", - "archiving Invoker runs for Maven artifact " + mavenArtifact.getId() + " generated by " + pluginInvocation.getId(), e); + throw new MavenPipelinePublisherException( + "invokerPublisher", + "archiving Invoker runs for Maven artifact " + mavenArtifact.getId() + " generated by " + + pluginInvocation.getId(), + e); } } } @Nullable - protected String expandAndRelativize(@Nullable Element element, @Nullable String name, Element testEvent, Element projectElt, FilePath workspace, TaskListener listener) { + protected String expandAndRelativize( + @Nullable Element element, + @Nullable String name, + Element testEvent, + Element projectElt, + FilePath workspace, + TaskListener listener) { if (element == null) { - listener.getLogger().println("[withMaven] invokerPublisher - No <" + name + "> element found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] invokerPublisher - No <" + name + "> element found for in " + + XmlUtils.toString(testEvent)); return null; } @@ -248,7 +287,9 @@ protected String expandAndRelativize(@Nullable Element element, @Nullable String if (result.contains("${project.build.directory}")) { String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt); if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) { - 
listener.getLogger().println("[withMaven] invokerPublisher - '${project.build.directory}' found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] invokerPublisher - '${project.build.directory}' found for in " + + XmlUtils.toString(testEvent)); return null; } @@ -257,7 +298,9 @@ protected String expandAndRelativize(@Nullable Element element, @Nullable String } else if (result.contains("${basedir}")) { String baseDir = projectElt.getAttribute("baseDir"); if (baseDir.isEmpty()) { - listener.getLogger().println("[withMaven] invokerPublisher - '${basedir}' NOT found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] invokerPublisher - '${basedir}' NOT found for in " + + XmlUtils.toString(testEvent)); return null; } @@ -266,7 +309,9 @@ protected String expandAndRelativize(@Nullable Element element, @Nullable String char separator = FileUtils.isWindows(result) ? '\\' : '/'; String baseDir = projectElt.getAttribute("baseDir"); if (baseDir.isEmpty()) { - listener.getLogger().println("[withMaven] invokerPublisher - '${basedir}' NOT found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] invokerPublisher - '${basedir}' NOT found for in " + + XmlUtils.toString(testEvent)); return null; } result = baseDir + separator + result; @@ -292,7 +337,6 @@ public int ordinal() { return 10; } - @NonNull @Override public String getSkipFileName() { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JGivenTestsPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JGivenTestsPublisher.java index 37db5cf2..b897ff16 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JGivenTestsPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JGivenTestsPublisher.java @@ -20,15 +20,19 @@ import static 
org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies; +import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.Extension; +import hudson.FilePath; +import hudson.Launcher; +import hudson.model.Run; +import hudson.model.StreamBuildListener; +import hudson.model.TaskListener; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; - -import edu.umd.cs.findbugs.annotations.NonNull; - import org.jenkinsci.Symbol; import org.jenkinsci.plugins.jgiven.JgivenReportGenerator; import org.jenkinsci.plugins.pipeline.maven.MavenDependency; @@ -38,13 +42,6 @@ import org.kohsuke.stapler.DataBoundConstructor; import org.w3c.dom.Element; -import hudson.Extension; -import hudson.FilePath; -import hudson.Launcher; -import hudson.model.Run; -import hudson.model.StreamBuildListener; -import hudson.model.TaskListener; - /** * @author Cyrille Le Clerc */ @@ -57,9 +54,7 @@ public class JGivenTestsPublisher extends MavenPublisher { private static final long serialVersionUID = 1L; @DataBoundConstructor - public JGivenTestsPublisher() { - - } + public JGivenTestsPublisher() {} @Override public void process(@NonNull final StepContext context, @NonNull final Element mavenSpyLogsElt) @@ -94,8 +89,9 @@ public void process(@NonNull final StepContext context, @NonNull final Element m } if (!foundJGivenDependency) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println( - "[withMaven] jgivenPublisher - JGiven not found within your project dependencies, aborting."); + listener.getLogger() + .println( + "[withMaven] jgivenPublisher - JGiven not found within your project dependencies, aborting."); } return; } @@ -104,8 +100,9 @@ public void process(@NonNull final StepContext context, @NonNull final Element m final FilePath[] paths = workspace.list(pattern); if (paths == null || paths.length == 0) { if (LOGGER.isLoggable(Level.FINE)) { 
- listener.getLogger().println("[withMaven] jgivenPublisher - Pattern \"" + pattern - + "\" does not match any file on workspace, aborting."); + listener.getLogger() + .println("[withMaven] jgivenPublisher - Pattern \"" + pattern + + "\" does not match any file on workspace, aborting."); } return; } @@ -116,8 +113,7 @@ public void process(@NonNull final StepContext context, @NonNull final Element m listener.getLogger().println("[withMaven] jgivenPublisher - Running JGiven report generator"); generator.perform(run, workspace, launcher, listener); } catch (final Exception e) { - listener.error( - "[withMaven] jgivenPublisher - exception archiving JGiven reports: " + e); + listener.error("[withMaven] jgivenPublisher - exception archiving JGiven reports: " + e); LOGGER.log(Level.WARNING, "Exception processing JGiven reports archiving", e); throw new MavenPipelinePublisherException("jgivenPublisher", "archiving JGiven reports", e); } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JUnitUtils.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JUnitUtils.java index 4e383210..c296ec28 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JUnitUtils.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JUnitUtils.java @@ -24,15 +24,6 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; -import java.io.IOException; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; - -import org.jenkinsci.plugins.workflow.actions.WarningAction; -import org.jenkinsci.plugins.workflow.graph.FlowNode; -import org.jenkinsci.plugins.workflow.steps.StepContext; - import hudson.FilePath; import hudson.Launcher; import hudson.model.Result; @@ -42,12 +33,20 @@ import hudson.tasks.junit.TestResultSummary; import hudson.tasks.junit.pipeline.JUnitResultsStepExecution; import hudson.tasks.test.PipelineTestDetails; 
+import java.io.IOException; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.jenkinsci.plugins.workflow.actions.WarningAction; +import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.jenkinsci.plugins.workflow.steps.StepContext; public class JUnitUtils { private static final Logger LOGGER = Logger.getLogger(JUnitUtils.class.getName()); - static JUnitResultArchiver buildArchiver(final String testResults, final boolean keepLongStdio, final Double healthScaleFactor) { + static JUnitResultArchiver buildArchiver( + final String testResults, final boolean keepLongStdio, final Double healthScaleFactor) { final JUnitResultArchiver archiver = new JUnitResultArchiver(testResults); // even if "org.apache.maven.plugins:maven-surefire-plugin@test" succeeds, it // maybe with "-DskipTests" and thus not have any test results. @@ -59,7 +58,11 @@ static JUnitResultArchiver buildArchiver(final String testResults, final boolean return archiver; } - static void archiveResults(final StepContext context, final JUnitResultArchiver archiver, final String testResults, final String publisherName) + static void archiveResults( + final StepContext context, + final JUnitResultArchiver archiver, + final String testResults, + final String publisherName) throws IOException, InterruptedException { TaskListener listener = context.get(TaskListener.class); FilePath workspace = context.get(FilePath.class); @@ -73,18 +76,22 @@ static void archiveResults(final StepContext context, final JUnitResultArchiver PipelineTestDetails pipelineTestDetails = new PipelineTestDetails(); pipelineTestDetails.setNodeId(nodeId); pipelineTestDetails.setEnclosingBlocks(JUnitResultsStepExecution.getEnclosingBlockIds(enclosingBlocks)); - pipelineTestDetails.setEnclosingBlockNames(JUnitResultsStepExecution.getEnclosingBlockNames(enclosingBlocks)); + pipelineTestDetails.setEnclosingBlockNames( + JUnitResultsStepExecution.getEnclosingBlockNames(enclosingBlocks)); 
if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] " + publisherName + " - collect test reports: testResults=" + archiver.getTestResults() - + ", healthScaleFactor=" + archiver.getHealthScaleFactor()); + listener.getLogger() + .println("[withMaven] " + publisherName + " - collect test reports: testResults=" + + archiver.getTestResults() + ", healthScaleFactor=" + archiver.getHealthScaleFactor()); } - TestResultSummary testResultSummary = JUnitResultArchiver.parseAndSummarize(archiver, pipelineTestDetails, run, workspace, launcher, listener); + TestResultSummary testResultSummary = JUnitResultArchiver.parseAndSummarize( + archiver, pipelineTestDetails, run, workspace, launcher, listener); if (testResultSummary == null) { // no unit test results found if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] " + publisherName + " - no unit test results found, ignore"); + listener.getLogger() + .println("[withMaven] " + publisherName + " - no unit test results found, ignore"); } } else if (testResultSummary.getFailCount() == 0) { // unit tests are all successful @@ -93,14 +100,17 @@ static void archiveResults(final StepContext context, final JUnitResultArchiver } } else { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println( - "[withMaven] " + publisherName + " - " + testResultSummary.getFailCount() + " unit test failure(s) found, mark job as unstable"); + listener.getLogger() + .println("[withMaven] " + publisherName + " - " + testResultSummary.getFailCount() + + " unit test failure(s) found, mark job as unstable"); } - node.addAction(new WarningAction(Result.UNSTABLE).withMessage(testResultSummary.getFailCount() + " unit test failure(s) found")); + node.addAction(new WarningAction(Result.UNSTABLE) + .withMessage(testResultSummary.getFailCount() + " unit test failure(s) found")); run.setResult(Result.UNSTABLE); } } catch (RuntimeException e) { - listener.error("[withMaven] " + publisherName + " - 
exception archiving JUnit results " + testResults + ": " + e); + listener.error( + "[withMaven] " + publisherName + " - exception archiving JUnit results " + testResults + ": " + e); LOGGER.log(Level.WARNING, "Exception processing " + testResults, e); throw new MavenPipelinePublisherException(publisherName, "archiving JUnit results " + testResults, e); } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisher.java index da6e81b0..1c0d24a6 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisher.java @@ -24,12 +24,19 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.model.Run; import hudson.model.TaskListener; import hudson.plugins.jacoco.JacocoPublisher; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.MavenPublisher; @@ -40,14 +47,6 @@ import org.kohsuke.stapler.DataBoundConstructor; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.stream.Collectors; - /** * @author Cyrille Le Clerc */ @@ -57,9 +56,7 @@ public class JacocoReportPublisher extends MavenPublisher { private static final long serialVersionUID = 1L; @DataBoundConstructor - public 
JacocoReportPublisher() { - - } + public JacocoReportPublisher() {} /* * @@ -118,15 +115,23 @@ public JacocoReportPublisher() { * @throws InterruptedException */ @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { TaskListener listener = context.get(TaskListener.class); FilePath workspace = context.get(FilePath.class); Run run = context.get(Run.class); Launcher launcher = context.get(Launcher.class); - List jacocoPrepareAgentEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, "org.jacoco", "jacoco-maven-plugin", "prepare-agent", "MojoSucceeded", "MojoFailed"); - List jacocoPrepareAgentIntegrationEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, "org.jacoco", "jacoco-maven-plugin", "prepare-agent-integration", "MojoSucceeded", "MojoFailed"); + List jacocoPrepareAgentEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, "org.jacoco", "jacoco-maven-plugin", "prepare-agent", "MojoSucceeded", "MojoFailed"); + List jacocoPrepareAgentIntegrationEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, + "org.jacoco", + "jacoco-maven-plugin", + "prepare-agent-integration", + "MojoSucceeded", + "MojoFailed"); jacocoPrepareAgentEvents.addAll(jacocoPrepareAgentIntegrationEvents); // add prepare-agent-integration goals if (jacocoPrepareAgentEvents.isEmpty()) { @@ -139,7 +144,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } catch (ClassNotFoundException e) { listener.getLogger().print("[withMaven] Jenkins "); listener.hyperlink("https://wiki.jenkins.io/display/JENKINS/JaCoCo+Plugin", "JaCoCo Plugin"); - listener.getLogger().println(" not found, don't display org.jacoco:jacoco-maven-plugin:prepare-agent[-integration] results in pipeline screen."); + listener.getLogger() + .println( + " 
not found, don't display org.jacoco:jacoco-maven-plugin:prepare-agent[-integration] results in pipeline screen."); return; } @@ -150,7 +157,10 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE Element buildElement = XmlUtils.getUniqueChildElementOrNull(jacocoPrepareAgentEvent, "project", "build"); if (buildElement == null) { if (LOGGER.isLoggable(Level.FINE)) - LOGGER.log(Level.FINE, "Ignore execution event with missing 'build' child:" + XmlUtils.toString(jacocoPrepareAgentEvent)); + LOGGER.log( + Level.FINE, + "Ignore execution event with missing 'build' child:" + + XmlUtils.toString(jacocoPrepareAgentEvent)); continue; } @@ -161,16 +171,21 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE MavenSpyLogProcessor.PluginInvocation pluginInvocation = XmlUtils.newPluginInvocation(pluginElt); if (destFileElt == null) { - listener.getLogger().println("[withMaven] No element found for in " + XmlUtils.toString(jacocoPrepareAgentEvent)); + listener.getLogger() + .println("[withMaven] No element found for in " + + XmlUtils.toString(jacocoPrepareAgentEvent)); continue; } String destFile = destFileElt.getTextContent().trim(); if (destFile.equals("${jacoco.destFile}")) { destFile = "${project.build.directory}/jacoco.exec"; - if ("prepare-agent-integration".equals(pluginInvocation.goal)) destFile = "${project.build.directory}/jacoco-it.exec"; + if ("prepare-agent-integration".equals(pluginInvocation.goal)) + destFile = "${project.build.directory}/jacoco-it.exec"; String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt); if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) { - listener.getLogger().println("[withMaven] '${project.build.directory}' found for in " + XmlUtils.toString(jacocoPrepareAgentEvent)); + listener.getLogger() + .println("[withMaven] '${project.build.directory}' found for in " + + XmlUtils.toString(jacocoPrepareAgentEvent)); continue; } @@ -178,7 +193,9 @@ 
public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } else if (destFile.contains("${project.build.directory}")) { String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt); if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) { - listener.getLogger().println("[withMaven] '${project.build.directory}' found for in " + XmlUtils.toString(jacocoPrepareAgentEvent)); + listener.getLogger() + .println("[withMaven] '${project.build.directory}' found for in " + + XmlUtils.toString(jacocoPrepareAgentEvent)); continue; } destFile = destFile.replace("${project.build.directory}", projectBuildDirectory); @@ -186,7 +203,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } else if (destFile.contains("${basedir}")) { String baseDir = projectElt.getAttribute("baseDir"); if (baseDir.isEmpty()) { - listener.getLogger().println("[withMaven] '${basedir}' found for in " + XmlUtils.toString(jacocoPrepareAgentEvent)); + listener.getLogger() + .println("[withMaven] '${basedir}' found for in " + + XmlUtils.toString(jacocoPrepareAgentEvent)); continue; } destFile = destFile.replace("${basedir}", baseDir); @@ -200,16 +219,28 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE String sourceDirectoryRelativePath = XmlUtils.getPathInWorkspace(sourceDirectory, workspace); String classesDirectoryRelativePath = XmlUtils.getPathInWorkspace(classesDirectory, workspace); - listener.getLogger().println("[withMaven] jacocoPublisher - Archive JaCoCo analysis results for Maven artifact " + mavenArtifact.toString() + " generated by " + - pluginInvocation + ": execFile: " + destFile + ", sources: " + sourceDirectoryRelativePath + ", classes: " + classesDirectoryRelativePath); - jacocoReportDetails.add(new JacocoReportDetails(destFile, sourceDirectoryRelativePath, classesDirectoryRelativePath, mavenArtifact.toString() + " " + pluginInvocation)); + listener.getLogger() + 
.println("[withMaven] jacocoPublisher - Archive JaCoCo analysis results for Maven artifact " + + mavenArtifact.toString() + " generated by " + pluginInvocation + ": execFile: " + destFile + + ", sources: " + sourceDirectoryRelativePath + ", classes: " + + classesDirectoryRelativePath); + jacocoReportDetails.add(new JacocoReportDetails( + destFile, + sourceDirectoryRelativePath, + classesDirectoryRelativePath, + mavenArtifact.toString() + " " + pluginInvocation)); } JacocoPublisher jacocoPublisher = new JacocoPublisher(); - String aggregatedDestFile = jacocoReportDetails.stream().map(details -> details.execFile).collect(Collectors.joining(",")); - String aggregatedSourceDirectory = jacocoReportDetails.stream().map(details -> details.sourceDirectory).collect(Collectors.joining(",")); - String aggregatedClassesDirectory = jacocoReportDetails.stream().map(details -> details.classesDirectory).collect(Collectors.joining(",")); + String aggregatedDestFile = + jacocoReportDetails.stream().map(details -> details.execFile).collect(Collectors.joining(",")); + String aggregatedSourceDirectory = jacocoReportDetails.stream() + .map(details -> details.sourceDirectory) + .collect(Collectors.joining(",")); + String aggregatedClassesDirectory = jacocoReportDetails.stream() + .map(details -> details.classesDirectory) + .collect(Collectors.joining(",")); jacocoPublisher.setExecPattern(aggregatedDestFile); jacocoPublisher.setSourcePattern(aggregatedSourceDirectory); @@ -218,16 +249,19 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE try { jacocoPublisher.perform(run, workspace, launcher, listener); } catch (Exception e) { - listener.error("[withMaven] jacocoPublisher - exception archiving JaCoCo results for " + jacocoReportDetails + ": " + e); + listener.error("[withMaven] jacocoPublisher - exception archiving JaCoCo results for " + jacocoReportDetails + + ": " + e); LOGGER.log(Level.WARNING, "Exception processing JaCoCo results", e); - throw new 
MavenPipelinePublisherException("jacocoPublisher", "archiving JaCoCo results for " + jacocoReportDetails, e); + throw new MavenPipelinePublisherException( + "jacocoPublisher", "archiving JaCoCo results for " + jacocoReportDetails, e); } } public static class JacocoReportDetails { final String execFile, sourceDirectory, classesDirectory, description; - public JacocoReportDetails(String execFile, String sourceDirectory, String classesDirectory, String description) { + public JacocoReportDetails( + String execFile, String sourceDirectory, String classesDirectory, String description) { this.execFile = execFile; this.sourceDirectory = sourceDirectory; this.classesDirectory = classesDirectory; @@ -236,14 +270,14 @@ public JacocoReportDetails(String execFile, String sourceDirectory, String class @Override public String toString() { - return "JacocoReportDetails{" + - "execFile='" + execFile + '\'' + - ", sourceDirectory='" + sourceDirectory + '\'' + - ", classesDirectory='" + classesDirectory + '\'' + - ", description='" + description + '\'' + - '}'; + return "JacocoReportDetails{" + "execFile='" + + execFile + '\'' + ", sourceDirectory='" + + sourceDirectory + '\'' + ", classesDirectory='" + + classesDirectory + '\'' + ", description='" + + description + '\'' + '}'; } } + @Symbol("jacocoPublisher") @Extension public static class DescriptorImpl extends AbstractHealthAwarePublisher.DescriptorImpl { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JenkinsMavenEventSpyLogsPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JenkinsMavenEventSpyLogsPublisher.java index 733115af..f4fa4d82 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JenkinsMavenEventSpyLogsPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JenkinsMavenEventSpyLogsPublisher.java @@ -1,19 +1,18 @@ package 
org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.FilePath; import hudson.Launcher; import hudson.model.Run; import hudson.model.TaskListener; -import jenkins.model.ArtifactManager; -import jenkins.util.BuildListenerAdapter; -import org.jenkinsci.plugins.workflow.steps.StepContext; - -import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.PrintWriter; import java.io.Serializable; import java.util.Collections; import java.util.Map; +import jenkins.model.ArtifactManager; +import jenkins.util.BuildListenerAdapter; +import org.jenkinsci.plugins.workflow.steps.StepContext; /** * Primarily for debugging purpose, archive the Maven build logs @@ -24,7 +23,8 @@ public class JenkinsMavenEventSpyLogsPublisher implements Serializable { private static final long serialVersionUID = 1L; - public void process(@NonNull StepContext context, @NonNull FilePath mavenSpyLogs) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull FilePath mavenSpyLogs) + throws IOException, InterruptedException { Run run = context.get(Run.class); ArtifactManager artifactManager = run.pickArtifactManager(); @@ -36,12 +36,14 @@ public void process(@NonNull StepContext context, @NonNull FilePath mavenSpyLogs FilePath tmpFile = new FilePath(workspace, "." 
+ mavenSpyLogs.getName()); try { mavenSpyLogs.copyTo(tmpFile); - listener.getLogger().println("[withMaven] Archive " + mavenSpyLogs.getRemote() + " as " + mavenSpyLogs.getName()); + listener.getLogger() + .println("[withMaven] Archive " + mavenSpyLogs.getRemote() + " as " + mavenSpyLogs.getName()); // filePathInArchiveZone -> filePathInWorkspace Map mavenBuildLogs = Collections.singletonMap(mavenSpyLogs.getName(), tmpFile.getName()); artifactManager.archive(workspace, launcher, new BuildListenerAdapter(listener), mavenBuildLogs); } catch (Exception e) { - PrintWriter errorWriter = listener.error("[withMaven] WARNING Exception archiving Maven build logs " + mavenSpyLogs + ", skip file. "); + PrintWriter errorWriter = listener.error( + "[withMaven] WARNING Exception archiving Maven build logs " + mavenSpyLogs + ", skip file. "); e.printStackTrace(errorWriter); } finally { boolean deleted = tmpFile.delete(); diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisher.java index 566cb0dd..6bb174bb 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisher.java @@ -24,12 +24,24 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.Extension; import hudson.FilePath; import hudson.model.StreamBuildListener; import hudson.model.TaskListener; import hudson.tasks.junit.JUnitResultArchiver; import hudson.tasks.junit.TestDataPublisher; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; 
+import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.MavenPublisher; @@ -41,19 +53,6 @@ import org.kohsuke.stapler.DataBoundSetter; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.CheckForNull; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.io.IOException; -import java.io.OutputStream; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.stream.Collectors; - /** * @author Cyrille Le Clerc */ @@ -89,98 +88,96 @@ public class JunitTestsPublisher extends MavenPublisher { @CheckForNull private Double healthScaleFactor; - private boolean ignoreAttachments; @DataBoundConstructor - public JunitTestsPublisher() { - - } + public JunitTestsPublisher() {} /* - - - - - - ${maven.test.additionalClasspath} - ${argLine} - ${basedir} - ${childDelegation} - ${project.build.outputDirectory} - ${maven.test.dependency.excludes} - ${maven.surefire.debug} - ${dependenciesToScan} - ${disableXmlReport} - ${enableAssertions} - ${excludedGroups} - ${surefire.excludesFile} - ${surefire.failIfNoSpecifiedTests} - ${failIfNoTests} - 1C - ${forkMode} - ${surefire.timeout} - ${groups} - ${surefire.includesFile} - ${junitArtifactName} - ${jvm} - ${localRepository} - ${objectFactory} - ${parallel} - ${session.parallel} - ${parallelOptimized} - ${surefire.parallel.forcedTimeout} - ${surefire.parallel.timeout} - ${perCoreThreadCount} - ${plugin.artifactMap} - ${plugin} - ${surefire.printSummary} - ${project.artifactMap} - ${maven.test.redirectTestOutputToFile} - ${project.pluginArtifactRepositories} - ${surefire.reportFormat} - ${surefire.reportNameSuffix} - 
${project.build.directory}/surefire-reports - ${surefire.rerunFailingTestsCount} - true - ${surefire.runOrder} - ${surefire.shutdown} - ${maven.test.skip} - ${surefire.skipAfterFailureCount} - ${maven.test.skip.exec} - ${skipTests} - ${surefire.suiteXmlFiles} - - ${test} - ${project.build.testOutputDirectory} - ${maven.test.failure.ignore} - ${testNGArtifactName} - ${project.build.testSourceDirectory} - ${threadCount} - ${threadCountClasses} - ${threadCountMethods} - ${threadCountSuites} - ${trimStackTrace} - ${surefire.useFile} - ${surefire.useManifestOnlyJar} - ${surefire.useSystemClassLoader} - ${useUnlimitedThreads} - ${basedir} - ${project} - ${session} - - - - - - - - ${project.build.directory}/surefire-reports - - - */ + + + + + + ${maven.test.additionalClasspath} + ${argLine} + ${basedir} + ${childDelegation} + ${project.build.outputDirectory} + ${maven.test.dependency.excludes} + ${maven.surefire.debug} + ${dependenciesToScan} + ${disableXmlReport} + ${enableAssertions} + ${excludedGroups} + ${surefire.excludesFile} + ${surefire.failIfNoSpecifiedTests} + ${failIfNoTests} + 1C + ${forkMode} + ${surefire.timeout} + ${groups} + ${surefire.includesFile} + ${junitArtifactName} + ${jvm} + ${localRepository} + ${objectFactory} + ${parallel} + ${session.parallel} + ${parallelOptimized} + ${surefire.parallel.forcedTimeout} + ${surefire.parallel.timeout} + ${perCoreThreadCount} + ${plugin.artifactMap} + ${plugin} + ${surefire.printSummary} + ${project.artifactMap} + ${maven.test.redirectTestOutputToFile} + ${project.pluginArtifactRepositories} + ${surefire.reportFormat} + ${surefire.reportNameSuffix} + ${project.build.directory}/surefire-reports + ${surefire.rerunFailingTestsCount} + true + ${surefire.runOrder} + ${surefire.shutdown} + ${maven.test.skip} + ${surefire.skipAfterFailureCount} + ${maven.test.skip.exec} + ${skipTests} + ${surefire.suiteXmlFiles} + + ${test} + ${project.build.testOutputDirectory} + ${maven.test.failure.ignore} + ${testNGArtifactName} + 
${project.build.testSourceDirectory} + ${threadCount} + ${threadCountClasses} + ${threadCountMethods} + ${threadCountSuites} + ${trimStackTrace} + ${surefire.useFile} + ${surefire.useManifestOnlyJar} + ${surefire.useSystemClassLoader} + ${useUnlimitedThreads} + ${basedir} + ${project} + ${session} + + + + + + + + ${project.build.directory}/surefire-reports + + + */ @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { TaskListener listener = context.get(TaskListener.class); if (listener == null) { @@ -193,25 +190,65 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } catch (ClassNotFoundException e) { listener.getLogger().print("[withMaven] Jenkins "); listener.hyperlink("http://wiki.jenkins-ci.org/display/JENKINS/JUnit+Plugin", "JUnit Plugin"); - listener.getLogger().print(" not found, don't display " + APACHE_GROUP_ID + ":" + SUREFIRE_ID + ":" + SUREFIRE_GOAL); - listener.getLogger().println(" nor " + APACHE_GROUP_ID + ":" + FAILSAFE_ID + ":" + FAILSAFE_GOAL + " results in pipeline screen."); + listener.getLogger() + .print(" not found, don't display " + APACHE_GROUP_ID + ":" + SUREFIRE_ID + ":" + SUREFIRE_GOAL); + listener.getLogger() + .println(" nor " + APACHE_GROUP_ID + ":" + FAILSAFE_ID + ":" + FAILSAFE_GOAL + + " results in pipeline screen."); return; } - List sureFireTestEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, APACHE_GROUP_ID, SUREFIRE_ID, SUREFIRE_GOAL, "MojoSucceeded", "MojoFailed"); - List failSafeTestEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, APACHE_GROUP_ID, FAILSAFE_ID, FAILSAFE_GOAL, "MojoSucceeded", "MojoFailed"); - List tychoTestEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, TYCHO_GROUP_ID, TYCHO_ID, TYCHO_GOAL, "MojoSucceeded", "MojoFailed"); - 
List karmaTestEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, KARMA_GROUP_ID, KARMA_ID, KARMA_GOAL, "MojoSucceeded", "MojoFailed"); - List frontendTestEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, FRONTEND_GROUP_ID, FRONTEND_ID, FRONTEND_GOAL, "MojoSucceeded", "MojoFailed"); - - executeReporter(context, listener, sureFireTestEvents, APACHE_GROUP_ID + ":" + SUREFIRE_ID + ":" + SUREFIRE_GOAL, "reportsDirectory"); - executeReporter(context, listener, failSafeTestEvents, APACHE_GROUP_ID + ":" + FAILSAFE_ID + ":" + FAILSAFE_GOAL, "reportsDirectory"); - executeReporter(context, listener, tychoTestEvents, APACHE_GROUP_ID + ":" + TYCHO_ID + ":" + TYCHO_GOAL, "reportsDirectory"); - executeReporter(context, listener, karmaTestEvents, KARMA_GROUP_ID + ":" + KARMA_ID + ":" + KARMA_GOAL, "reportsDirectory"); - executeReporter(context, listener, frontendTestEvents, FRONTEND_GROUP_ID + ":" + FRONTEND_ID + ":" + FRONTEND_GOAL, "environmentVariables", "REPORTS_DIRECTORY"); + List sureFireTestEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, APACHE_GROUP_ID, SUREFIRE_ID, SUREFIRE_GOAL, "MojoSucceeded", "MojoFailed"); + List failSafeTestEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, APACHE_GROUP_ID, FAILSAFE_ID, FAILSAFE_GOAL, "MojoSucceeded", "MojoFailed"); + List tychoTestEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, TYCHO_GROUP_ID, TYCHO_ID, TYCHO_GOAL, "MojoSucceeded", "MojoFailed"); + List karmaTestEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, KARMA_GROUP_ID, KARMA_ID, KARMA_GOAL, "MojoSucceeded", "MojoFailed"); + List frontendTestEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, FRONTEND_GROUP_ID, FRONTEND_ID, FRONTEND_GOAL, "MojoSucceeded", "MojoFailed"); + + executeReporter( + context, + listener, + sureFireTestEvents, + APACHE_GROUP_ID + ":" + SUREFIRE_ID + ":" + SUREFIRE_GOAL, + "reportsDirectory"); + executeReporter( + context, + listener, + 
failSafeTestEvents, + APACHE_GROUP_ID + ":" + FAILSAFE_ID + ":" + FAILSAFE_GOAL, + "reportsDirectory"); + executeReporter( + context, + listener, + tychoTestEvents, + APACHE_GROUP_ID + ":" + TYCHO_ID + ":" + TYCHO_GOAL, + "reportsDirectory"); + executeReporter( + context, + listener, + karmaTestEvents, + KARMA_GROUP_ID + ":" + KARMA_ID + ":" + KARMA_GOAL, + "reportsDirectory"); + executeReporter( + context, + listener, + frontendTestEvents, + FRONTEND_GROUP_ID + ":" + FRONTEND_ID + ":" + FRONTEND_GOAL, + "environmentVariables", + "REPORTS_DIRECTORY"); } - private void executeReporter(StepContext context, TaskListener listener, List testEvents, String goal, String... reportsDirElementNames) throws IOException, InterruptedException { + private void executeReporter( + StepContext context, + TaskListener listener, + List testEvents, + String goal, + String... reportsDirElementNames) + throws IOException, InterruptedException { if (testEvents.isEmpty()) { if (LOGGER.isLoggable(Level.FINE)) { listener.getLogger().println("[withMaven] junitPublisher - No " + goal + " execution found"); @@ -233,14 +270,19 @@ private void executeReporter(StepContext context, TaskListener listener, List element found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] No <" + + Arrays.stream(reportsDirElementNames).collect(Collectors.joining(".")) + + "> element found for in " + XmlUtils.toString(testEvent)); continue; } String reportsDirectory = reportsDirectoryElt.getTextContent().trim(); if (reportsDirectory.contains("${project.build.directory}")) { String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt); if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) { - listener.getLogger().println("[withMaven] '${project.build.directory}' found for in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] '${project.build.directory}' found for in " + + XmlUtils.toString(testEvent)); continue; } 
@@ -249,7 +291,8 @@ private void executeReporter(StepContext context, TaskListener listener, List in " + XmlUtils.toString(testEvent)); + listener.getLogger() + .println("[withMaven] '${basedir}' found for in " + XmlUtils.toString(testEvent)); continue; } @@ -259,11 +302,13 @@ private void executeReporter(StepContext context, TaskListener listener, List testDataPublishers = new ArrayList<>(); @@ -282,34 +328,41 @@ private void executeReporter(StepContext context, TaskListener listener, List getGeneratedArtifacts() { private static @CheckForNull Job getJob(String fullName) { try { return Jenkins.get().getItemByFullName(fullName, Job.class); - } catch (RuntimeException x) { // TODO switch to simple catch (AccessDeniedException) when baseline includes Spring Security + } catch ( + RuntimeException + x) { // TODO switch to simple catch (AccessDeniedException) when baseline includes Spring + // Security if (x.getClass().getSimpleName().startsWith("AccessDeniedException")) { return null; } else { @@ -97,62 +109,75 @@ public synchronized Collection getGeneratedArtifacts() { } public synchronized Collection getDownstreamJobs() { - List downstreamJobFullNames = GlobalPipelineMavenConfig - .get() + List downstreamJobFullNames = GlobalPipelineMavenConfig.get() .getDao() .listDownstreamJobsByArtifact(run.getParent().getFullName(), run.getNumber()) .values() .stream() .flatMap(Set::stream) .collect(Collectors.toList()); - return downstreamJobFullNames.stream().map(jobFullName -> { - if (jobFullName == null) { - return null; - } - return getJob(jobFullName); - }).filter(Objects::nonNull).collect(Collectors.toList()); + return downstreamJobFullNames.stream() + .map(jobFullName -> { + if (jobFullName == null) { + return null; + } + return getJob(jobFullName); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } public synchronized SortedMap> getDownstreamJobsByArtifact() { - Map> downstreamJobsByArtifact = 
GlobalPipelineMavenConfig.get().getDao().listDownstreamJobsByArtifact(run.getParent().getFullName(), run.getNumber()); + Map> downstreamJobsByArtifact = GlobalPipelineMavenConfig.get() + .getDao() + .listDownstreamJobsByArtifact(run.getParent().getFullName(), run.getNumber()); TreeMap> result = new TreeMap<>(); - for(Map.Entry> entry: downstreamJobsByArtifact.entrySet()) { + for (Map.Entry> entry : downstreamJobsByArtifact.entrySet()) { MavenArtifact mavenArtifact = entry.getKey(); SortedSet downstreamJobFullNames = entry.getValue(); - result.put(mavenArtifact, downstreamJobFullNames.stream().map(jobFullName -> { - if (jobFullName == null) { - return null; - } - return getJob(jobFullName); - }).filter(Objects::nonNull).collect(Collectors.toList())); + result.put( + mavenArtifact, + downstreamJobFullNames.stream() + .map(jobFullName -> { + if (jobFullName == null) { + return null; + } + return getJob(jobFullName); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList())); } return result; } public synchronized Collection getUpstreamBuilds() { - Map upstreamJobs = GlobalPipelineMavenConfig.get().getDao().listUpstreamJobs(run.getParent().getFullName(), run.getNumber()); - return upstreamJobs.entrySet().stream().map(entry -> { - if (entry == null) - return null; - Job job = getJob(entry.getKey()); - if (job == null) - return null; - Run run = job.getBuildByNumber(entry.getValue()); - return run; - }).filter(Objects::nonNull).collect(Collectors.toList()); + Map upstreamJobs = GlobalPipelineMavenConfig.get() + .getDao() + .listUpstreamJobs(run.getParent().getFullName(), run.getNumber()); + return upstreamJobs.entrySet().stream() + .map(entry -> { + if (entry == null) return null; + Job job = getJob(entry.getKey()); + if (job == null) return null; + Run run = job.getBuildByNumber(entry.getValue()); + return run; + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } public synchronized Collection getDeployedArtifacts() { - return 
getGeneratedArtifacts() - .stream() + return getGeneratedArtifacts().stream() .filter(mavenArtifact -> mavenArtifact != null && mavenArtifact.isDeployed()) .collect(Collectors.toList()); } - public synchronized Collection getDependencies(){ - return GlobalPipelineMavenConfig.get().getDao().listDependencies(run.getParent().getFullName(), run.getNumber()); + public synchronized Collection getDependencies() { + return GlobalPipelineMavenConfig.get() + .getDao() + .listDependencies(run.getParent().getFullName(), run.getNumber()); } public synchronized Run getRun() { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/PipelineGraphPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/PipelineGraphPublisher.java index 3a8d8474..6bc645bf 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/PipelineGraphPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/PipelineGraphPublisher.java @@ -1,9 +1,19 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies; +import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listParentProjects; + +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.model.Run; import hudson.model.TaskListener; import hudson.util.ListBoxModel; +import java.io.IOException; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; +import java.util.logging.Level; +import java.util.logging.Logger; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; @@ -17,17 +27,6 @@ import org.kohsuke.stapler.DataBoundSetter; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import 
java.util.List; -import java.util.Set; -import java.util.TreeSet; -import java.util.logging.Level; -import java.util.logging.Logger; - -import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies; -import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listParentProjects; - /** * Fingerprint the dependencies of the maven project. * @@ -67,19 +66,16 @@ public PipelineGraphPublisher() { protected Set getIncludedScopes() { Set includedScopes = new TreeSet<>(); - if (includeScopeCompile) - includedScopes.add("compile"); - if (includeScopeRuntime) - includedScopes.add("runtime"); - if (includeScopeProvided) - includedScopes.add("provided"); - if (includeScopeTest) - includedScopes.add("test"); + if (includeScopeCompile) includedScopes.add("compile"); + if (includeScopeRuntime) includedScopes.add("runtime"); + if (includeScopeProvided) includedScopes.add("provided"); + if (includeScopeTest) includedScopes.add("test"); return includedScopes; } @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { Run run = context.get(Run.class); TaskListener listener = context.get(TaskListener.class); @@ -90,30 +86,39 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE List generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogsElt, true); List executedLifecyclePhases = XmlUtils.getExecutedLifecyclePhases(mavenSpyLogsElt); - recordParentProject(parentProjects, run,listener, dao); + recordParentProject(parentProjects, run, listener, dao); recordDependencies(dependencies, generatedArtifacts, run, listener, dao); recordGeneratedArtifacts(generatedArtifacts, executedLifecyclePhases, run, listener, dao); } - protected void recordParentProject(List parentProjects, - @NonNull 
Run run, @NonNull TaskListener listener, @NonNull PipelineMavenPluginDao dao) { + protected void recordParentProject( + List parentProjects, + @NonNull Run run, + @NonNull TaskListener listener, + @NonNull PipelineMavenPluginDao dao) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - recordParentProject - filter: " + - "versions[snapshot: " + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "]"); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - recordParentProject - filter: " + + "versions[snapshot: " + isIncludeSnapshotVersions() + ", release: " + + isIncludeReleaseVersions() + "]"); } for (MavenArtifact parentProject : parentProjects) { if (parentProject.isSnapshot()) { if (!includeSnapshotVersions) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Skip recording snapshot parent project: " + parentProject.getId()); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - Skip recording snapshot parent project: " + + parentProject.getId()); } continue; } } else { if (!includeReleaseVersions) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Skip recording release parent project: " + parentProject.getId()); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - Skip recording release parent project: " + + parentProject.getId()); } continue; } @@ -121,64 +126,94 @@ protected void recordParentProject(List parentProjects, try { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Record parent project: " + parentProject.getId() + ", ignoreUpstreamTriggers: " + ignoreUpstreamTriggers); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - Record parent project: " + + parentProject.getId() + ", ignoreUpstreamTriggers: " + ignoreUpstreamTriggers); } 
- dao.recordParentProject(run.getParent().getFullName(), run.getNumber(), - parentProject.getGroupId(), parentProject.getArtifactId(), parentProject.getVersion(), + dao.recordParentProject( + run.getParent().getFullName(), + run.getNumber(), + parentProject.getGroupId(), + parentProject.getArtifactId(), + parentProject.getVersion(), this.ignoreUpstreamTriggers); } catch (RuntimeException e) { - listener.error("[withMaven] pipelineGraphPublisher - WARNING: Exception recording parent project " + parentProject.getId() + " on build, skip"); + listener.error("[withMaven] pipelineGraphPublisher - WARNING: Exception recording parent project " + + parentProject.getId() + " on build, skip"); e.printStackTrace(listener.getLogger()); listener.getLogger().flush(); } } - } - protected void recordDependencies(List dependencies, List generatedArtifacts, - @NonNull Run run, @NonNull TaskListener listener, @NonNull PipelineMavenPluginDao dao) { + protected void recordDependencies( + List dependencies, + List generatedArtifacts, + @NonNull Run run, + @NonNull TaskListener listener, + @NonNull PipelineMavenPluginDao dao) { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - recordDependencies - filter: " + - "versions[snapshot: " + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "], " + - "scopes:" + getIncludedScopes()); + listener.getLogger() + .println( + "[withMaven] pipelineGraphPublisher - recordDependencies - filter: " + "versions[snapshot: " + + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "], " + + "scopes:" + + getIncludedScopes()); } for (MavenDependency dependency : dependencies) { if (dependency.isSnapshot()) { if (!includeSnapshotVersions) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Skip recording snapshot dependency: " + dependency.getId()); + listener.getLogger() + .println("[withMaven] 
pipelineGraphPublisher - Skip recording snapshot dependency: " + + dependency.getId()); } continue; } } else { if (!includeReleaseVersions) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Skip recording release dependency: " + dependency.getId()); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - Skip recording release dependency: " + + dependency.getId()); } continue; } } if (!getIncludedScopes().contains(dependency.getScope())) { if (LOGGER.isLoggable(Level.FINER)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Skip recording dependency with ignored scope: " + dependency.getId()); + listener.getLogger() + .println( + "[withMaven] pipelineGraphPublisher - Skip recording dependency with ignored scope: " + + dependency.getId()); } continue; } try { if (LOGGER.isLoggable(Level.FINE)) { - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Record dependency: " + dependency.getId() + ", ignoreUpstreamTriggers: " + ignoreUpstreamTriggers); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - Record dependency: " + dependency.getId() + + ", ignoreUpstreamTriggers: " + ignoreUpstreamTriggers); } - dao.recordDependency(run.getParent().getFullName(), run.getNumber(), - dependency.getGroupId(), dependency.getArtifactId(), dependency.getBaseVersion(), dependency.getType(), dependency.getScope(), - this.ignoreUpstreamTriggers, dependency.getClassifier()); + dao.recordDependency( + run.getParent().getFullName(), + run.getNumber(), + dependency.getGroupId(), + dependency.getArtifactId(), + dependency.getBaseVersion(), + dependency.getType(), + dependency.getScope(), + this.ignoreUpstreamTriggers, + dependency.getClassifier()); } catch (RuntimeException e) { - listener.error("[withMaven] pipelineGraphPublisher - WARNING: Exception recording " + dependency.getId() + " on build, skip"); + listener.error("[withMaven] pipelineGraphPublisher - 
WARNING: Exception recording " + dependency.getId() + + " on build, skip"); e.printStackTrace(listener.getLogger()); listener.getLogger().flush(); } @@ -192,43 +227,66 @@ protected void recordDependencies(List dependencies, List generatedArtifacts, List executedLifecyclePhases, @NonNull Run run, @NonNull TaskListener listener, @NonNull PipelineMavenPluginDao dao) { + protected void recordGeneratedArtifacts( + List generatedArtifacts, + List executedLifecyclePhases, + @NonNull Run run, + @NonNull TaskListener listener, + @NonNull PipelineMavenPluginDao dao) { if (LOGGER.isLoggable(Level.FINE)) { listener.getLogger().println("[withMaven] pipelineGraphPublisher - recordGeneratedArtifacts..."); } for (MavenArtifact artifact : generatedArtifacts) { - boolean skipDownstreamPipelines = this.skipDownstreamTriggers || - (!executedLifecyclePhases.contains(this.lifecycleThreshold)); + boolean skipDownstreamPipelines = + this.skipDownstreamTriggers || (!executedLifecyclePhases.contains(this.lifecycleThreshold)); if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.log(Level.FINE, "Build {0}#{1} - record generated {2}:{3}, version:{4}, " + - "executedLifecyclePhases: {5}, " + - "skipDownstreamTriggers:{6}, lifecycleThreshold: {7}", - new Object[]{run.getParent().getFullName(), run.getNumber(), - artifact.getId(), artifact.getType(), artifact.getVersion(), - executedLifecyclePhases, - skipDownstreamTriggers, lifecycleThreshold}); - listener.getLogger().println("[withMaven] pipelineGraphPublisher - Record generated artifact: " + artifact.getId() + ", version: " + artifact.getVersion() + - ", executedLifecyclePhases: " + executedLifecyclePhases + - ", skipDownstreamTriggers: " + skipDownstreamTriggers + ", lifecycleThreshold:" + lifecycleThreshold + - ", file: " + artifact.getFile()); + LOGGER.log( + Level.FINE, + "Build {0}#{1} - record generated {2}:{3}, version:{4}, " + "executedLifecyclePhases: {5}, " + + "skipDownstreamTriggers:{6}, lifecycleThreshold: {7}", + new Object[] { + 
run.getParent().getFullName(), + run.getNumber(), + artifact.getId(), + artifact.getType(), + artifact.getVersion(), + executedLifecyclePhases, + skipDownstreamTriggers, + lifecycleThreshold + }); + listener.getLogger() + .println("[withMaven] pipelineGraphPublisher - Record generated artifact: " + artifact.getId() + + ", version: " + artifact.getVersion() + ", executedLifecyclePhases: " + + executedLifecyclePhases + ", skipDownstreamTriggers: " + + skipDownstreamTriggers + ", lifecycleThreshold:" + lifecycleThreshold + ", file: " + + artifact.getFile()); } - dao.recordGeneratedArtifact(run.getParent().getFullName(), run.getNumber(), - artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), artifact.getType(), artifact.getBaseVersion(), - artifact.getRepositoryUrl(), skipDownstreamPipelines, artifact.getExtension(), artifact.getClassifier()); + dao.recordGeneratedArtifact( + run.getParent().getFullName(), + run.getNumber(), + artifact.getGroupId(), + artifact.getArtifactId(), + artifact.getVersion(), + artifact.getType(), + artifact.getBaseVersion(), + artifact.getRepositoryUrl(), + skipDownstreamPipelines, + artifact.getExtension(), + artifact.getClassifier()); } } @Override public String toString() { - return getClass().getName() + "[" + - "disabled=" + isDisabled() + ", " + - "scopes=" + getIncludedScopes() + ", " + - "versions={snapshot:" + isIncludeSnapshotVersions() + ", release:" + isIncludeReleaseVersions() + "}, " + - "skipDownstreamTriggers=" + isSkipDownstreamTriggers() + ", " + - "lifecycleThreshold=" + getLifecycleThreshold() + ", " + - "ignoreUpstreamTriggers=" + isIgnoreUpstreamTriggers() + - ']'; + return getClass().getName() + "[" + "disabled=" + + isDisabled() + ", " + "scopes=" + + getIncludedScopes() + ", " + "versions={snapshot:" + + isIncludeSnapshotVersions() + ", release:" + isIncludeReleaseVersions() + "}, " + + "skipDownstreamTriggers=" + + isSkipDownstreamTriggers() + ", " + "lifecycleThreshold=" + + 
getLifecycleThreshold() + ", " + "ignoreUpstreamTriggers=" + + isIgnoreUpstreamTriggers() + ']'; } public boolean isIncludeSnapshotVersions() { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/SpotBugsAnalysisPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/SpotBugsAnalysisPublisher.java index 301b9b4f..7be61beb 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/SpotBugsAnalysisPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/SpotBugsAnalysisPublisher.java @@ -24,12 +24,17 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.model.Run; import hudson.model.TaskListener; import hudson.plugins.findbugs.FindBugsPublisher; +import java.io.IOException; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.MavenSpyLogProcessor; @@ -39,12 +44,6 @@ import org.kohsuke.stapler.DataBoundConstructor; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; - /** * handle {@code mvn spotbugs:spotbugs} invocations. 
* @@ -56,9 +55,7 @@ public class SpotBugsAnalysisPublisher extends AbstractHealthAwarePublisher { private static final long serialVersionUID = 1L; @DataBoundConstructor - public SpotBugsAnalysisPublisher() { - - } + public SpotBugsAnalysisPublisher() {} /* @@ -163,14 +160,21 @@ public SpotBugsAnalysisPublisher() { */ @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { TaskListener listener = context.get(TaskListener.class); FilePath workspace = context.get(FilePath.class); Run run = context.get(Run.class); Launcher launcher = context.get(Launcher.class); - List spotbugsEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, "com.github.spotbugs", "spotbugs-maven-plugin", "spotbugs", "MojoSucceeded", "MojoFailed"); + List spotbugsEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogsElt, + "com.github.spotbugs", + "spotbugs-maven-plugin", + "spotbugs", + "MojoSucceeded", + "MojoFailed"); if (spotbugsEvents.isEmpty()) { LOGGER.log(Level.FINE, "No com.github.spotbugs:spotbugs-maven-plugin:spotbugs execution found"); @@ -181,7 +185,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } catch (ClassNotFoundException e) { listener.getLogger().print("[withMaven] Jenkins "); listener.hyperlink("https://wiki.jenkins-ci.org/display/JENKINS/FindBugs+Plugin", "FindBugs Plugin"); - listener.getLogger().println(" not found, don't display com.github.spotbugs:spotbugs-maven-plugin:spotbugs results in pipeline screen."); + listener.getLogger() + .println( + " not found, don't display com.github.spotbugs:spotbugs-maven-plugin:spotbugs results in pipeline screen."); return; } @@ -198,14 +204,18 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE MavenSpyLogProcessor.PluginInvocation 
pluginInvocation = XmlUtils.newPluginInvocation(pluginElt); if (xmlOutputDirectoryElt == null) { - listener.getLogger().println("[withMaven] No element found for in " + XmlUtils.toString(findBugsTestEvent)); + listener.getLogger() + .println("[withMaven] No element found for in " + + XmlUtils.toString(findBugsTestEvent)); continue; } String xmlOutputDirectory = xmlOutputDirectoryElt.getTextContent().trim(); if (xmlOutputDirectory.contains("${project.build.directory}")) { String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt); if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) { - listener.getLogger().println("[withMaven] '${project.build.directory}' found for in " + XmlUtils.toString(findBugsTestEvent)); + listener.getLogger() + .println("[withMaven] '${project.build.directory}' found for in " + + XmlUtils.toString(findBugsTestEvent)); continue; } @@ -214,7 +224,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } else if (xmlOutputDirectory.contains("${basedir}")) { String baseDir = projectElt.getAttribute("baseDir"); if (baseDir.isEmpty()) { - listener.getLogger().println("[withMaven] '${basedir}' found for in " + XmlUtils.toString(findBugsTestEvent)); + listener.getLogger() + .println("[withMaven] '${basedir}' found for in " + + XmlUtils.toString(findBugsTestEvent)); continue; } @@ -224,8 +236,10 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE xmlOutputDirectory = XmlUtils.getPathInWorkspace(xmlOutputDirectory, workspace); String findBugsResultsFile = xmlOutputDirectory + "/spotbugsXml.xml"; - listener.getLogger().println("[withMaven] SpotBugsPublisher - Archive SpotBugs analysis results for Maven artifact " + mavenArtifact.toString() + " generated by " + - pluginInvocation + ": " + findBugsResultsFile); + listener.getLogger() + .println("[withMaven] SpotBugsPublisher - Archive SpotBugs analysis results for Maven artifact " + + mavenArtifact.toString() 
+ " generated by " + pluginInvocation + ": " + + findBugsResultsFile); FindBugsPublisher findBugsPublisher = new FindBugsPublisher(); findBugsPublisher.setPattern(findBugsResultsFile); @@ -235,11 +249,15 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE try { findBugsPublisher.perform(run, workspace, launcher, listener); } catch (Exception e) { - listener.error("[withMaven] SpotBugsPublisher - exception archiving FindBugs results for Maven artifact " + mavenArtifact.toString() + " generated by " + - pluginInvocation + ": " + e); + listener.error( + "[withMaven] SpotBugsPublisher - exception archiving FindBugs results for Maven artifact " + + mavenArtifact.toString() + " generated by " + pluginInvocation + ": " + e); LOGGER.log(Level.WARNING, "Exception processing " + XmlUtils.toString(findBugsTestEvent), e); - throw new MavenPipelinePublisherException("SpotBugsPublisher", - "archiving FindBugs results for Maven artifact " + mavenArtifact.getId() + " generated by " + pluginInvocation.getId(), e); + throw new MavenPipelinePublisherException( + "SpotBugsPublisher", + "archiving FindBugs results for Maven artifact " + mavenArtifact.getId() + " generated by " + + pluginInvocation.getId(), + e); } } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/TasksScannerPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/TasksScannerPublisher.java index f1605856..74d2c3a0 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/TasksScannerPublisher.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/TasksScannerPublisher.java @@ -1,11 +1,19 @@ package org.jenkinsci.plugins.pipeline.maven.publishers; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.model.Run; import hudson.model.StreamBuildListener; import 
hudson.model.TaskListener; +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.logging.Level; +import java.util.logging.Logger; import org.apache.commons.lang.StringUtils; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; @@ -16,15 +24,6 @@ import org.kohsuke.stapler.DataBoundSetter; import org.w3c.dom.Element; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.logging.Level; -import java.util.logging.Logger; - /** * @author Cyrille Le Clerc * @see hudson.plugins.tasks.TasksPublisher @@ -67,9 +66,7 @@ public class TasksScannerPublisher extends AbstractHealthAwarePublisher { private boolean asRegexp = false; @DataBoundConstructor - public TasksScannerPublisher() { - - } + public TasksScannerPublisher() {} /* @@ -80,7 +77,8 @@ public TasksScannerPublisher() { */ @Override - public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException { + public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) + throws IOException, InterruptedException { TaskListener listener = context.get(TaskListener.class); if (listener == null) { LOGGER.warning("TaskListener is NULL, default to stderr"); @@ -92,18 +90,21 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE Run run = context.get(Run.class); Launcher launcher = context.get(Launcher.class); - try { Class.forName("hudson.plugins.tasks.TasksPublisher"); } catch (ClassNotFoundException e) { listener.getLogger().print("[withMaven] Jenkins "); - listener.hyperlink("https://wiki.jenkins-ci.org/display/JENKINS/Task+Scanner+Plugin", "Task Scanner Plugin"); - listener.getLogger().println(" not found, don't display results of 
source code scanning for 'TODO' and 'FIXME' in pipeline screen."); + listener.hyperlink( + "https://wiki.jenkins-ci.org/display/JENKINS/Task+Scanner+Plugin", "Task Scanner Plugin"); + listener.getLogger() + .println( + " not found, don't display results of source code scanning for 'TODO' and 'FIXME' in pipeline screen."); return; } List sourceDirectoriesPatterns = new ArrayList<>(); - for (Element executionEvent : XmlUtils.getExecutionEvents(mavenSpyLogsElt, "ProjectSucceeded", "ProjectFailed")) { + for (Element executionEvent : + XmlUtils.getExecutionEvents(mavenSpyLogsElt, "ProjectSucceeded", "ProjectFailed")) { /* @@ -116,7 +117,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE Element buildElement = XmlUtils.getUniqueChildElementOrNull(executionEvent, "project", "build"); if (buildElement == null) { if (LOGGER.isLoggable(Level.FINE)) - LOGGER.log(Level.FINE, "Ignore execution event with missing 'build' child:" + XmlUtils.toString(executionEvent)); + LOGGER.log( + Level.FINE, + "Ignore execution event with missing 'build' child:" + XmlUtils.toString(executionEvent)); continue; } Element projectElt = XmlUtils.getUniqueChildElement(executionEvent, "project"); @@ -134,10 +137,15 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE String sourceDirectoryRelativePath = XmlUtils.getPathInWorkspace(sourceDirectory, workspace); if (workspace.child(sourceDirectoryRelativePath).exists()) { - sourceDirectoriesPatterns.add(sourceDirectoryRelativePath + fileSeparatorOnAgent + "**" + fileSeparatorOnAgent + "*"); - listener.getLogger().println("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact " + mavenArtifact.getId() + " in source directory " + sourceDirectoryRelativePath); + sourceDirectoriesPatterns.add( + sourceDirectoryRelativePath + fileSeparatorOnAgent + "**" + fileSeparatorOnAgent + "*"); + listener.getLogger() + .println("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact " + 
+ mavenArtifact.getId() + " in source directory " + sourceDirectoryRelativePath); } else { - LOGGER.log(Level.FINE, "Skip task scanning for {0}, folder {1} does not exist", new Object[]{mavenArtifact, sourceDirectoryRelativePath}); + LOGGER.log(Level.FINE, "Skip task scanning for {0}, folder {1} does not exist", new Object[] { + mavenArtifact, sourceDirectoryRelativePath + }); } } @@ -149,13 +157,15 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE } // To avoid duplicates - hudson.plugins.tasks.TasksResultAction tasksResult = run.getAction(hudson.plugins.tasks.TasksResultAction.class); + hudson.plugins.tasks.TasksResultAction tasksResult = + run.getAction(hudson.plugins.tasks.TasksResultAction.class); if (tasksResult != null) { run.removeAction(tasksResult); } hudson.plugins.tasks.TasksPublisher tasksPublisher = new hudson.plugins.tasks.TasksPublisher(); - String pattern = StringUtils.isEmpty(this.pattern)? XmlUtils.join(sourceDirectoriesPatterns, ",") : this.pattern; + String pattern = + StringUtils.isEmpty(this.pattern) ? 
XmlUtils.join(sourceDirectoriesPatterns, ",") : this.pattern; tasksPublisher.setPattern(pattern); tasksPublisher.setExcludePattern(StringUtils.trimToNull(this.excludePattern)); diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/PipelineTriggerService.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/PipelineTriggerService.java index 041b4ca7..ac899c67 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/PipelineTriggerService.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/PipelineTriggerService.java @@ -1,6 +1,9 @@ package org.jenkinsci.plugins.pipeline.maven.service; import com.cloudbees.hudson.plugins.folder.computed.ComputedFolder; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import hudson.console.ModelHyperlinkNote; import hudson.model.Cause; import hudson.model.CauseAction; @@ -13,20 +16,6 @@ import hudson.security.ACLContext; import hudson.triggers.Trigger; import hudson.triggers.TriggerDescriptor; -import jenkins.model.Jenkins; -import jenkins.model.ParameterizedJobMixIn; -import org.acegisecurity.AccessDeniedException; -import org.acegisecurity.Authentication; -import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCause; -import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCauseHelper; -import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; -import org.jenkinsci.plugins.pipeline.maven.trigger.WorkflowJobDependencyTrigger; - -import edu.umd.cs.findbugs.annotations.CheckForNull; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.IOException; import java.util.ArrayList; import 
java.util.Collection; @@ -43,13 +32,24 @@ import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.stream.Collectors; +import jenkins.model.Jenkins; +import jenkins.model.ParameterizedJobMixIn; +import org.acegisecurity.AccessDeniedException; +import org.acegisecurity.Authentication; +import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCause; +import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCauseHelper; +import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; +import org.jenkinsci.plugins.pipeline.maven.trigger.WorkflowJobDependencyTrigger; /** * @author Cyrille Le Clerc */ public class PipelineTriggerService { - private final static java.util.logging.Logger LOGGER = java.util.logging.Logger.getLogger(PipelineTriggerService.class.getName()); + private static final java.util.logging.Logger LOGGER = + java.util.logging.Logger.getLogger(PipelineTriggerService.class.getName()); private final GlobalPipelineMavenConfig globalPipelineMavenConfig; @@ -57,7 +57,14 @@ public PipelineTriggerService(@NonNull GlobalPipelineMavenConfig globalPipelineM this.globalPipelineMavenConfig = globalPipelineMavenConfig; } - public Collection triggerDownstreamPipelines(@NonNull String groupId, @NonNull String artifactId, @Nullable String baseVersion, @NonNull String version, @NonNull String type, @NonNull MavenDependencyCause cause, @NonNull ServiceLogger logger) { + public Collection triggerDownstreamPipelines( + @NonNull String groupId, + @NonNull String artifactId, + @Nullable String baseVersion, + @NonNull String version, + @NonNull String type, + @NonNull MavenDependencyCause cause, + @NonNull ServiceLogger logger) { MavenArtifact mavenArtifact = new MavenArtifact(); mavenArtifact.setGroupId(groupId); mavenArtifact.setArtifactId(artifactId); @@ -68,7 +75,10 @@ public Collection 
triggerDownstreamPipelines(@NonNull String groupId, @N return triggerDownstreamPipelines(Collections.singleton(mavenArtifact), cause, logger); } - public Collection triggerDownstreamPipelines(@NonNull Collection upstreamArtifacts, @NonNull MavenDependencyCause cause, @NonNull ServiceLogger logger) { + public Collection triggerDownstreamPipelines( + @NonNull Collection upstreamArtifacts, + @NonNull MavenDependencyCause cause, + @NonNull ServiceLogger logger) { if (!(cause instanceof Cause)) { throw new IllegalArgumentException("Given cause must extend hudson.model.Cause: " + cause); @@ -81,18 +91,23 @@ public Collection triggerDownstreamPipelines(@NonNull Collection> downstreamPipelinesByArtifact = new HashMap<>(); - for(MavenArtifact mavenArtifact: upstreamArtifacts) { + for (MavenArtifact mavenArtifact : upstreamArtifacts) { PipelineMavenPluginDao dao = globalPipelineMavenConfig.getDao(); // FIXME use classifier in search query - SortedSet downstreamPipelines = dao.listDownstreamJobs(mavenArtifact.getGroupId(), mavenArtifact.getArtifactId(), mavenArtifact.getVersion(), mavenArtifact.getBaseVersion(), mavenArtifact.getType()); + SortedSet downstreamPipelines = dao.listDownstreamJobs( + mavenArtifact.getGroupId(), + mavenArtifact.getArtifactId(), + mavenArtifact.getVersion(), + mavenArtifact.getBaseVersion(), + mavenArtifact.getType()); downstreamPipelinesByArtifact.put(mavenArtifact, downstreamPipelines); } Map> jobsToTrigger = new TreeMap<>(); - Map> omittedPipelineTriggersByPipelineFullname = new HashMap<>(); + Map> omittedPipelineTriggersByPipelineFullname = new HashMap<>(); // build the list of pipelines to trigger - for(Map.Entry> entry: downstreamPipelinesByArtifact.entrySet()) { + for (Map.Entry> entry : downstreamPipelinesByArtifact.entrySet()) { MavenArtifact mavenArtifact = entry.getKey(); SortedSet downstreamPipelines = entry.getValue(); @@ -102,23 +117,34 @@ public Collection triggerDownstreamPipelines(@NonNull Collection mavenArtifacts = 
jobsToTrigger.get(downstreamPipelineFullName); if (mavenArtifacts == null) { - logger.log(Level.INFO, "Invalid state, no artifacts found for pipeline '" + downstreamPipelineFullName + "' while evaluating " + mavenArtifact.getShortDescription()); + logger.log( + Level.INFO, + "Invalid state, no artifacts found for pipeline '" + downstreamPipelineFullName + + "' while evaluating " + mavenArtifact.getShortDescription()); } else { mavenArtifacts.add(mavenArtifact); } continue; } - final Job downstreamPipeline = Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class); + final Job downstreamPipeline = + Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class); if (downstreamPipeline == null || downstreamPipeline.getLastBuild() == null) { if (logger.isLoggable(Level.FINE)) { - logger.log(Level.FINE, "Downstream pipeline " + downstreamPipelineFullName + " or downstream pipeline last build not found. Database synchronization issue or security restriction?"); + logger.log( + Level.FINE, + "Downstream pipeline " + downstreamPipelineFullName + + " or downstream pipeline last build not found. 
Database synchronization issue or security restriction?"); } continue; } @@ -127,61 +153,94 @@ public Collection triggerDownstreamPipelines(@NonNull Collection transitiveUpstreamPipelines = globalPipelineMavenConfig.getDao().listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber); + Map transitiveUpstreamPipelines = globalPipelineMavenConfig + .getDao() + .listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber); for (String transitiveUpstreamPipelineName : transitiveUpstreamPipelines.keySet()) { // Skip if one of the downstream's upstream is already building or in queue // Then it will get triggered anyway by that upstream, we don't need to trigger it again - Job transitiveUpstreamPipeline = Jenkins.get().getItemByFullName(transitiveUpstreamPipelineName, Job.class); + Job transitiveUpstreamPipeline = + Jenkins.get().getItemByFullName(transitiveUpstreamPipelineName, Job.class); if (transitiveUpstreamPipeline == null) { // security: not allowed to view this transitive upstream pipeline, continue to loop continue; } else if (transitiveUpstreamPipeline.isBuilding()) { - logger.log(Level.INFO, "Not triggering " + logger.modelHyperlinkNoteEncodeTo(downstreamPipeline) + - " because it has a dependency already building: " + logger.modelHyperlinkNoteEncodeTo(transitiveUpstreamPipeline)); + logger.log( + Level.INFO, + "Not triggering " + logger.modelHyperlinkNoteEncodeTo(downstreamPipeline) + + " because it has a dependency already building: " + + logger.modelHyperlinkNoteEncodeTo(transitiveUpstreamPipeline)); continue downstreamPipelinesLoop; } else if (transitiveUpstreamPipeline.isInQueue()) { - logger.log(Level.INFO, "Not triggering " + logger.modelHyperlinkNoteEncodeTo(downstreamPipeline) + - " because it has a dependency already building or in queue: " + logger.modelHyperlinkNoteEncodeTo(transitiveUpstreamPipeline)); + logger.log( + Level.INFO, + "Not triggering " + logger.modelHyperlinkNoteEncodeTo(downstreamPipeline) + + " 
because it has a dependency already building or in queue: " + + logger.modelHyperlinkNoteEncodeTo(transitiveUpstreamPipeline)); continue downstreamPipelinesLoop; } else if (downstreamPipelines.contains(transitiveUpstreamPipelineName)) { // Skip if this downstream pipeline will be triggered by another one of our downstream pipelines // That's the case when one of the downstream's transitive upstream is our own downstream - logger.log(Level.INFO, "Not triggering " + logger.modelHyperlinkNoteEncodeTo(downstreamPipeline) + - " because it has a dependency on a pipeline that will be triggered by this build: " + logger.modelHyperlinkNoteEncodeTo(transitiveUpstreamPipeline)); - omittedPipelineTriggersByPipelineFullname.computeIfAbsent(transitiveUpstreamPipelineName, p -> new TreeSet<>()).add(downstreamPipelineFullName); + logger.log( + Level.INFO, + "Not triggering " + logger.modelHyperlinkNoteEncodeTo(downstreamPipeline) + + " because it has a dependency on a pipeline that will be triggered by this build: " + + logger.modelHyperlinkNoteEncodeTo(transitiveUpstreamPipeline)); + omittedPipelineTriggersByPipelineFullname + .computeIfAbsent(transitiveUpstreamPipelineName, p -> new TreeSet<>()) + .add(downstreamPipelineFullName); continue downstreamPipelinesLoop; } } if (!downstreamPipeline.isBuildable()) { if (logger.isLoggable(Level.FINE)) { - logger.log(Level.FINE, "Skip triggering of non buildable (" + - (downstreamPipeline instanceof ParameterizedJobMixIn.ParameterizedJob ? ("disabled: " + ((ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline).isDisabled() + ", ") : "") + - "isHoldOffBuildUntilSave: " + downstreamPipeline.isHoldOffBuildUntilSave() + - ") downstream pipeline " + downstreamPipeline.getFullName()); + logger.log( + Level.FINE, + "Skip triggering of non buildable (" + + (downstreamPipeline instanceof ParameterizedJobMixIn.ParameterizedJob + ? 
("disabled: " + + ((ParameterizedJobMixIn.ParameterizedJob) + downstreamPipeline) + .isDisabled() + + ", ") + : "") + + "isHoldOffBuildUntilSave: " + + downstreamPipeline.isHoldOffBuildUntilSave() + ") downstream pipeline " + + downstreamPipeline.getFullName()); } continue; } - WorkflowJobDependencyTrigger downstreamPipelineTrigger = this.globalPipelineMavenConfig.getPipelineTriggerService().getWorkflowJobDependencyTrigger((ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline); + WorkflowJobDependencyTrigger downstreamPipelineTrigger = this.globalPipelineMavenConfig + .getPipelineTriggerService() + .getWorkflowJobDependencyTrigger( + (ParameterizedJobMixIn.ParameterizedJob) downstreamPipeline); if (downstreamPipelineTrigger == null) { - LOGGER.log(Level.FINE, "Skip triggering of downstream pipeline {0}: dependency trigger not configured", new Object[]{downstreamPipeline.getFullName()}); + LOGGER.log( + Level.FINE, + "Skip triggering of downstream pipeline {0}: dependency trigger not configured", + new Object[] {downstreamPipeline.getFullName()}); continue; } - boolean downstreamVisibleByUpstreamBuildAuth = this.globalPipelineMavenConfig.getPipelineTriggerService().isDownstreamVisibleByUpstreamBuildAuth(downstreamPipeline); + boolean downstreamVisibleByUpstreamBuildAuth = this.globalPipelineMavenConfig + .getPipelineTriggerService() + .isDownstreamVisibleByUpstreamBuildAuth(downstreamPipeline); if (downstreamVisibleByUpstreamBuildAuth) { Set mavenArtifacts = jobsToTrigger.computeIfAbsent(downstreamPipelineFullName, k -> new TreeSet<>()); - if(mavenArtifacts.contains(mavenArtifact)) { + if (mavenArtifacts.contains(mavenArtifact)) { // TODO display warning } else { mavenArtifacts.add(mavenArtifact); } } else { - LOGGER.log(Level.FINE, "Skip triggering of {0} by {1}", new Object[]{downstreamPipeline.getFullName(), cause}); + LOGGER.log(Level.FINE, "Skip triggering of {0} by {1}", new Object[] { + downstreamPipeline.getFullName(), cause + }); } } } @@ -190,7 
+249,7 @@ public Collection triggerDownstreamPipelines(@NonNull Collection> entry: jobsToTrigger.entrySet()) { + for (Map.Entry> entry : jobsToTrigger.entrySet()) { String downstreamJobFullName = entry.getKey(); Job downstreamJob = Jenkins.get().getItemByFullName(downstreamJobFullName, Job.class); if (downstreamJob == null) { @@ -208,44 +267,66 @@ public Collection triggerDownstreamPipelines(@NonNull Collection matchingMavenDependencies = MavenDependencyCauseHelper.isSameCause(cause, downstreamJobLastBuild.getCauses()); + List matchingMavenDependencies = + MavenDependencyCauseHelper.isSameCause(cause, downstreamJobLastBuild.getCauses()); if (matchingMavenDependencies.isEmpty()) { - for (Map.Entry> omittedPipeline : omittedPipelineTriggersByPipelineFullname.entrySet()) { + for (Map.Entry> omittedPipeline : + omittedPipelineTriggersByPipelineFullname.entrySet()) { if (omittedPipeline.getValue().contains(downstreamJobFullName)) { Job transitiveDownstreamJob = Jenkins.get().getItemByFullName(entry.getKey(), Job.class); - logger.log(Level.INFO,"[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " - + "downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + "because it will be triggered by transitive downstream " + transitiveDownstreamJob); + logger.log( + Level.INFO, + "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + + "downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + + "because it will be triggered by transitive downstream " + + transitiveDownstreamJob); continue triggerPipelinesLoop; // don't trigger downstream pipeline } } // trigger downstream pipeline } else { downstreamJobLastBuild.addAction(new CauseAction((Cause) cause)); - logger.log(Level.INFO, "Skip scheduling downstream pipeline " + logger.modelHyperlinkNoteEncodeTo(downstreamJob) + " as it was already triggered for Maven dependencies: " + - matchingMavenDependencies.stream().map(mavenDependency -> mavenDependency == null ? 
null : mavenDependency.getShortDescription()).collect(Collectors.joining(", "))); + logger.log( + Level.INFO, + "Skip scheduling downstream pipeline " + logger.modelHyperlinkNoteEncodeTo(downstreamJob) + + " as it was already triggered for Maven dependencies: " + + matchingMavenDependencies.stream() + .map(mavenDependency -> mavenDependency == null + ? null + : mavenDependency.getShortDescription()) + .collect(Collectors.joining(", "))); try { downstreamJobLastBuild.save(); } catch (IOException e) { - logger.log(Level.INFO, "Failure to update build " + downstreamJobLastBuild.getFullDisplayName() + ": " + e.toString()); + logger.log( + Level.INFO, + "Failure to update build " + downstreamJobLastBuild.getFullDisplayName() + ": " + + e.toString()); } continue; // don't trigger downstream pipeline } } - Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2(downstreamJob, -1, new CauseAction((Cause) cause)); + Queue.Item queuedItem = + ParameterizedJobMixIn.scheduleBuild2(downstreamJob, -1, new CauseAction((Cause) cause)); String dependenciesMessage = cause.getMavenArtifactsDescription(); if (queuedItem == null) { - logger.log(Level.INFO, "Skip triggering downstream pipeline " + logger.modelHyperlinkNoteEncodeTo(downstreamJob) + " due to dependencies on " + - dependenciesMessage + ", invocation rejected."); + logger.log( + Level.INFO, + "Skip triggering downstream pipeline " + logger.modelHyperlinkNoteEncodeTo(downstreamJob) + + " due to dependencies on " + dependenciesMessage + ", invocation rejected."); } else { triggeredPipelines.add(downstreamJobFullName); - logger.log(Level.FINE, "Triggering downstream pipeline " + logger.modelHyperlinkNoteEncodeTo(downstreamJob) + "#" + downstreamJob.getNextBuildNumber() + " due to dependency on " + - dependenciesMessage + " ..."); + logger.log( + Level.FINE, + "Triggering downstream pipeline " + logger.modelHyperlinkNoteEncodeTo(downstreamJob) + "#" + + downstreamJob.getNextBuildNumber() + " due to dependency on " + 
dependenciesMessage + + " ..."); } - } - long durationInMillis = TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTimeInNanos, TimeUnit.NANOSECONDS); + long durationInMillis = + TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTimeInNanos, TimeUnit.NANOSECONDS); if (durationInMillis > TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS) || logger.isLoggable(Level.FINE)) { logger.log(Level.INFO, "triggerDownstreamPipelines completed in " + durationInMillis + " ms"); } @@ -268,7 +349,9 @@ public void checkNoInfiniteLoopOfUpstreamCause(@NonNull Run initialBuild) throws Run upstreamBuild = upstreamCause.getUpstreamRun(); if (upstreamBuild == null) { // Can be Authorization, build deleted on the file system... - } else if (Objects.equals(upstreamBuild.getParent().getFullName(), initialBuild.getParent().getFullName())) { + } else if (Objects.equals( + upstreamBuild.getParent().getFullName(), + initialBuild.getParent().getFullName())) { throw new IllegalStateException("Infinite loop of job triggers "); } else { builds.add(upstreamBuild); @@ -279,7 +362,8 @@ public void checkNoInfiniteLoopOfUpstreamCause(@NonNull Run initialBuild) throws } @Nullable - public WorkflowJobDependencyTrigger getWorkflowJobDependencyTrigger(@NonNull ParameterizedJobMixIn.ParameterizedJob parameterizedJob) { + public WorkflowJobDependencyTrigger getWorkflowJobDependencyTrigger( + @NonNull ParameterizedJobMixIn.ParameterizedJob parameterizedJob) { Map> triggers = parameterizedJob.getTriggers(); for (Trigger trigger : triggers.values()) { if (trigger instanceof WorkflowJobDependencyTrigger) { @@ -306,22 +390,32 @@ public WorkflowJobDependencyTrigger getWorkflowJobDependencyTrigger(@NonNull Par } } } - } return null; } - public boolean isUpstreamBuildVisibleByDownstreamBuildAuth(@NonNull Job upstreamPipeline, @NonNull Job downstreamPipeline) { + public boolean isUpstreamBuildVisibleByDownstreamBuildAuth( + @NonNull Job upstreamPipeline, @NonNull Job downstreamPipeline) { Authentication 
downstreamPipelineAuth = Tasks.getAuthenticationOf((Queue.FlyweightTask) downstreamPipeline); - // see https://github.com/jenkinsci/jenkins/blob/jenkins-2.176.2/core/src/main/java/jenkins/triggers/ReverseBuildTrigger.java#L132 + // see + // https://github.com/jenkinsci/jenkins/blob/jenkins-2.176.2/core/src/main/java/jenkins/triggers/ReverseBuildTrigger.java#L132 // jenkins.triggers.ReverseBuildTrigger#shouldTrigger try (ACLContext ignored = ACL.as(downstreamPipelineAuth)) { - Job upstreamPipelineObtainedAsImpersonated = getItemByFullName(upstreamPipeline.getFullName(), Job.class); + Job upstreamPipelineObtainedAsImpersonated = + getItemByFullName(upstreamPipeline.getFullName(), Job.class); boolean result = upstreamPipelineObtainedAsImpersonated != null; if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.log(Level.FINE, "isUpstreamBuildVisibleByDownstreamBuildAuth(upstreamPipeline: {0}, downstreamPipeline: {1}): downstreamPipelineAuth: {2}, upstreamPipelineObtainedAsImpersonated:{3}, result: {4}", - new Object[]{upstreamPipeline.getFullName(), downstreamPipeline.getFullName(), downstreamPipelineAuth, upstreamPipelineObtainedAsImpersonated, result}); + LOGGER.log( + Level.FINE, + "isUpstreamBuildVisibleByDownstreamBuildAuth(upstreamPipeline: {0}, downstreamPipeline: {1}): downstreamPipelineAuth: {2}, upstreamPipelineObtainedAsImpersonated:{3}, result: {4}", + new Object[] { + upstreamPipeline.getFullName(), + downstreamPipeline.getFullName(), + downstreamPipelineAuth, + upstreamPipelineObtainedAsImpersonated, + result + }); } return result; } @@ -329,8 +423,9 @@ public boolean isUpstreamBuildVisibleByDownstreamBuildAuth(@NonNull Job up public boolean isDownstreamVisibleByUpstreamBuildAuth(@NonNull Item downstreamPipeline) { boolean result = getItemByFullName(downstreamPipeline.getFullName(), Job.class) != null; - LOGGER.log(Level.FINE, "isDownstreamVisibleByUpstreamBuildAuth({0}, auth: {1}): {2}", - new Object[]{downstreamPipeline, Jenkins.getAuthentication(), result}); + 
LOGGER.log(Level.FINE, "isDownstreamVisibleByUpstreamBuildAuth({0}, auth: {1}): {2}", new Object[] { + downstreamPipeline, Jenkins.getAuthentication(), result + }); return result; } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLogger.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLogger.java index 25ba840c..1600dddf 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLogger.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLogger.java @@ -24,10 +24,9 @@ package org.jenkinsci.plugins.pipeline.maven.service; -import hudson.model.Item; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import hudson.model.Item; import java.util.logging.Level; /** diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerImpl.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerImpl.java index 6806bee8..45a4a67a 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerImpl.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerImpl.java @@ -24,10 +24,9 @@ package org.jenkinsci.plugins.pipeline.maven.service; -import hudson.model.Item; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import hudson.model.Item; import java.io.PrintStream; import java.util.logging.Level; import java.util.logging.Logger; @@ -41,6 +40,7 @@ public class ServiceLoggerImpl implements ServiceLogger { @NonNull private final PrintStream stdOut, stdErr; + @Nullable String prefix; @@ -61,7 +61,7 @@ public void log(Level level, String message) { return; } StringBuilder messageToWrite = new StringBuilder(); - if (prefix != null && ! 
prefix.isEmpty()) { + if (prefix != null && !prefix.isEmpty()) { messageToWrite.append(prefix).append(" "); } messageToWrite.append(level).append(" ").append(message); diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerJulImpl.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerJulImpl.java index c0333287..27eb2626 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerJulImpl.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/service/ServiceLoggerJulImpl.java @@ -24,10 +24,9 @@ package org.jenkinsci.plugins.pipeline.maven.service; -import hudson.model.Item; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import hudson.model.Item; import java.util.logging.Level; import java.util.logging.Logger; @@ -36,7 +35,7 @@ */ public class ServiceLoggerJulImpl implements ServiceLogger { - private transient final Logger logger; + private final transient Logger logger; public ServiceLoggerJulImpl(@NonNull Logger logger) { this.logger = logger; diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTrigger.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTrigger.java index 9d057542..e1cb7f11 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTrigger.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTrigger.java @@ -17,21 +17,20 @@ public class WorkflowJobDependencyTrigger extends Trigger { @DataBoundConstructor - public WorkflowJobDependencyTrigger(){ - - } + public WorkflowJobDependencyTrigger() {} @Symbol("snapshotDependencies") @Extension public static class DescriptorImpl extends TriggerDescriptor { @Override public boolean isApplicable(Item item) { - 
return item instanceof BlockableResume || item instanceof MultiBranchProject || item instanceof OrganizationFolder; + return item instanceof BlockableResume + || item instanceof MultiBranchProject + || item instanceof OrganizationFolder; } public String getDisplayName() { return Messages.trigger_workflow_job_dependency_description(); } - } } diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtils.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtils.java index c49e1d06..2477194c 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtils.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtils.java @@ -1,8 +1,7 @@ package org.jenkinsci.plugins.pipeline.maven.util; -import hudson.FilePath; - import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.FilePath; /** * @author Cyrille Le Clerc @@ -15,7 +14,7 @@ public static boolean isAbsolutePath(@NonNull String path) { // windows path such as "C:\path\to\..." return true; } else // Microsoft Windows UNC mount ("\\myserver\myfolder") - if (path.length() > 3 && path.charAt(1) == ':' && path.charAt(2) == '/') { + if (path.length() > 3 && path.charAt(1) == ':' && path.charAt(2) == '/') { // nasty windows path such as "C:/path/to/...". 
See JENKINS-44088 return true; } else return path.length() > 2 && path.charAt(0) == '\\' && path.charAt(1) == '\\'; @@ -23,7 +22,6 @@ public static boolean isAbsolutePath(@NonNull String path) { // see java.io.UnixFileSystem.prefixLength() return path.charAt(0) == '/'; } - } public static boolean isWindows(@NonNull FilePath path) { diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/TaskListenerTraceWrapper.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/TaskListenerTraceWrapper.java index 78132b3a..cf5b4305 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/TaskListenerTraceWrapper.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/TaskListenerTraceWrapper.java @@ -1,7 +1,6 @@ package org.jenkinsci.plugins.pipeline.maven.util; import hudson.model.TaskListener; - import java.io.IOException; import java.io.PrintStream; @@ -15,7 +14,6 @@ public class TaskListenerTraceWrapper { private final boolean traceability; private final PrintStream console; - /** * Wrap the given TaskListener. * @@ -28,7 +26,6 @@ public TaskListenerTraceWrapper(final TaskListener taskListener, final boolean t this.console = taskListener.getLogger(); } - /** * Prints the given String to the underlying TaskListener if traceability is enabled. * @@ -83,7 +80,6 @@ public PrintStream format(final String format, Object... args) { return console.format(format, args); } - /** * Wraps {@link TaskListener#getLogger()} format calls. * If traceability is disabled do nothing. 
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtils.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtils.java index 72ceb5f9..c1158b66 100644 --- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtils.java +++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtils.java @@ -24,26 +24,9 @@ package org.jenkinsci.plugins.pipeline.maven.util; -import hudson.FilePath; -import org.apache.commons.lang.StringUtils; -import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; -import org.jenkinsci.plugins.pipeline.maven.MavenDependency; -import org.jenkinsci.plugins.pipeline.maven.MavenSpyLogProcessor; -import org.w3c.dom.Attr; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.EntityResolver; -import org.xml.sax.InputSource; -import org.xml.sax.SAXException; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; +import hudson.FilePath; import java.io.File; import java.io.IOException; import java.io.StringWriter; @@ -57,6 +40,22 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import org.apache.commons.lang.StringUtils; +import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; +import org.jenkinsci.plugins.pipeline.maven.MavenDependency; +import org.jenkinsci.plugins.pipeline.maven.MavenSpyLogProcessor; +import org.w3c.dom.Attr; +import 
org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.EntityResolver; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; /** * @author Cyrille Le Clerc @@ -85,27 +84,30 @@ private static void loadMavenArtifact(Element artifactElt, MavenArtifact mavenAr mavenArtifact.setArtifactId(artifactElt.getAttribute("artifactId")); mavenArtifact.setVersion(artifactElt.getAttribute("version")); mavenArtifact.setBaseVersion(artifactElt.getAttribute("baseVersion")); - if (mavenArtifact.getBaseVersion() == null || mavenArtifact.getBaseVersion().isEmpty()) { + if (mavenArtifact.getBaseVersion() == null + || mavenArtifact.getBaseVersion().isEmpty()) { mavenArtifact.setBaseVersion(mavenArtifact.getVersion()); } String snapshot = artifactElt.getAttribute("snapshot"); - mavenArtifact.setSnapshot(snapshot != null && !snapshot.trim().isEmpty() - ? Boolean.parseBoolean(artifactElt.getAttribute("snapshot")) - : mavenArtifact.getBaseVersion().contains("SNAPSHOT")); + mavenArtifact.setSnapshot( + snapshot != null && !snapshot.trim().isEmpty() + ? Boolean.parseBoolean(artifactElt.getAttribute("snapshot")) + : mavenArtifact.getBaseVersion().contains("SNAPSHOT")); mavenArtifact.setType(artifactElt.getAttribute("type")); if (mavenArtifact.getType() == null || mavenArtifact.getType().isEmpty()) { - // workaround: sometimes we use "XmlUtils.newMavenArtifact()" on "project" elements, in this case, "packaging" is defined but "type" is not defined + // workaround: sometimes we use "XmlUtils.newMavenArtifact()" on "project" elements, in this case, + // "packaging" is defined but "type" is not defined // we should probably not use "MavenArtifact" mavenArtifact.setType(artifactElt.getAttribute("packaging")); } - mavenArtifact.setClassifier(artifactElt.hasAttribute("classifier") ? artifactElt.getAttribute("classifier") : null); + mavenArtifact.setClassifier( + artifactElt.hasAttribute("classifier") ? 
artifactElt.getAttribute("classifier") : null); mavenArtifact.setExtension(artifactElt.getAttribute("extension")); } - /* - - */ + + */ public static MavenSpyLogProcessor.PluginInvocation newPluginInvocation(Element pluginInvocationElt) { MavenSpyLogProcessor.PluginInvocation pluginInvocation = new MavenSpyLogProcessor.PluginInvocation(); pluginInvocation.groupId = pluginInvocationElt.getAttribute("groupId"); @@ -133,7 +135,8 @@ public static Element getUniqueChildElementOrNull(@NonNull Element element, Stri if (childElts.size() == 0) { return null; } else if (childElts.size() > 1) { - throw new IllegalStateException("More than 1 (" + childElts.size() + ") elements <" + childEltName + "> found in " + toString(element)); + throw new IllegalStateException("More than 1 (" + childElts.size() + ") elements <" + childEltName + + "> found in " + toString(element)); } result = childElts.get(0); @@ -243,13 +246,16 @@ public static List getArtifactDeployedEvents(@NonNull Element mavenSpyL * @return The "RepositoryEvent" of type "ARTIFACT_DEPLOYED" or {@code null} if non found */ @Nullable - public static Element getArtifactDeployedEvent(@NonNull List artifactDeployedEvents, @NonNull String filePath) { - for (Element artifactDeployedEvent: artifactDeployedEvents) { - if (!"RepositoryEvent".equals(artifactDeployedEvent.getNodeName()) || !"ARTIFACT_DEPLOYED".equals(artifactDeployedEvent.getAttribute("type"))) { + public static Element getArtifactDeployedEvent( + @NonNull List artifactDeployedEvents, @NonNull String filePath) { + for (Element artifactDeployedEvent : artifactDeployedEvents) { + if (!"RepositoryEvent".equals(artifactDeployedEvent.getNodeName()) + || !"ARTIFACT_DEPLOYED".equals(artifactDeployedEvent.getAttribute("type"))) { // skip unexpected element continue; } - String deployedArtifactFilePath = getUniqueChildElement(artifactDeployedEvent, "artifact").getAttribute("file"); + String deployedArtifactFilePath = + getUniqueChildElement(artifactDeployedEvent, 
"artifact").getAttribute("file"); if (Objects.equals(filePath, deployedArtifactFilePath)) { return artifactDeployedEvent; } @@ -258,15 +264,20 @@ public static Element getArtifactDeployedEvent(@NonNull List artifactDe } /* - - - - ${project.build.directory}/surefire-reports - - - */ + + + + ${project.build.directory}/surefire-reports + + + */ @NonNull - public static List getExecutionEventsByPlugin(@NonNull Element mavenSpyLogs, String pluginGroupId, String pluginArtifactId, String pluginGoal, String... eventType) { + public static List getExecutionEventsByPlugin( + @NonNull Element mavenSpyLogs, + String pluginGroupId, + String pluginArtifactId, + String pluginGoal, + String... eventType) { Set eventTypes = new HashSet<>(Arrays.asList(eventType)); List result = new ArrayList<>(); @@ -277,35 +288,34 @@ public static List getExecutionEventsByPlugin(@NonNull Element mavenSpy if (pluginElt == null) { } else { - if (pluginElt.getAttribute("groupId").equals(pluginGroupId) && - pluginElt.getAttribute("artifactId").equals(pluginArtifactId) && - pluginElt.getAttribute("goal").equals(pluginGoal)) { + if (pluginElt.getAttribute("groupId").equals(pluginGroupId) + && pluginElt.getAttribute("artifactId").equals(pluginArtifactId) + && pluginElt.getAttribute("goal").equals(pluginGoal)) { result.add(executionEventElt); } else { } } } - } return result; } /* - - - - - - ${jar.finalName} - ${project.build.directory} - - - */ + + + + + + ${jar.finalName} + ${project.build.directory} + + + */ @NonNull public static List getExecutedLifecyclePhases(@NonNull Element mavenSpyLogs) { List lifecyclePhases = new ArrayList<>(); - for (Element mojoSucceededEvent :getExecutionEvents(mavenSpyLogs, "MojoSucceeded")) { + for (Element mojoSucceededEvent : getExecutionEvents(mavenSpyLogs, "MojoSucceeded")) { Element pluginElement = getUniqueChildElement(mojoSucceededEvent, "plugin"); String lifecyclePhase = pluginElement.getAttribute("lifecyclePhase"); if (!lifecyclePhases.contains(lifecyclePhase)) 
{ @@ -340,8 +350,10 @@ public static String getPathInWorkspace(@NonNull final String absoluteFilePath, if (workspaceRemote.startsWith("/var/") && absoluteFilePath.startsWith("/private/var/")) { // workaround MacOSX special folders path - // eg String workspace = "/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven"; - // eg String absolutePath = "/private/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven/pom.xml"; + // eg String workspace = + // "/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven"; + // eg String absolutePath = + // "/private/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven/pom.xml"; sanitizedWorkspaceRemote = workspaceRemote; sanitizedAbsoluteFilePath = absoluteFilePath.substring("/private".length()); } @@ -349,20 +361,24 @@ public static String getPathInWorkspace(@NonNull final String absoluteFilePath, if (StringUtils.startsWithIgnoreCase(sanitizedAbsoluteFilePath, sanitizedWorkspaceRemote)) { // OK } else if (sanitizedWorkspaceRemote.contains("/workspace/") - && sanitizedAbsoluteFilePath.contains("/workspace/") - ) { + && sanitizedAbsoluteFilePath.contains("/workspace/")) { // workaround JENKINS-46084 // sanitizedAbsoluteFilePath = '/app/Jenkins/home/workspace/testjob/pom.xml' // sanitizedWorkspaceRemote = '/var/lib/jenkins/workspace/testjob' - sanitizedAbsoluteFilePath = "/workspace/" + StringUtils.substringAfter(sanitizedAbsoluteFilePath, "/workspace/"); - sanitizedWorkspaceRemote = "/workspace/" + StringUtils.substringAfter(sanitizedWorkspaceRemote, "/workspace/"); - } else if (sanitizedWorkspaceRemote.endsWith("/workspace") && 
sanitizedAbsoluteFilePath.contains("/workspace/")) { + sanitizedAbsoluteFilePath = + "/workspace/" + StringUtils.substringAfter(sanitizedAbsoluteFilePath, "/workspace/"); + sanitizedWorkspaceRemote = + "/workspace/" + StringUtils.substringAfter(sanitizedWorkspaceRemote, "/workspace/"); + } else if (sanitizedWorkspaceRemote.endsWith("/workspace") + && sanitizedAbsoluteFilePath.contains("/workspace/")) { // workspace = "/var/lib/jenkins/jobs/Test-Pipeline/workspace"; // absolutePath = "/storage/jenkins/jobs/Test-Pipeline/workspace/pom.xml"; - sanitizedAbsoluteFilePath = "workspace/" + StringUtils.substringAfter(sanitizedAbsoluteFilePath, "/workspace/"); + sanitizedAbsoluteFilePath = + "workspace/" + StringUtils.substringAfter(sanitizedAbsoluteFilePath, "/workspace/"); sanitizedWorkspaceRemote = "workspace/"; } else { - throw new IllegalArgumentException("Cannot relativize '" + absoluteFilePath + "' relatively to '" + workspace.getRemote() + "'"); + throw new IllegalArgumentException( + "Cannot relativize '" + absoluteFilePath + "' relatively to '" + workspace.getRemote() + "'"); } String relativePath = StringUtils.removeStartIgnoreCase(sanitizedAbsoluteFilePath, sanitizedWorkspaceRemote); @@ -372,7 +388,9 @@ public static String getPathInWorkspace(@NonNull final String absoluteFilePath, if (windows) { relativePath = relativePath.replace('/', '\\'); } - LOGGER.log(Level.FINEST, "getPathInWorkspace({0}, {1}: {2}", new Object[]{absoluteFilePath, workspaceRemote, relativePath}); + LOGGER.log(Level.FINEST, "getPathInWorkspace({0}, {1}: {2}", new Object[] { + absoluteFilePath, workspaceRemote, relativePath + }); return relativePath; } @@ -435,7 +453,6 @@ public static String join(@NonNull Iterable elements, @NonNull String de return result.toString(); } - @NonNull public static List listGeneratedArtifacts(Element mavenSpyLogs, boolean includeAttachedArtifacts) { @@ -455,12 +472,14 @@ public static List listGeneratedArtifacts(Element mavenSpyLogs, b 
pomArtifact.setType("pom"); pomArtifact.setExtension("pom"); pomArtifact.setFile(projectElt.getAttribute("file")); - Element artifactDeployedEvent = XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, pomArtifact.getFile()); + Element artifactDeployedEvent = + XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, pomArtifact.getFile()); if (artifactDeployedEvent == null) { // artifact has not been deployed ("mvn deploy") pomArtifact.setVersion(projectArtifact.getVersion()); } else { - pomArtifact.setVersion(XmlUtils.getUniqueChildElement(artifactDeployedEvent, "artifact").getAttribute("version")); + pomArtifact.setVersion(XmlUtils.getUniqueChildElement(artifactDeployedEvent, "artifact") + .getAttribute("version")); } result.add(pomArtifact); @@ -472,47 +491,64 @@ public static List listGeneratedArtifacts(Element mavenSpyLogs, b // TODO: evaluate if we really want to skip this file - cyrille le clerc 2018-04-12 } else { Element fileElt = XmlUtils.getUniqueChildElementOrNull(artifactElt, "file"); - if (fileElt == null || fileElt.getTextContent() == null || fileElt.getTextContent().isEmpty()) { + if (fileElt == null + || fileElt.getTextContent() == null + || fileElt.getTextContent().isEmpty()) { if (LOGGER.isLoggable(Level.FINER)) { - LOGGER.log(Level.FINE, "listGeneratedArtifacts: Project " + projectArtifact + ": no associated file found for " + - mavenArtifact + " in " + XmlUtils.toString(artifactElt)); + LOGGER.log( + Level.FINE, + "listGeneratedArtifacts: Project " + projectArtifact + + ": no associated file found for " + mavenArtifact + " in " + + XmlUtils.toString(artifactElt)); } } else { mavenArtifact.setFile(StringUtils.trim(fileElt.getTextContent())); - artifactDeployedEvent = XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, mavenArtifact.getFile()); + artifactDeployedEvent = + XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, mavenArtifact.getFile()); if (artifactDeployedEvent == null) { // artifact has not been deployed ("mvn 
deploy") } else { - mavenArtifact.setVersion(XmlUtils.getUniqueChildElement(artifactDeployedEvent, "artifact").getAttribute("version")); - mavenArtifact.setRepositoryUrl(XmlUtils.getUniqueChildElement(artifactDeployedEvent, "repository").getAttribute("url")); + mavenArtifact.setVersion(XmlUtils.getUniqueChildElement(artifactDeployedEvent, "artifact") + .getAttribute("version")); + mavenArtifact.setRepositoryUrl( + XmlUtils.getUniqueChildElement(artifactDeployedEvent, "repository") + .getAttribute("url")); } } result.add(mavenArtifact); } if (includeAttachedArtifacts) { - Element attachedArtifactsParentElt = XmlUtils.getUniqueChildElement(projectSucceededElt, "attachedArtifacts"); - List attachedArtifactsElts = XmlUtils.getChildrenElements(attachedArtifactsParentElt, "artifact"); + Element attachedArtifactsParentElt = + XmlUtils.getUniqueChildElement(projectSucceededElt, "attachedArtifacts"); + List attachedArtifactsElts = + XmlUtils.getChildrenElements(attachedArtifactsParentElt, "artifact"); for (Element attachedArtifactElt : attachedArtifactsElts) { MavenArtifact attachedMavenArtifact = XmlUtils.newMavenArtifact(attachedArtifactElt); Element fileElt = XmlUtils.getUniqueChildElementOrNull(attachedArtifactElt, "file"); - if (fileElt == null || fileElt.getTextContent() == null || fileElt.getTextContent().isEmpty()) { + if (fileElt == null + || fileElt.getTextContent() == null + || fileElt.getTextContent().isEmpty()) { if (LOGGER.isLoggable(Level.FINER)) { - LOGGER.log(Level.FINER, "Project " + projectArtifact + ", no associated file found for attached artifact " + - attachedMavenArtifact + " in " + XmlUtils.toString(attachedArtifactElt)); + LOGGER.log( + Level.FINER, + "Project " + projectArtifact + ", no associated file found for attached artifact " + + attachedMavenArtifact + " in " + XmlUtils.toString(attachedArtifactElt)); } } else { attachedMavenArtifact.setFile(StringUtils.trim(fileElt.getTextContent())); - Element attachedArtifactDeployedEvent = 
XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, attachedMavenArtifact.getFile()); - if(attachedArtifactDeployedEvent == null) { + Element attachedArtifactDeployedEvent = XmlUtils.getArtifactDeployedEvent( + artifactDeployedEvents, attachedMavenArtifact.getFile()); + if (attachedArtifactDeployedEvent == null) { // artifact has not been deployed ("mvn deploy") } else { - attachedMavenArtifact.setRepositoryUrl(XmlUtils.getUniqueChildElement(attachedArtifactDeployedEvent, "repository").getAttribute("url")); + attachedMavenArtifact.setRepositoryUrl( + XmlUtils.getUniqueChildElement(attachedArtifactDeployedEvent, "repository") + .getAttribute("url")); } - } result.add(attachedMavenArtifact); } @@ -527,9 +563,9 @@ public static List listGeneratedArtifacts(Element mavenSpyLogs, b * * @see jenkins.util.xml.RestrictiveEntityResolver */ - public final static class RestrictiveEntityResolver implements EntityResolver { + public static final class RestrictiveEntityResolver implements EntityResolver { - public final static RestrictiveEntityResolver INSTANCE = new RestrictiveEntityResolver(); + public static final RestrictiveEntityResolver INSTANCE = new RestrictiveEntityResolver(); private RestrictiveEntityResolver() { // prevent multiple instantiation. 
@@ -541,7 +577,8 @@ private RestrictiveEntityResolver() { */ @Override public InputSource resolveEntity(String publicId, String systemId) throws SAXException, IOException { - throw new SAXException("Refusing to resolve entity with publicId(" + publicId + ") and systemId (" + systemId + ")"); + throw new SAXException( + "Refusing to resolve entity with publicId(" + publicId + ") and systemId (" + systemId + ")"); } } } diff --git a/pipeline-maven/src/test/java/jenkins/scm/impl/mock/GitSampleRepoRuleUtils.java b/pipeline-maven/src/test/java/jenkins/scm/impl/mock/GitSampleRepoRuleUtils.java index dc0a8a5b..e49478f1 100644 --- a/pipeline-maven/src/test/java/jenkins/scm/impl/mock/GitSampleRepoRuleUtils.java +++ b/pipeline-maven/src/test/java/jenkins/scm/impl/mock/GitSampleRepoRuleUtils.java @@ -1,8 +1,8 @@ package jenkins.scm.impl.mock; -import edu.umd.cs.findbugs.annotations.NonNull; -import jenkins.plugins.git.GitSampleRepoRule; +import static java.nio.file.FileVisitResult.CONTINUE; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileVisitOption; @@ -12,8 +12,7 @@ import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.EnumSet; - -import static java.nio.file.FileVisitResult.CONTINUE; +import jenkins.plugins.git.GitSampleRepoRule; /** * @author Cyrille Le Clerc @@ -27,28 +26,28 @@ public class GitSampleRepoRuleUtils { * @param sampleDVCSRepoRule target sample repo * @throws IOException exception during the copy */ - public static void addFilesAndCommit(@NonNull Path rootFolder, @NonNull GitSampleRepoRule sampleDVCSRepoRule) throws IOException { + public static void addFilesAndCommit(@NonNull Path rootFolder, @NonNull GitSampleRepoRule sampleDVCSRepoRule) + throws IOException { - if (!Files.exists(rootFolder)) - return; + if (!Files.exists(rootFolder)) return; if (!Files.isDirectory(rootFolder)) throw new 
IllegalArgumentException("Given root file is not a folder: " + rootFolder); if (sampleDVCSRepoRule.sampleRepo == null) - throw new IllegalStateException("SampleDVCSRepoRule has not been initialized", new NullPointerException("sampleRepo is null")); + throw new IllegalStateException( + "SampleDVCSRepoRule has not been initialized", new NullPointerException("sampleRepo is null")); final Path source = rootFolder; final Path target = sampleDVCSRepoRule.sampleRepo.toPath(); - Files.walkFileTree(source, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, - new SimpleFileVisitor() { + Files.walkFileTree( + source, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { Path targetDir = target.resolve(source.relativize(dir)); try { Files.copy(dir, targetDir); } catch (FileAlreadyExistsException e) { - if (!Files.isDirectory(targetDir)) - throw e; + if (!Files.isDirectory(targetDir)) throw e; } return CONTINUE; } @@ -67,6 +66,5 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO } catch (Exception e) { throw new IOException("Exception executing 'git commit' adding " + source, e); } - } } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/AbstractIntegrationTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/AbstractIntegrationTest.java index fe13ed77..705f85ed 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/AbstractIntegrationTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/AbstractIntegrationTest.java @@ -4,6 +4,10 @@ import static org.jenkinsci.plugins.pipeline.maven.TestUtils.runAfterMethod; import static org.jenkinsci.plugins.pipeline.maven.TestUtils.runBeforeMethod; +import hudson.FilePath; +import hudson.model.Fingerprint; +import hudson.tasks.Fingerprinter; +import 
hudson.tasks.Maven; import java.io.Closeable; import java.io.File; import java.io.IOException; @@ -12,7 +16,12 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; - +import jenkins.model.Jenkins; +import jenkins.mvn.DefaultGlobalSettingsProvider; +import jenkins.mvn.DefaultSettingsProvider; +import jenkins.mvn.GlobalMavenConfig; +import jenkins.plugins.git.GitSampleRepoRule; +import jenkins.scm.impl.mock.GitSampleRepoRuleUtils; import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; @@ -24,17 +33,6 @@ import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.junit.jupiter.WithJenkins; -import hudson.FilePath; -import hudson.model.Fingerprint; -import hudson.tasks.Fingerprinter; -import hudson.tasks.Maven; -import jenkins.model.Jenkins; -import jenkins.mvn.DefaultGlobalSettingsProvider; -import jenkins.mvn.DefaultSettingsProvider; -import jenkins.mvn.GlobalMavenConfig; -import jenkins.plugins.git.GitSampleRepoRule; -import jenkins.scm.impl.mock.GitSampleRepoRuleUtils; - /** * @author Cyrille Le Clerc */ @@ -64,7 +62,8 @@ public void setup(JenkinsRule r) throws Exception { Maven.MavenInstallation mvn = configureDefaultMaven("3.6.3", Maven.MavenInstallation.MAVEN_30); - Maven.MavenInstallation m3 = new Maven.MavenInstallation("apache-maven-3.6.3", mvn.getHome(), JenkinsRule.NO_PROPERTIES); + Maven.MavenInstallation m3 = + new Maven.MavenInstallation("apache-maven-3.6.3", mvn.getHome(), JenkinsRule.NO_PROPERTIES); Jenkins.get().getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(m3); mavenInstallationName = mvn.getName(); @@ -89,20 +88,25 @@ public static void stopWatcher() { } protected void loadMonoDependencyMavenProjectInGitRepo(GitSampleRepoRule gitRepo) throws Exception { - loadSourceCodeInGitRepository(gitRepo, 
"/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/mono_dependency_maven_jar_project/"); + loadSourceCodeInGitRepository( + gitRepo, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/mono_dependency_maven_jar_project/"); } protected void loadMavenJarProjectInGitRepo(GitSampleRepoRule gitRepo) throws Exception { - loadSourceCodeInGitRepository(gitRepo, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_project/"); + loadSourceCodeInGitRepository( + gitRepo, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_project/"); } protected void loadMavenWarProjectInGitRepo(GitSampleRepoRule gitRepo) throws Exception { - loadSourceCodeInGitRepository(gitRepo, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_war_project/"); + loadSourceCodeInGitRepository( + gitRepo, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_war_project/"); } protected void loadSourceCodeInGitRepository(GitSampleRepoRule gitRepo, String name) throws Exception { gitRepo.init(); - Path mavenProjectRoot = Paths.get(WithMavenStepOnMasterTest.class.getResource(name).toURI()); + Path mavenProjectRoot = + Paths.get(WithMavenStepOnMasterTest.class.getResource(name).toURI()); if (!Files.exists(mavenProjectRoot)) { throw new IllegalStateException("Folder '" + mavenProjectRoot + "' not found"); } @@ -116,22 +120,25 @@ protected Maven.MavenInstallation configureDefaultMaven(String mavenVersion, int File mvnHome = new File(buildDirectory, "apache-maven-" + mavenVersion); if (!mvnHome.exists()) { FilePath mvn = Jenkins.get().getRootPath().createTempFile("maven", "zip"); - mvn.copyFrom(new URL( - "https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/" + mavenVersion + "/apache-maven-" + mavenVersion + "-bin.tar.gz")); + mvn.copyFrom(new URL("https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/" + mavenVersion + + "/apache-maven-" + mavenVersion + "-bin.tar.gz")); 
mvn.untar(new FilePath(buildDirectory), FilePath.TarCompression.GZIP); } - Maven.MavenInstallation mavenInstallation = new Maven.MavenInstallation("default", mvnHome.getAbsolutePath(), JenkinsRule.NO_PROPERTIES); + Maven.MavenInstallation mavenInstallation = + new Maven.MavenInstallation("default", mvnHome.getAbsolutePath(), JenkinsRule.NO_PROPERTIES); Jenkins.get().getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(mavenInstallation); return mavenInstallation; } - protected void verifyFileIsFingerPrinted(WorkflowJob pipeline, WorkflowRun build, String fileName) throws java.io.IOException { + protected void verifyFileIsFingerPrinted(WorkflowJob pipeline, WorkflowRun build, String fileName) + throws java.io.IOException { Fingerprinter.FingerprintAction fingerprintAction = build.getAction(Fingerprinter.FingerprintAction.class); Map records = fingerprintAction.getRecords(); String jarFileMd5sum = records.get(fileName); assertThat(jarFileMd5sum).isNotNull(); - Fingerprint jarFileFingerPrint = jenkinsRule.getInstance().getFingerprintMap().get(jarFileMd5sum); + Fingerprint jarFileFingerPrint = + jenkinsRule.getInstance().getFingerprintMap().get(jarFileMd5sum); assertThat(jarFileFingerPrint.getFileName()).isEqualTo(fileName); assertThat(jarFileFingerPrint.getOriginal().getJob().getName()).isEqualTo(pipeline.getName()); assertThat(jarFileFingerPrint.getOriginal().getNumber()).isEqualTo(build.getNumber()); diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeNeedDockerTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeNeedDockerTest.java index 0432e858..59fb8e80 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeNeedDockerTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeNeedDockerTest.java @@ -6,9 +6,13 @@ import static java.util.Collections.singletonList; import static 
org.assertj.core.api.Assertions.assertThat; +import com.cloudbees.plugins.credentials.CredentialsProvider; +import hudson.ExtensionList; +import io.jenkins.plugins.casc.ConfigurationAsCode; +import io.jenkins.plugins.casc.ConfigurationContext; +import io.jenkins.plugins.casc.ConfiguratorRegistry; import java.nio.file.Files; import java.nio.file.Path; - import org.apache.commons.lang.StringUtils; import org.jenkinsci.plugins.pipeline.maven.dao.MonitoringPipelineMavenPluginDaoDecorator; import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginNullDao; @@ -23,22 +27,18 @@ import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; -import com.cloudbees.plugins.credentials.CredentialsProvider; - -import hudson.ExtensionList; -import io.jenkins.plugins.casc.ConfigurationAsCode; -import io.jenkins.plugins.casc.ConfigurationContext; -import io.jenkins.plugins.casc.ConfiguratorRegistry; - @WithJenkins @Testcontainers(disabledWithoutDocker = true) // Testcontainers does not support docker on Windows 2019 servers public class ConfigurationAsCodeNeedDockerTest { @Container - public static MySQLContainer MYSQL_DB = new MySQLContainer<>(MySQLContainer.NAME).withUsername("aUser").withPassword("aPass"); + public static MySQLContainer MYSQL_DB = + new MySQLContainer<>(MySQLContainer.NAME).withUsername("aUser").withPassword("aPass"); @Container - public static PostgreSQLContainer POSTGRE_DB = new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE).withUsername("aUser").withPassword("aPass"); + public static PostgreSQLContainer POSTGRE_DB = new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE) + .withUsername("aUser") + .withPassword("aPass"); @Test public void should_support_mysql_configuration(JenkinsRule r) throws Exception { @@ -57,10 +57,12 @@ public void should_support_mysql_configuration(JenkinsRule r) throws Exception { ConfigurationAsCode.get().configure(singletonList(tmpYml.toFile().getAbsolutePath())); - 
GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.getJdbcUrl()).isEqualTo(jdbcUrl); - assertThat(config.getProperties()).isEqualTo("dataSource.cachePrepStmts=true\ndataSource.prepStmtCacheSize=250\n"); + assertThat(config.getProperties()) + .isEqualTo("dataSource.cachePrepStmts=true\ndataSource.prepStmtCacheSize=250\n"); assertThat(config.getDaoClass()).isEqualTo(PipelineMavenPluginMySqlDao.class.getName()); // we can't really test the PipelineMavenPluginMySqlDao is used as it is plenty @@ -98,7 +100,8 @@ public void should_support_postgres_configuration(JenkinsRule r) throws Exceptio ConfigurationAsCode.get().configure(singletonList(tmpYml.toFile().getAbsolutePath())); - GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.getJdbcUrl()).isEqualTo(jdbcUrl); assertThat(config.getDaoClass()).isEqualTo(PipelineMavenPluginPostgreSqlDao.class.getName()); @@ -120,5 +123,4 @@ public void should_support_postgres_configuration(JenkinsRule r) throws Exceptio POSTGRE_DB.stop(); } } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeTest.java index 1bb9fe73..1c8c57b7 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/ConfigurationAsCodeTest.java @@ -6,23 +6,26 @@ import static java.util.Collections.singletonList; import static org.assertj.core.api.Assertions.assertThat; +import io.jenkins.plugins.casc.ConfigurationAsCode; +import 
io.jenkins.plugins.casc.ConfigurationContext; +import io.jenkins.plugins.casc.ConfiguratorRegistry; import org.jenkinsci.plugins.pipeline.maven.publishers.MavenLinkerPublisher2; import org.junit.jupiter.api.Test; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.junit.jupiter.WithJenkins; -import io.jenkins.plugins.casc.ConfigurationAsCode; -import io.jenkins.plugins.casc.ConfigurationContext; -import io.jenkins.plugins.casc.ConfiguratorRegistry; - @WithJenkins public class ConfigurationAsCodeTest { @Test public void should_support_default_configuration(JenkinsRule r) throws Exception { - ConfigurationAsCode.get().configure(singletonList(getClass().getResource("configuration-as-code_default.yml").toExternalForm())); + ConfigurationAsCode.get() + .configure(singletonList(getClass() + .getResource("configuration-as-code_default.yml") + .toExternalForm())); - GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.isGlobalTraceability()).isFalse(); assertThat(config.getJdbcUrl()).isNull(); @@ -45,9 +48,13 @@ public void should_support_default_configuration(JenkinsRule r) throws Exception @Test public void should_support_global_traceability(JenkinsRule r) throws Exception { - ConfigurationAsCode.get().configure(singletonList(getClass().getResource("configuration-as-code_traceability.yml").toExternalForm())); + ConfigurationAsCode.get() + .configure(singletonList(getClass() + .getResource("configuration-as-code_traceability.yml") + .toExternalForm())); - GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.isGlobalTraceability()).isTrue(); @@ -61,9 +68,13 @@ public void 
should_support_global_traceability(JenkinsRule r) throws Exception { @Test public void should_support_triggers_configuration(JenkinsRule r) throws Exception { - ConfigurationAsCode.get().configure(singletonList(getClass().getResource("configuration-as-code_triggers.yml").toExternalForm())); + ConfigurationAsCode.get() + .configure(singletonList(getClass() + .getResource("configuration-as-code_triggers.yml") + .toExternalForm())); - GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.isTriggerDownstreamUponResultAborted()).isTrue(); assertThat(config.isTriggerDownstreamUponResultFailure()).isTrue(); @@ -81,9 +92,13 @@ public void should_support_triggers_configuration(JenkinsRule r) throws Exceptio @Test public void should_support_postgresql_configuration(JenkinsRule r) throws Exception { - ConfigurationAsCode.get().configure(singletonList(getClass().getResource("configuration-as-code_postgresql.yml").toExternalForm())); + ConfigurationAsCode.get() + .configure(singletonList(getClass() + .getResource("configuration-as-code_postgresql.yml") + .toExternalForm())); - GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.getJdbcUrl()).isEqualTo("theJdbcUrl"); assertThat(config.getJdbcCredentialsId()).isEqualTo("credsId"); @@ -98,13 +113,18 @@ public void should_support_postgresql_configuration(JenkinsRule r) throws Except @Test public void should_support_publishers_configuration(JenkinsRule r) throws Exception { - ConfigurationAsCode.get().configure(singletonList(getClass().getResource("configuration-as-code_publishers.yml").toExternalForm())); + ConfigurationAsCode.get() + .configure(singletonList(getClass() + 
.getResource("configuration-as-code_publishers.yml") + .toExternalForm())); - GlobalPipelineMavenConfig config = r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); + GlobalPipelineMavenConfig config = + r.jenkins.getExtensionList(GlobalPipelineMavenConfig.class).get(0); assertThat(config.getPublisherOptions()).hasSize(1); assertThat(config.getPublisherOptions().get(0)).isInstanceOf(MavenLinkerPublisher2.class); - MavenLinkerPublisher2 publisher = (MavenLinkerPublisher2) config.getPublisherOptions().get(0); + MavenLinkerPublisher2 publisher = + (MavenLinkerPublisher2) config.getPublisherOptions().get(0); assertThat(publisher.isDisabled()).isTrue(); ConfiguratorRegistry registry = ConfiguratorRegistry.get(); diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyFingerprintPublisherTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyFingerprintPublisherTest.java index 04859566..b509e79d 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyFingerprintPublisherTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyFingerprintPublisherTest.java @@ -2,15 +2,13 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.model.Fingerprint; +import hudson.model.Result; import java.util.Hashtable; - import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.junit.jupiter.api.Test; -import hudson.model.Fingerprint; -import hudson.model.Result; - /** * @author Cyrille Le Clerc */ @@ -28,7 +26,7 @@ public void verify_fingerprinting_of_dependencies() throws Exception { loadMonoDependencyMavenProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(options:[dependenciesFingerprintPublisher(includeReleaseVersions:true)]) {\n" + 
@@ -39,7 +37,7 @@ public void verify_fingerprinting_of_dependencies() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on String commonsLang3version35Md5 = "780b5a8b72eebe6d0dbff1c11b5658fa"; @@ -52,7 +50,8 @@ public void verify_fingerprinting_of_dependencies() throws Exception { Fingerprint fingerprint = jenkinsRule.jenkins.getFingerprintMap().get(commonsLang3version35Md5); assertThat(fingerprint).isNotNull(); - assertThat(fingerprint.getFileName()).isEqualTo("org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar"); + assertThat(fingerprint.getFileName()) + .isEqualTo("org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar"); Fingerprint.BuildPtr original = fingerprint.getOriginal(); assertThat(original).isNull(); Hashtable usages = fingerprint.getUsages(); @@ -60,7 +59,8 @@ public void verify_fingerprinting_of_dependencies() throws Exception { assertThat(usages).containsKey(firstPipeline.getName()); } { // second job using commons-lang3:3.5 - WorkflowJob secondPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-mono-dependency-maven-project-2"); + WorkflowJob secondPipeline = + jenkinsRule.createProject(WorkflowJob.class, "build-mono-dependency-maven-project-2"); secondPipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); jenkinsRule.assertBuildStatus(Result.SUCCESS, secondPipeline.scheduleBuild2(0)); @@ -71,6 +71,5 @@ public void verify_fingerprinting_of_dependencies() throws Exception { assertThat(usages).containsKey(firstPipeline.getName()); assertThat(usages).containsKey(secondPipeline.getName()); } - } } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyGraphTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyGraphTest.java index 30b28e84..7a17117e 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyGraphTest.java +++ 
b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/DependencyGraphTest.java @@ -4,12 +4,20 @@ import static org.jenkinsci.plugins.pipeline.maven.TestUtils.runAfterMethod; import static org.jenkinsci.plugins.pipeline.maven.TestUtils.runBeforeMethod; +import hudson.ExtensionList; +import hudson.model.Cause; +import hudson.model.CauseAction; +import hudson.model.Result; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Future; import java.util.stream.Collectors; - +import jenkins.branch.BranchSource; +import jenkins.plugins.git.GitSCMSource; +import jenkins.plugins.git.GitSampleRepoRule; +import jenkins.plugins.git.traits.BranchDiscoveryTrait; +import jenkins.scm.api.trait.SCMSourceTrait; import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; import org.jenkinsci.plugins.pipeline.maven.db.PipelineMavenPluginH2Dao; import org.jenkinsci.plugins.pipeline.maven.publishers.PipelineGraphPublisher; @@ -25,16 +33,6 @@ import org.junit.jupiter.api.condition.EnabledOnOs; import org.junit.jupiter.api.condition.OS; -import hudson.ExtensionList; -import hudson.model.Cause; -import hudson.model.CauseAction; -import hudson.model.Result; -import jenkins.branch.BranchSource; -import jenkins.plugins.git.GitSCMSource; -import jenkins.plugins.git.GitSampleRepoRule; -import jenkins.plugins.git.traits.BranchDiscoveryTrait; -import jenkins.scm.api.trait.SCMSourceTrait; - /** * @author Cyrille Le Clerc */ @@ -44,7 +42,8 @@ public class DependencyGraphTest extends AbstractIntegrationTest { @BeforeEach public void setup() throws Exception { - ExtensionList.lookupSingleton(GlobalPipelineMavenConfig.class).setDaoClass(PipelineMavenPluginH2Dao.class.getName()); + ExtensionList.lookupSingleton(GlobalPipelineMavenConfig.class) + .setDaoClass(PipelineMavenPluginH2Dao.class.getName()); String jdbcUrl = "jdbc:h2:file:" + new File("target", getClass().getName() + "-h2").getAbsolutePath() + ";" + 
"AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE"; ExtensionList.lookupSingleton(GlobalPipelineMavenConfig.class).setJdbcUrl(jdbcUrl); @@ -77,7 +76,7 @@ public void verify_downstream_simple_pipeline_trigger() throws Exception { System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); loadMavenWarProjectInGitRepo(this.downstreamArtifactRepoRule); - //@formatter:off + // @formatter:off String mavenJarPipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -98,28 +97,32 @@ public void verify_downstream_simple_pipeline_trigger() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob mavenJarPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-maven-jar"); mavenJarPipeline.setDefinition(new CpsFlowDefinition(mavenJarPipelineScript, true)); mavenJarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenJarPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); + WorkflowRun mavenJarPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); // TODO check in DB that the generated artifact is recorded WorkflowJob mavenWarPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-maven-war"); mavenWarPipeline.setDefinition(new CpsFlowDefinition(mavenWarPipelineScript, true)); mavenWarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenWarPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenWarPipeline.scheduleBuild2(0)); + WorkflowRun mavenWarPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenWarPipeline.scheduleBuild2(0)); // TODO check in DB that the dependency on the war project is recorded System.out.println("mavenWarPipelineFirstRun: " + mavenWarPipelineFirstRun); - WorkflowRun mavenJarPipelineSecondRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, 
mavenJarPipeline.scheduleBuild2(0)); + WorkflowRun mavenJarPipelineSecondRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); jenkinsRule.waitUntilNoActivity(); WorkflowRun mavenWarPipelineLastRun = mavenWarPipeline.getLastBuild(); - System.out.println("mavenWarPipelineLastBuild: " + mavenWarPipelineLastRun + " caused by " + mavenWarPipelineLastRun.getCauses()); + System.out.println("mavenWarPipelineLastBuild: " + mavenWarPipelineLastRun + " caused by " + + mavenWarPipelineLastRun.getCauses()); assertThat(mavenWarPipelineLastRun.getNumber()).isEqualTo(mavenWarPipelineFirstRun.getNumber() + 1); Cause.UpstreamCause upstreamCause = mavenWarPipelineLastRun.getCause(Cause.UpstreamCause.class); @@ -136,7 +139,7 @@ public void verify_downstream_multi_branch_pipeline_trigger() throws Exception { System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); loadMavenWarProjectInGitRepo(this.downstreamArtifactRepoRule); - //@formatter:off + // @formatter:off String script = "node() {\n" + " checkout scm\n" + " withMaven() {\n" + @@ -147,7 +150,7 @@ public void verify_downstream_multi_branch_pipeline_trigger() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on gitRepoRule.write("Jenkinsfile", script); gitRepoRule.git("add", "Jenkinsfile"); gitRepoRule.git("commit", "--message=jenkinsfile"); @@ -158,11 +161,13 @@ public void verify_downstream_multi_branch_pipeline_trigger() throws Exception { // TRIGGER maven-jar#1 to record that "build-maven-jar" generates this jar and // install this maven jar in the local maven repo - WorkflowMultiBranchProject mavenJarPipeline = jenkinsRule.createProject(WorkflowMultiBranchProject.class, "build-maven-jar"); + WorkflowMultiBranchProject mavenJarPipeline = + jenkinsRule.createProject(WorkflowMultiBranchProject.class, "build-maven-jar"); mavenJarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); mavenJarPipeline.getSourcesList().add(new 
BranchSource(buildGitSCMSource(gitRepoRule.toString()))); System.out.println("trigger maven-jar#1..."); - WorkflowJob mavenJarPipelineMasterPipeline = WorkflowMultibranchProjectTestsUtils.scheduleAndFindBranchProject(mavenJarPipeline, "master"); + WorkflowJob mavenJarPipelineMasterPipeline = + WorkflowMultibranchProjectTestsUtils.scheduleAndFindBranchProject(mavenJarPipeline, "master"); assertThat(mavenJarPipeline.getItems()).hasSize(1); System.out.println("wait for maven-jar#1..."); jenkinsRule.waitUntilNoActivity(); @@ -172,11 +177,15 @@ public void verify_downstream_multi_branch_pipeline_trigger() throws Exception { // TRIGGER maven-war#1 to record that "build-maven-war" has a dependency on // "build-maven-jar" - WorkflowMultiBranchProject mavenWarPipeline = jenkinsRule.createProject(WorkflowMultiBranchProject.class, "build-maven-war"); + WorkflowMultiBranchProject mavenWarPipeline = + jenkinsRule.createProject(WorkflowMultiBranchProject.class, "build-maven-war"); mavenWarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - mavenWarPipeline.getSourcesList().add(new BranchSource(buildGitSCMSource(downstreamArtifactRepoRule.toString()))); + mavenWarPipeline + .getSourcesList() + .add(new BranchSource(buildGitSCMSource(downstreamArtifactRepoRule.toString()))); System.out.println("trigger maven-war#1..."); - WorkflowJob mavenWarPipelineMasterPipeline = WorkflowMultibranchProjectTestsUtils.scheduleAndFindBranchProject(mavenWarPipeline, "master"); + WorkflowJob mavenWarPipelineMasterPipeline = + WorkflowMultibranchProjectTestsUtils.scheduleAndFindBranchProject(mavenWarPipeline, "master"); assertThat(mavenWarPipeline.getItems()).hasSize(1); System.out.println("wait for maven-war#1..."); jenkinsRule.waitUntilNoActivity(); @@ -186,27 +195,30 @@ public void verify_downstream_multi_branch_pipeline_trigger() throws Exception { // TRIGGER maven-jar#2 so that it triggers "maven-war" and creates maven-war#2 System.out.println("trigger maven-jar#2..."); - Future 
mavenJarPipelineMasterPipelineSecondRunFuture = mavenJarPipelineMasterPipeline.scheduleBuild2(0, - new CauseAction(new Cause.RemoteCause("127.0.0.1", "junit test"))); + Future mavenJarPipelineMasterPipelineSecondRunFuture = + mavenJarPipelineMasterPipeline.scheduleBuild2( + 0, new CauseAction(new Cause.RemoteCause("127.0.0.1", "junit test"))); System.out.println("wait for maven-jar#2..."); mavenJarPipelineMasterPipelineSecondRunFuture.get(); jenkinsRule.waitUntilNoActivity(); WorkflowRun mavenWarPipelineLastRun = mavenWarPipelineMasterPipeline.getLastBuild(); - System.out.println("mavenWarPipelineLastBuild: " + mavenWarPipelineLastRun + " caused by " + mavenWarPipelineLastRun.getCauses()); + System.out.println("mavenWarPipelineLastBuild: " + mavenWarPipelineLastRun + " caused by " + + mavenWarPipelineLastRun.getCauses()); assertThat(mavenWarPipelineLastRun.getNumber()).isEqualTo(mavenWarPipelineFirstRun.getNumber() + 1); Cause.UpstreamCause upstreamCause = mavenWarPipelineLastRun.getCause(Cause.UpstreamCause.class); assertThat(upstreamCause).isNotNull(); - } @Test public void verify_osgi_bundle_recorded_as_bundle_and_as_jar() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/multi_module_bundle_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/multi_module_bundle_project/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -217,15 +229,18 @@ public void verify_osgi_bundle_recorded_as_bundle_and_as_jar() throws Exception " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on // TRIGGER maven-jar#1 to record that "build-maven-jar" - WorkflowJob multiModuleBundleProjectPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-multi-module-bundle"); + WorkflowJob multiModuleBundleProjectPipeline = + 
jenkinsRule.createProject(WorkflowJob.class, "build-multi-module-bundle"); multiModuleBundleProjectPipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); - WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, multiModuleBundleProjectPipeline.scheduleBuild2(0)); + WorkflowRun build = + jenkinsRule.assertBuildStatus(Result.SUCCESS, multiModuleBundleProjectPipeline.scheduleBuild2(0)); PipelineMavenPluginDao dao = GlobalPipelineMavenConfig.get().getDao(); - List generatedArtifacts = dao.getGeneratedArtifacts(multiModuleBundleProjectPipeline.getFullName(), build.getNumber()); + List generatedArtifacts = + dao.getGeneratedArtifacts(multiModuleBundleProjectPipeline.getFullName(), build.getNumber()); /* * [{skip_downstream_triggers=TRUE, type=pom, @@ -245,18 +260,22 @@ public void verify_osgi_bundle_recorded_as_bundle_and_as_jar() throws Exception System.out.println("generated artifacts" + generatedArtifacts); Iterable matchingArtifactTypes = generatedArtifacts.stream() - .filter(input -> input != null && input.getGroupId().equals("jenkins.mvn.test.bundle") && input.getArtifactId().equals("print-api") + .filter(input -> input != null + && input.getGroupId().equals("jenkins.mvn.test.bundle") + && input.getArtifactId().equals("print-api") && input.getVersion().equals("0.0.1-SNAPSHOT")) - .map(MavenArtifact::getType).collect(Collectors.toList()); + .map(MavenArtifact::getType) + .collect(Collectors.toList()); assertThat(matchingArtifactTypes).contains("jar", "bundle", "pom"); } @Test public void verify_downstream_pipeline_triggered_on_parent_pom_build() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_pom_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_pom_project/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + 
gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -267,18 +286,25 @@ public void verify_downstream_pipeline_triggered_on_parent_pom_build() throws Ex " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on // TRIGGER maven-jar#1 to record that "build-maven-jar" - WorkflowJob multiModuleBundleProjectPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-multi-module-bundle"); + WorkflowJob multiModuleBundleProjectPipeline = + jenkinsRule.createProject(WorkflowJob.class, "build-multi-module-bundle"); multiModuleBundleProjectPipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); - WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, multiModuleBundleProjectPipeline.scheduleBuild2(0)); + WorkflowRun build = + jenkinsRule.assertBuildStatus(Result.SUCCESS, multiModuleBundleProjectPipeline.scheduleBuild2(0)); PipelineMavenPluginDao dao = GlobalPipelineMavenConfig.get().getDao(); - List generatedArtifacts = dao.getGeneratedArtifacts(multiModuleBundleProjectPipeline.getFullName(), build.getNumber()); + List generatedArtifacts = + dao.getGeneratedArtifacts(multiModuleBundleProjectPipeline.getFullName(), build.getNumber()); - Iterable matchingArtifactTypes = generatedArtifacts.stream().filter(input -> input != null && input.getGroupId().equals("com.example") - && input.getArtifactId().equals("my-pom") && input.getVersion().equals("0.1-SNAPSHOT")).map(MavenArtifact::getType) + Iterable matchingArtifactTypes = generatedArtifacts.stream() + .filter(input -> input != null + && input.getGroupId().equals("com.example") + && input.getArtifactId().equals("my-pom") + && input.getVersion().equals("0.1-SNAPSHOT")) + .map(MavenArtifact::getType) .collect(Collectors.toList()); assertThat(matchingArtifactTypes).contains("pom"); @@ -287,12 +313,14 @@ public void verify_downstream_pipeline_triggered_on_parent_pom_build() throws Ex @Test public void verify_downstream_pipeline_triggered_on_jar_having_parent_pom_dependency() throws Exception { 
System.out.println("gitRepoRule: " + gitRepoRule); - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_pom_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_pom_project/"); System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); - loadSourceCodeInGitRepository(this.downstreamArtifactRepoRule, + loadSourceCodeInGitRepository( + this.downstreamArtifactRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_with_parent_pom_project/"); - //@formatter:off + // @formatter:off String mavenParentPipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -313,22 +341,25 @@ public void verify_downstream_pipeline_triggered_on_jar_having_parent_pom_depend " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob mavenParentPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-maven-parent"); mavenParentPipeline.setDefinition(new CpsFlowDefinition(mavenParentPipelineScript, true)); mavenParentPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenParentPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenParentPipeline.scheduleBuild2(0)); + WorkflowRun mavenParentPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenParentPipeline.scheduleBuild2(0)); // TODO check in DB that the generated artifact is recorded WorkflowJob mavenJarPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-maven-jar"); mavenJarPipeline.setDefinition(new CpsFlowDefinition(mavenJarPipelineScript, true)); mavenJarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenJarPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); + WorkflowRun mavenJarPipelineFirstRun = + 
jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); // TODO check in DB that the dependency on the war project is recorded - WorkflowRun mavenParentPipelineSecondRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenParentPipeline.scheduleBuild2(0)); + WorkflowRun mavenParentPipelineSecondRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenParentPipeline.scheduleBuild2(0)); jenkinsRule.waitUntilNoActivity(); @@ -349,7 +380,7 @@ public void verify_downstream_pipeline_triggered_on_war_having_jar_dependency() System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); loadMavenWarProjectInGitRepo(this.downstreamArtifactRepoRule); - //@formatter:off + // @formatter:off String mavenJarPipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -370,29 +401,33 @@ public void verify_downstream_pipeline_triggered_on_war_having_jar_dependency() " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob mavenJarPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-maven-jar"); mavenJarPipeline.setDefinition(new CpsFlowDefinition(mavenJarPipelineScript, true)); mavenJarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenJarPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); + WorkflowRun mavenJarPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); // TODO check in DB that the generated artifact is recorded WorkflowJob mavenWarPipeline = jenkinsRule.createProject(WorkflowJob.class, "build-maven-war"); mavenWarPipeline.setDefinition(new CpsFlowDefinition(mavenWarPipelineScript, true)); mavenWarPipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenWarPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenWarPipeline.scheduleBuild2(0)); + WorkflowRun mavenWarPipelineFirstRun = + 
jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenWarPipeline.scheduleBuild2(0)); // TODO check in DB that the dependency on the war project is recorded System.out.println("mavenWarPipelineFirstRun: " + mavenWarPipelineFirstRun); - WorkflowRun mavenJarPipelineSecondRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); + WorkflowRun mavenJarPipelineSecondRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenJarPipeline.scheduleBuild2(0)); jenkinsRule.waitUntilNoActivity(); WorkflowRun mavenWarPipelineLastRun = mavenWarPipeline.getLastBuild(); - System.out.println("mavenWarPipelineLastBuild: " + mavenWarPipelineLastRun + " caused by " + mavenWarPipelineLastRun.getCauses()); + System.out.println("mavenWarPipelineLastBuild: " + mavenWarPipelineLastRun + " caused by " + + mavenWarPipelineLastRun.getCauses()); assertThat(mavenWarPipelineLastRun.getNumber()).isEqualTo(mavenWarPipelineFirstRun.getNumber() + 1); Cause.UpstreamCause upstreamCause = mavenWarPipelineLastRun.getCause(Cause.UpstreamCause.class); @@ -402,55 +437,62 @@ public void verify_downstream_pipeline_triggered_on_war_having_jar_dependency() @Test public void verify_nbm_downstream_simple_pipeline_trigger() throws Exception { System.out.println("gitRepoRule: " + gitRepoRule); - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_nbm_dependency_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_nbm_dependency_project/"); System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); - loadSourceCodeInGitRepository(this.downstreamArtifactRepoRule, + loadSourceCodeInGitRepository( + this.downstreamArtifactRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_nbm_base_project/"); - //@formatter:off - String mavenNbmDependencyPipelineScript = "node() {\n" - + " git($/" + 
gitRepoRule.toString() + "/$)\n" - + " withMaven() {\n" - + " if (isUnix()) {\n" - + " sh 'mvn install'\n" - + " } else {\n" - + " bat 'mvn install'\n" - + " }\n" - + " }\n" - + "}"; - String mavenNbmBasePipelineScript = "node() {\n" - + " git($/" + downstreamArtifactRepoRule.toString() + "/$)\n" - + " withMaven() {\n" - + " if (isUnix()) {\n" - + " sh 'mvn install'\n" - + " } else {\n" - + " bat 'mvn install'\n" - + " }\n" - + " }\n" - + "}"; - //@formatter:on + // @formatter:off + String mavenNbmDependencyPipelineScript = "node() {\n" + + " git($/" + gitRepoRule.toString() + "/$)\n" + + " withMaven() {\n" + + " if (isUnix()) {\n" + + " sh 'mvn install'\n" + + " } else {\n" + + " bat 'mvn install'\n" + + " }\n" + + " }\n" + + "}"; + String mavenNbmBasePipelineScript = "node() {\n" + + " git($/" + downstreamArtifactRepoRule.toString() + "/$)\n" + + " withMaven() {\n" + + " if (isUnix()) {\n" + + " sh 'mvn install'\n" + + " } else {\n" + + " bat 'mvn install'\n" + + " }\n" + + " }\n" + + "}"; + // @formatter:on WorkflowJob mavenNbmDependency = jenkinsRule.createProject(WorkflowJob.class, "build-nbm-dependency"); mavenNbmDependency.setDefinition(new CpsFlowDefinition(mavenNbmDependencyPipelineScript, true)); mavenNbmDependency.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenNbmDependencyPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenNbmDependency.scheduleBuild2(0)); + WorkflowRun mavenNbmDependencyPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenNbmDependency.scheduleBuild2(0)); // TODO check in DB that the generated artifact is recorded WorkflowJob mavenNbmBasePipeline = jenkinsRule.createProject(WorkflowJob.class, "build-nbm-base"); mavenNbmBasePipeline.setDefinition(new CpsFlowDefinition(mavenNbmBasePipelineScript, true)); mavenNbmBasePipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenNbmBasePipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, 
mavenNbmBasePipeline.scheduleBuild2(0)); + WorkflowRun mavenNbmBasePipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenNbmBasePipeline.scheduleBuild2(0)); // TODO check in DB that the dependency on the war project is recorded System.out.println("build-nbm-dependencyFirstRun: " + mavenNbmBasePipelineFirstRun); - WorkflowRun mavenNbmDependencyPipelineSecondRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenNbmDependency.scheduleBuild2(0)); + WorkflowRun mavenNbmDependencyPipelineSecondRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenNbmDependency.scheduleBuild2(0)); jenkinsRule.waitUntilNoActivity(); WorkflowRun mavenNbmBasePipelineLastRun = mavenNbmBasePipeline.getLastBuild(); - System.out.println("build-nbm-baseLastBuild: " + mavenNbmBasePipelineLastRun + " caused by " + mavenNbmBasePipelineLastRun.getCauses()); + System.out.println("build-nbm-baseLastBuild: " + mavenNbmBasePipelineLastRun + " caused by " + + mavenNbmBasePipelineLastRun.getCauses()); assertThat(mavenNbmBasePipelineLastRun.getNumber()).isEqualTo(mavenNbmBasePipelineFirstRun.getNumber() + 1); Cause.UpstreamCause upstreamCause = mavenNbmBasePipelineLastRun.getCause(Cause.UpstreamCause.class); @@ -461,57 +503,65 @@ public void verify_nbm_downstream_simple_pipeline_trigger() throws Exception { @EnabledOnOs(OS.LINUX) // Docker does not work on Windows 2019 servers CI agents public void verify_docker_downstream_simple_pipeline_trigger() throws Exception { System.out.println("gitRepoRule: " + gitRepoRule); - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_docker_dependency_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_docker_dependency_project/"); System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); - loadSourceCodeInGitRepository(this.downstreamArtifactRepoRule, + 
loadSourceCodeInGitRepository( + this.downstreamArtifactRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_docker_base_project/"); - //@formatter:off - String mavenDockerDependencyPipelineScript = "node() {\n" - + " git($/" + gitRepoRule.toString() + "/$)\n" - + " withMaven() {\n" - + " if (isUnix()) {\n" - + " sh 'mvn install'\n" - + " } else {\n" - + " bat 'mvn install'\n" - + " }\n" - + " }\n" - + "}"; - String mavenDockerBasePipelineScript = "node() {\n" - + " git($/" + downstreamArtifactRepoRule.toString() + "/$)\n" - + " withMaven() {\n" - + " if (isUnix()) {\n" - + " sh 'mvn install'\n" - + " } else {\n" - + " bat 'mvn install'\n" - + " }\n" - + " }\n" - + "}"; - //@formatter:on + // @formatter:off + String mavenDockerDependencyPipelineScript = "node() {\n" + + " git($/" + gitRepoRule.toString() + "/$)\n" + + " withMaven() {\n" + + " if (isUnix()) {\n" + + " sh 'mvn install'\n" + + " } else {\n" + + " bat 'mvn install'\n" + + " }\n" + + " }\n" + + "}"; + String mavenDockerBasePipelineScript = "node() {\n" + + " git($/" + downstreamArtifactRepoRule.toString() + "/$)\n" + + " withMaven() {\n" + + " if (isUnix()) {\n" + + " sh 'mvn install'\n" + + " } else {\n" + + " bat 'mvn install'\n" + + " }\n" + + " }\n" + + "}"; + // @formatter:on WorkflowJob mavenDockerDependency = jenkinsRule.createProject(WorkflowJob.class, "build-docker-dependency"); mavenDockerDependency.setDefinition(new CpsFlowDefinition(mavenDockerDependencyPipelineScript, true)); mavenDockerDependency.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenDockerDependencyPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDockerDependency.scheduleBuild2(0)); + WorkflowRun mavenDockerDependencyPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDockerDependency.scheduleBuild2(0)); // TODO check in DB that the generated artifact is recorded WorkflowJob mavenDockerBasePipeline = 
jenkinsRule.createProject(WorkflowJob.class, "build-docker-base"); mavenDockerBasePipeline.setDefinition(new CpsFlowDefinition(mavenDockerBasePipelineScript, true)); mavenDockerBasePipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenDockerBasePipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDockerBasePipeline.scheduleBuild2(0)); + WorkflowRun mavenDockerBasePipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDockerBasePipeline.scheduleBuild2(0)); // TODO check in DB that the dependency on the docker project is recorded System.out.println("build-docker-dependencyFirstRun: " + mavenDockerBasePipelineFirstRun); - WorkflowRun mavenDockerDependencyPipelineSecondRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDockerDependency.scheduleBuild2(0)); + WorkflowRun mavenDockerDependencyPipelineSecondRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDockerDependency.scheduleBuild2(0)); jenkinsRule.waitUntilNoActivity(); WorkflowRun mavenDockerBasePipelineLastRun = mavenDockerBasePipeline.getLastBuild(); - System.out.println("build-docker-baseLastBuild: " + mavenDockerBasePipelineLastRun + " caused by " + mavenDockerBasePipelineLastRun.getCauses()); + System.out.println("build-docker-baseLastBuild: " + mavenDockerBasePipelineLastRun + " caused by " + + mavenDockerBasePipelineLastRun.getCauses()); - assertThat(mavenDockerBasePipelineLastRun.getNumber()).isEqualTo(mavenDockerBasePipelineFirstRun.getNumber() + 1); + assertThat(mavenDockerBasePipelineLastRun.getNumber()) + .isEqualTo(mavenDockerBasePipelineFirstRun.getNumber() + 1); Cause.UpstreamCause upstreamCause = mavenDockerBasePipelineLastRun.getCause(Cause.UpstreamCause.class); assertThat(upstreamCause).isNotNull(); } @@ -519,62 +569,73 @@ public void verify_docker_downstream_simple_pipeline_trigger() throws Exception @Test public void verify_deployfile_downstream_simple_pipeline_trigger() throws Exception { PipelineGraphPublisher publisher = 
GlobalPipelineMavenConfig.get().getPublisherOptions().stream() - .filter(p -> PipelineGraphPublisher.class.isInstance(p)).findFirst().map(p -> (PipelineGraphPublisher) p).get(); + .filter(p -> PipelineGraphPublisher.class.isInstance(p)) + .findFirst() + .map(p -> (PipelineGraphPublisher) p) + .get(); publisher.setLifecycleThreshold("deploy"); System.out.println("gitRepoRule: " + gitRepoRule); - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_deployfile_dependency_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_deployfile_dependency_project/"); System.out.println("downstreamArtifactRepoRule: " + downstreamArtifactRepoRule); - loadSourceCodeInGitRepository(this.downstreamArtifactRepoRule, + loadSourceCodeInGitRepository( + this.downstreamArtifactRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_deployfile_base_project/"); - //@formatter:off - String mavenDeployFileDependencyPipelineScript = "node() {\n" - + " git($/" + gitRepoRule.toString() + "/$)\n" - + " withMaven() {\n" - + " if (isUnix()) {\n" - + " sh 'mvn install deploy:deploy-file@deploy-file'\n" - + " } else {\n" - + " bat 'mvn install deploy:deploy-file@deploy-file'\n" - + " }\n" - + " }\n" - + "}"; - String mavenDeployFileBasePipelineScript = "node() {\n" - + " git($/" + downstreamArtifactRepoRule.toString() + "/$)\n" - + " withMaven() {\n" - + " if (isUnix()) {\n" - + " sh 'mvn install'\n" - + " } else {\n" - + " bat 'mvn install'\n" - + " }\n" - + " }\n" - + "}"; - //@formatter:on - - WorkflowJob mavenDeployFileDependency = jenkinsRule.createProject(WorkflowJob.class, "build-deployfile-dependency"); + // @formatter:off + String mavenDeployFileDependencyPipelineScript = "node() {\n" + + " git($/" + gitRepoRule.toString() + "/$)\n" + + " withMaven() {\n" + + " if (isUnix()) {\n" + + " sh 'mvn install 
deploy:deploy-file@deploy-file'\n" + + " } else {\n" + + " bat 'mvn install deploy:deploy-file@deploy-file'\n" + + " }\n" + + " }\n" + + "}"; + String mavenDeployFileBasePipelineScript = "node() {\n" + + " git($/" + downstreamArtifactRepoRule.toString() + "/$)\n" + + " withMaven() {\n" + + " if (isUnix()) {\n" + + " sh 'mvn install'\n" + + " } else {\n" + + " bat 'mvn install'\n" + + " }\n" + + " }\n" + + "}"; + // @formatter:on + + WorkflowJob mavenDeployFileDependency = + jenkinsRule.createProject(WorkflowJob.class, "build-deployfile-dependency"); mavenDeployFileDependency.setDefinition(new CpsFlowDefinition(mavenDeployFileDependencyPipelineScript, true)); mavenDeployFileDependency.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenDeployFileDependencyPipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDeployFileDependency.scheduleBuild2(0)); + WorkflowRun mavenDeployFileDependencyPipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDeployFileDependency.scheduleBuild2(0)); // TODO check in DB that the generated artifact is recorded WorkflowJob mavenDeployFileBasePipeline = jenkinsRule.createProject(WorkflowJob.class, "build-deployfile-base"); mavenDeployFileBasePipeline.setDefinition(new CpsFlowDefinition(mavenDeployFileBasePipelineScript, true)); mavenDeployFileBasePipeline.addTrigger(new WorkflowJobDependencyTrigger()); - WorkflowRun mavenDeployFileBasePipelineFirstRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDeployFileBasePipeline.scheduleBuild2(0)); + WorkflowRun mavenDeployFileBasePipelineFirstRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDeployFileBasePipeline.scheduleBuild2(0)); // TODO check in DB that the dependency on the jar project is recorded System.out.println("build-deployfile-dependencyFirstRun: " + mavenDeployFileBasePipelineFirstRun); - WorkflowRun mavenDeployFileDependencyPipelineSecondRun = jenkinsRule.assertBuildStatus(Result.SUCCESS, 
mavenDeployFileDependency.scheduleBuild2(0)); + WorkflowRun mavenDeployFileDependencyPipelineSecondRun = + jenkinsRule.assertBuildStatus(Result.SUCCESS, mavenDeployFileDependency.scheduleBuild2(0)); jenkinsRule.waitUntilNoActivity(); WorkflowRun mavenDeployFileBasePipelineLastRun = mavenDeployFileBasePipeline.getLastBuild(); - System.out.println( - "build-deployfile-baseLastBuild: " + mavenDeployFileBasePipelineLastRun + " caused by " + mavenDeployFileBasePipelineLastRun.getCauses()); + System.out.println("build-deployfile-baseLastBuild: " + mavenDeployFileBasePipelineLastRun + " caused by " + + mavenDeployFileBasePipelineLastRun.getCauses()); - assertThat(mavenDeployFileBasePipelineLastRun.getNumber()).isEqualTo(mavenDeployFileBasePipelineFirstRun.getNumber() + 1); + assertThat(mavenDeployFileBasePipelineLastRun.getNumber()) + .isEqualTo(mavenDeployFileBasePipelineFirstRun.getNumber() + 1); Cause.UpstreamCause upstreamCause = mavenDeployFileBasePipelineLastRun.getCause(Cause.UpstreamCause.class); assertThat(upstreamCause).isNotNull(); } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfigTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfigTest.java index ba8ffead..0da5e7fd 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfigTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfigTest.java @@ -2,10 +2,16 @@ import static org.assertj.core.api.Assertions.assertThat; +import com.cloudbees.plugins.credentials.CredentialsProvider; +import com.mysql.cj.conf.RuntimeProperty; +import com.mysql.cj.jdbc.ConnectionImpl; +import com.zaxxer.hikari.HikariDataSource; +import com.zaxxer.hikari.pool.HikariProxyConnection; +import hudson.ExtensionList; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.sql.Connection; - +import jenkins.model.Jenkins; 
import org.jenkinsci.plugins.pipeline.maven.dao.CustomTypePipelineMavenPluginDaoDecorator; import org.jenkinsci.plugins.pipeline.maven.dao.MonitoringPipelineMavenPluginDaoDecorator; import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; @@ -22,24 +28,18 @@ import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; -import com.cloudbees.plugins.credentials.CredentialsProvider; -import com.mysql.cj.conf.RuntimeProperty; -import com.mysql.cj.jdbc.ConnectionImpl; -import com.zaxxer.hikari.HikariDataSource; -import com.zaxxer.hikari.pool.HikariProxyConnection; - -import hudson.ExtensionList; -import jenkins.model.Jenkins; - @Testcontainers(disabledWithoutDocker = true) // Testcontainers does not support docker on Windows 2019 servers @WithJenkins public class GlobalPipelineMavenConfigTest { @Container - public static MySQLContainer MYSQL_DB = new MySQLContainer<>(MySQLContainer.NAME).withUsername("aUser").withPassword("aPass"); + public static MySQLContainer MYSQL_DB = + new MySQLContainer<>(MySQLContainer.NAME).withUsername("aUser").withPassword("aPass"); @Container - public static PostgreSQLContainer POSTGRE_DB = new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE).withUsername("aUser").withPassword("aPass"); + public static PostgreSQLContainer POSTGRE_DB = new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE) + .withUsername("aUser") + .withPassword("aPass"); @BeforeAll public static void setup(JenkinsRule r) { @@ -65,7 +65,8 @@ public void shouldBuildH2Dao() throws Exception { @Test public void shouldBuildMysqlDao() throws Exception { config.setDaoClass(PipelineMavenPluginMySqlDao.class.getName()); - ExtensionList extensionList = Jenkins.getInstance().getExtensionList(CredentialsProvider.class); + ExtensionList extensionList = + Jenkins.getInstance().getExtensionList(CredentialsProvider.class); extensionList.add(extensionList.size(), new FakeCredentialsProvider("credsId", "aUser", "aPass", false)); 
config.setJdbcUrl(MYSQL_DB.getJdbcUrl()); config.setJdbcCredentialsId("credsId"); @@ -86,9 +87,11 @@ public void shouldBuildMysqlDao() throws Exception { assertThat(datasource.getPassword()).isEqualTo("aPass"); assertThat(datasource.getMaxLifetime()).isEqualTo(42000L); assertThat(datasource.getDataSourceProperties()).containsKey("dataSource.cachePrepStmts"); - assertThat(datasource.getDataSourceProperties().getProperty("dataSource.cachePrepStmts")).isEqualTo("true"); + assertThat(datasource.getDataSourceProperties().getProperty("dataSource.cachePrepStmts")) + .isEqualTo("true"); assertThat(datasource.getDataSourceProperties()).containsKey("dataSource.prepStmtCacheSize"); - assertThat(datasource.getDataSourceProperties().getProperty("dataSource.prepStmtCacheSize")).isEqualTo("250"); + assertThat(datasource.getDataSourceProperties().getProperty("dataSource.prepStmtCacheSize")) + .isEqualTo("250"); Connection connection = datasource.getConnection(); assertThat(connection).isInstanceOf(HikariProxyConnection.class); connection = (Connection) getField(connection, "delegate"); @@ -100,7 +103,8 @@ public void shouldBuildMysqlDao() throws Exception { @Test public void shouldBuildPostgresqlDao() throws Exception { config.setDaoClass(PipelineMavenPluginPostgreSqlDao.class.getName()); - ExtensionList extensionList = Jenkins.getInstance().getExtensionList(CredentialsProvider.class); + ExtensionList extensionList = + Jenkins.getInstance().getExtensionList(CredentialsProvider.class); extensionList.add(extensionList.size(), new FakeCredentialsProvider("credsId", "aUser", "aPass", false)); config.setJdbcUrl(POSTGRE_DB.getJdbcUrl()); config.setJdbcCredentialsId("credsId"); @@ -142,7 +146,9 @@ private Field findField(Class clazz, String name, Class type) { } private void makeAccessible(Field field) { - if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers()) || Modifier.isFinal(field.getModifiers())) + if 
((!Modifier.isPublic(field.getModifiers()) + || !Modifier.isPublic(field.getDeclaringClass().getModifiers()) + || Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) { field.setAccessible(true); } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategyTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategyTest.java index 257ee6d2..6bc0e0f7 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategyTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategyTest.java @@ -2,13 +2,14 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.util.StreamTaskListener; import java.io.ByteArrayOutputStream; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; - import org.jenkinsci.plugins.pipeline.maven.publishers.ConcordionTestsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesFingerprintPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.FindbugsAnalysisPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.GeneratedArtifactsPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.InvokerRunsPublisher; @@ -23,8 +24,6 @@ import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.junit.jupiter.WithJenkins; -import hudson.util.StreamTaskListener; - /** * @author Cyrille Le Clerc */ @@ -37,23 +36,25 @@ public void listMavenPublishers(JenkinsRule r) throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); - List mavenPublishers = MavenPublisherStrategy.IMPLICIT.buildPublishersList(Collections.emptyList(), new StreamTaskListener(baos)); + List mavenPublishers = MavenPublisherStrategy.IMPLICIT.buildPublishersList( + Collections.emptyList(), new StreamTaskListener(baos)); assertThat(mavenPublishers).hasSize(12); Map 
reportersByDescriptorId = new HashMap<>(); for (MavenPublisher mavenPublisher : mavenPublishers) { reportersByDescriptorId.put(mavenPublisher.getDescriptor().getId(), mavenPublisher); } - assertThat(reportersByDescriptorId).containsKey(new GeneratedArtifactsPublisher.DescriptorImpl().getId()); assertThat(reportersByDescriptorId).containsKey(new FindbugsAnalysisPublisher.DescriptorImpl().getId()); assertThat(reportersByDescriptorId).containsKey(new SpotBugsAnalysisPublisher.DescriptorImpl().getId()); - assertThat(reportersByDescriptorId).containsKey(new JunitTestsPublisher.DescriptorImpl().getId()); assertThat(reportersByDescriptorId).containsKey(new TasksScannerPublisher.DescriptorImpl().getId()); - assertThat(reportersByDescriptorId).containsKey(new PipelineGraphPublisher.DescriptorImpl().getId()); - assertThat(reportersByDescriptorId).containsKey(new InvokerRunsPublisher.DescriptorImpl().getId()); assertThat(reportersByDescriptorId).containsKey(new ConcordionTestsPublisher.DescriptorImpl().getId()); + assertThat(reportersByDescriptorId).containsKey(new DependenciesFingerprintPublisher.DescriptorImpl().getId()); + assertThat(reportersByDescriptorId).containsKey(new GeneratedArtifactsPublisher.DescriptorImpl().getId()); + assertThat(reportersByDescriptorId).containsKey(new InvokerRunsPublisher.DescriptorImpl().getId()); + assertThat(reportersByDescriptorId).containsKey(new JacocoReportPublisher.DescriptorImpl().getId()); assertThat(reportersByDescriptorId).containsKey(new JGivenTestsPublisher.DescriptorImpl().getId()); + assertThat(reportersByDescriptorId).containsKey(new JunitTestsPublisher.DescriptorImpl().getId()); assertThat(reportersByDescriptorId).containsKey(new MavenLinkerPublisher2.DescriptorImpl().getId()); - assertThat(reportersByDescriptorId).containsKey(new JacocoReportPublisher.DescriptorImpl().getId()); + assertThat(reportersByDescriptorId).containsKey(new PipelineGraphPublisher.DescriptorImpl().getId()); } } diff --git 
a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessorTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessorTest.java index aa06d34f..0fbab574 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessorTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessorTest.java @@ -3,6 +3,4 @@ /** * @author Cyrille Le Clerc */ -public class MavenSpyLogProcessorTest { - -} +public class MavenSpyLogProcessorTest {} diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/TestUtils.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/TestUtils.java index b856d6df..b1ccb0af 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/TestUtils.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/TestUtils.java @@ -6,23 +6,22 @@ import static org.springframework.util.ReflectionUtils.makeAccessible; import static org.springframework.util.ReflectionUtils.setField; +import hudson.model.Run; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.List; - import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; -import hudson.model.Run; - /** * @author Cyrille Le Clerc */ public class TestUtils { - public static Collection artifactsToArtifactsFileNames(Iterable.Artifact> artifacts) { + public static Collection artifactsToArtifactsFileNames( + Iterable.Artifact> artifacts) { List result = new ArrayList<>(); for (Run.Artifact artifact : artifacts) { result.add(artifact.getFileName()); diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2Test.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2Test.java index 
3d3167c5..e5dfcd89 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2Test.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2Test.java @@ -14,21 +14,21 @@ public class WithMavenStepExecution2Test { @Issue("JENKINS-57324") public void testEscapeWindowsBatchChars() { - //@formatter:off + // @formatter:off String mavenConfig = "--batch-mode --show-version " + - "--settings \"e:\\folder\\branches%2Ftest\\workspace@tmp\\withMaven94865076\\settings.xml\" " + - "--global-settings \"e:\\folder\\branches%2Ftest\\workspace@tmp\\withMaven94865076\\globalSettings.xml\""; - //@formatter:on + "--settings \"e:\\folder\\branches%2Ftest\\workspace@tmp\\withMaven94865076\\settings.xml\" " + + "--global-settings \"e:\\folder\\branches%2Ftest\\workspace@tmp\\withMaven94865076\\globalSettings.xml\""; + // @formatter:on String actualEscapedMavenConfig = mavenConfig.replace("%", "%%"); - //@formatter:off - String expectedEscapedMavenConfig = "--batch-mode --show-version " + - "--settings \"e:\\folder\\branches%%2Ftest\\workspace@tmp\\withMaven94865076\\settings.xml\" " + - "--global-settings \"e:\\folder\\branches%%2Ftest\\workspace@tmp\\withMaven94865076\\globalSettings.xml\""; - //formatter:on + // @formatter:off + String expectedEscapedMavenConfig = "--batch-mode --show-version " + + "--settings \"e:\\folder\\branches%%2Ftest\\workspace@tmp\\withMaven94865076\\settings.xml\" " + + "--global-settings \"e:\\folder\\branches%%2Ftest\\workspace@tmp\\withMaven94865076\\globalSettings.xml\""; + // @formatter:on - System.out.println("Expected escaped mavenConfig: " + expectedEscapedMavenConfig); + System.out.println("Expected escaped mavenConfig: " + expectedEscapedMavenConfig); - assertThat(actualEscapedMavenConfig).isEqualTo(expectedEscapedMavenConfig); + assertThat(actualEscapedMavenConfig).isEqualTo(expectedEscapedMavenConfig); } } diff --git
a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepGlobalConfigurationTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepGlobalConfigurationTest.java index 8ae2f8af..babe4805 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepGlobalConfigurationTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepGlobalConfigurationTest.java @@ -23,21 +23,30 @@ */ package org.jenkinsci.plugins.pipeline.maven; +import hudson.model.Result; import java.util.Collections; import java.util.logging.Level; import java.util.logging.Logger; - +import java.util.stream.Stream; import org.jenkinsci.Symbol; +import org.jenkinsci.plugins.pipeline.maven.publishers.ConcordionTestsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesFingerprintPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.FindbugsAnalysisPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.GeneratedArtifactsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.InvokerRunsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.JGivenTestsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.JacocoReportPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.MavenLinkerPublisher2; +import org.jenkinsci.plugins.pipeline.maven.publishers.PipelineGraphPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.SpotBugsAnalysisPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.TasksScannerPublisher; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; -import org.junit.jupiter.api.Test; - -import hudson.model.Result; +import org.junit.jupiter.params.ParameterizedTest; 
+import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; /** * TODO migrate to {@link WithMavenStepTest} once we have implemented a @@ -45,27 +54,10 @@ */ public class WithMavenStepGlobalConfigurationTest extends AbstractIntegrationTest { - @Test - public void maven_build_jar_project_on_master_disable_globally_findbugs_publisher_succeeds() throws Exception { - maven_build_jar_project_on_master_with_globally_disabled_publisher_succeeds(new FindbugsAnalysisPublisher.DescriptorImpl()); - } - - @Test - public void maven_build_jar_project_on_master_disable_globally_tasks_publisher_succeeds() throws Exception { - maven_build_jar_project_on_master_with_globally_disabled_publisher_succeeds(new TasksScannerPublisher.DescriptorImpl()); - } - - @Test - public void maven_build_jar_project_on_master_disable_globally_junit_publisher_succeeds() throws Exception { - maven_build_jar_project_on_master_with_globally_disabled_publisher_succeeds(new JunitTestsPublisher.DescriptorImpl()); - } - - @Test - public void maven_build_jar_project_on_master_disable_globally_generated_artifacts_publisher_succeeds() throws Exception { - maven_build_jar_project_on_master_with_globally_disabled_publisher_succeeds(new GeneratedArtifactsPublisher.DescriptorImpl()); - } - - private void maven_build_jar_project_on_master_with_globally_disabled_publisher_succeeds(MavenPublisher.DescriptorImpl descriptor) throws Exception { + @ParameterizedTest + @MethodSource("mavenPublisherDescriptors") + public void maven_build_jar_project_on_master_with_globally_disabled_publisher_succeeds( + MavenPublisher.DescriptorImpl descriptor) throws Exception { MavenPublisher publisher = descriptor.clazz.newInstance(); publisher.setDisabled(true); @@ -84,7 +76,7 @@ private void maven_build_jar_project_on_master_with_globally_disabled_publisher_ loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " 
git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -95,9 +87,10 @@ private void maven_build_jar_project_on_master_with_globally_disabled_publisher_ " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on - WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-" + symbol + "-publisher-globally-disabled"); + WorkflowJob pipeline = jenkinsRule.createProject( + WorkflowJob.class, "build-on-master-" + symbol + "-publisher-globally-disabled"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); @@ -109,28 +102,9 @@ private void maven_build_jar_project_on_master_with_globally_disabled_publisher_ } } - @Test - public void maven_build_jar_project_on_master_with_findbugs_publisher_configured_both_globally_and_on_the_pipeline_succeeds() throws Exception { - maven_build_jar_project_on_master_with_publisher_configured_both_globally_and_on_the_pipeline_succeeds(new FindbugsAnalysisPublisher.DescriptorImpl()); - } - - @Test - public void maven_build_jar_project_on_master_with_task_scanner_publisher_configured_both_globally_and_on_the_pipeline_succeeds() throws Exception { - maven_build_jar_project_on_master_with_publisher_configured_both_globally_and_on_the_pipeline_succeeds(new TasksScannerPublisher.DescriptorImpl()); - } - - @Test - public void maven_build_jar_project_on_master_with_junit_publisher_configured_both_globally_and_on_the_pipeline_succeeds() throws Exception { - maven_build_jar_project_on_master_with_publisher_configured_both_globally_and_on_the_pipeline_succeeds(new JunitTestsPublisher.DescriptorImpl()); - } - - @Test - public void maven_build_jar_project_on_master_with_generated_artifacts_publisher_configured_both_globally_and_on_the_pipeline_succeeds() throws Exception { - maven_build_jar_project_on_master_with_publisher_configured_both_globally_and_on_the_pipeline_succeeds( - new 
GeneratedArtifactsPublisher.DescriptorImpl()); - } - - private void maven_build_jar_project_on_master_with_publisher_configured_both_globally_and_on_the_pipeline_succeeds( + @ParameterizedTest + @MethodSource("mavenPublisherDescriptors") + public void maven_build_jar_project_on_master_with_publisher_configured_both_globally_and_on_the_pipeline_succeeds( MavenPublisher.DescriptorImpl descriptor) throws Exception { MavenPublisher publisher = descriptor.clazz.newInstance(); @@ -150,7 +124,7 @@ private void maven_build_jar_project_on_master_with_publisher_configured_both_gl loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(options:[" + symbol + "(disabled: true)]) {\n" + @@ -161,10 +135,10 @@ private void maven_build_jar_project_on_master_with_publisher_configured_both_gl " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on - WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, - "build-on-master-" + symbol + "-publisher-defined-globally-and-in-the-pipeline"); + WorkflowJob pipeline = jenkinsRule.createProject( + WorkflowJob.class, "build-on-master-" + symbol + "-publisher-defined-globally-and-in-the-pipeline"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); @@ -179,4 +153,19 @@ private void maven_build_jar_project_on_master_with_publisher_configured_both_gl } } + private static Stream mavenPublisherDescriptors() { + return Stream.of( + Arguments.of(new FindbugsAnalysisPublisher.DescriptorImpl()), + Arguments.of(new SpotBugsAnalysisPublisher.DescriptorImpl()), + Arguments.of(new TasksScannerPublisher.DescriptorImpl()), + Arguments.of(new ConcordionTestsPublisher.DescriptorImpl()), + Arguments.of(new DependenciesFingerprintPublisher.DescriptorImpl()), + Arguments.of(new 
GeneratedArtifactsPublisher.DescriptorImpl()), + Arguments.of(new InvokerRunsPublisher.DescriptorImpl()), + Arguments.of(new JacocoReportPublisher.DescriptorImpl()), + Arguments.of(new JGivenTestsPublisher.DescriptorImpl()), + Arguments.of(new JunitTestsPublisher.DescriptorImpl()), + Arguments.of(new MavenLinkerPublisher2.DescriptorImpl()), + Arguments.of(new PipelineGraphPublisher.DescriptorImpl())); + } } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepMavenExecResolutionTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepMavenExecResolutionTest.java index 8921738d..00df07ac 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepMavenExecResolutionTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepMavenExecResolutionTest.java @@ -25,22 +25,11 @@ import static org.assertj.core.api.Assertions.assertThat; -import java.util.Collections; - -import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; -import org.jenkinsci.plugins.workflow.job.WorkflowJob; -import org.jenkinsci.plugins.workflow.job.WorkflowRun; -import org.junit.jupiter.api.Test; -import org.jvnet.hudson.test.Issue; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.junit.jupiter.Testcontainers; - import com.cloudbees.plugins.credentials.Credentials; import com.cloudbees.plugins.credentials.CredentialsScope; import com.cloudbees.plugins.credentials.SystemCredentialsProvider; import com.cloudbees.plugins.credentials.domains.Domain; import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl; - import hudson.model.DownloadService; import hudson.model.Result; import hudson.plugins.sshslaves.SSHLauncher; @@ -49,6 +38,14 @@ import hudson.tasks.Maven; import hudson.tasks.Maven.MavenInstallation; import hudson.tools.InstallSourceProperty; +import java.util.Collections; +import 
org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; +import org.jenkinsci.plugins.workflow.job.WorkflowJob; +import org.jenkinsci.plugins.workflow.job.WorkflowRun; +import org.junit.jupiter.api.Test; +import org.jvnet.hudson.test.Issue; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.junit.jupiter.Testcontainers; @Testcontainers(disabledWithoutDocker = true) // Testcontainers does not support docker on Windows 2019 servers @Issue("JENKINS-43651") @@ -61,21 +58,24 @@ public class WithMavenStepMavenExecResolutionTest extends AbstractIntegrationTes @Test public void testMavenNotInstalledInDockerImage() throws Exception { - try (GenericContainer nonMavenContainerRule = new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22)) { + try (GenericContainer nonMavenContainerRule = + new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22)) { nonMavenContainerRule.start(); - assertThat(nonMavenContainerRule.execInContainer("mvn", "--version").getStdout()).contains("exec: \"mvn\": executable file not found in $PATH"); + assertThat(nonMavenContainerRule.execInContainer("mvn", "--version").getStdout()) + .contains("exec: \"mvn\": executable file not found in $PATH"); } } @Test public void testMavenGlobalToolRecognizedInScriptedPipeline() throws Exception { - try (GenericContainer nonMavenContainerRule = new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22)) { + try (GenericContainer nonMavenContainerRule = + new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22)) { nonMavenContainerRule.start(); registerAgentForContainer(nonMavenContainerRule); String version = registerLatestMavenVersionAsGlobalTool(); - //@formatter:off - WorkflowRun run = runPipeline("" + + // @formatter:off + WorkflowRun run = runPipeline( "node('" + AGENT_NAME + "') {\n" + " def mavenHome = tool '" + MAVEN_GLOBAL_TOOL_NAME + "'\n" + " withEnv([\"MAVEN_HOME=${mavenHome}\"]) 
{\n" + @@ -83,28 +83,31 @@ public void testMavenGlobalToolRecognizedInScriptedPipeline() throws Exception { " sh \"mvn --version\"\n" + " }\n" + " }\n" + - "}"); - //@formatter:on + "}" + ); + // @formatter:on jenkinsRule.assertLogContains("Apache Maven " + version, run); - jenkinsRule.assertLogContains("using Maven installation provided by the build agent with the environment variable MAVEN_HOME=/home/test/slave", + jenkinsRule.assertLogContains( + "using Maven installation provided by the build agent with the environment variable MAVEN_HOME=/home/test/slave", run); } } @Test public void testMavenGlobalToolRecognizedInDeclarativePipeline() throws Exception { - try (GenericContainer nonMavenContainerRule = new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22)) { + try (GenericContainer nonMavenContainerRule = + new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22)) { nonMavenContainerRule.start(); registerAgentForContainer(nonMavenContainerRule); String version = registerLatestMavenVersionAsGlobalTool(); - //@formatter:off - WorkflowRun run = runPipeline("" + + // @formatter:off + WorkflowRun run = runPipeline( "pipeline {\n" + " agent { label '" + AGENT_NAME + "' }\n" + " tools {\n" + - " maven '" + MAVEN_GLOBAL_TOOL_NAME + "'\n" + + " maven '" +MAVEN_GLOBAL_TOOL_NAME + "'\n" + " }\n" + " stages {\n" + " stage('Build') {\n" + @@ -115,53 +118,61 @@ public void testMavenGlobalToolRecognizedInDeclarativePipeline() throws Exceptio " }\n" + " }\n" + " }\n" + - "}"); - //@formatter:on + "}" + ); + // @formatter:on jenkinsRule.assertLogContains("Apache Maven " + version, run); - jenkinsRule.assertLogContains("using Maven installation provided by the build agent with the environment variable MAVEN_HOME=/home/test/slave", + jenkinsRule.assertLogContains( + "using Maven installation provided by the build agent with the environment variable MAVEN_HOME=/home/test/slave", run); } } @Test public void 
testPreInstalledMavenRecognizedWithoutMavenHome() throws Exception { - try (GenericContainer javaMavenGitContainerRule = new GenericContainer<>("localhost/pipeline-maven/java-maven-git").withExposedPorts(22)) { + try (GenericContainer javaMavenGitContainerRule = + new GenericContainer<>("localhost/pipeline-maven/java-maven-git").withExposedPorts(22)) { javaMavenGitContainerRule.start(); registerAgentForContainer(javaMavenGitContainerRule); - //@formatter:off - WorkflowRun run = runPipeline("" + + // @formatter:off + WorkflowRun run = runPipeline( "node('" + AGENT_NAME + "') {\n" + " withMaven(traceability: true) {\n" + " sh \"mvn --version\"\n" + " }\n" + - "}"); - //@formatter:on + "}" + ); + // @formatter:on jenkinsRule.assertLogContains("Apache Maven 3.6.0", run); - jenkinsRule.assertLogContains("using Maven installation provided by the build agent with executable /usr/bin/mvn", run); + jenkinsRule.assertLogContains( + "using Maven installation provided by the build agent with executable /usr/bin/mvn", run); } } @Test public void testPreInstalledMavenRecognizedWithMavenHome() throws Exception { - try (GenericContainer mavenWithMavenHomeContainerRule = new GenericContainer<>("localhost/pipeline-maven/maven-home").withExposedPorts(22)) { + try (GenericContainer mavenWithMavenHomeContainerRule = + new GenericContainer<>("localhost/pipeline-maven/maven-home").withExposedPorts(22)) { mavenWithMavenHomeContainerRule.start(); registerAgentForContainer(mavenWithMavenHomeContainerRule); - //@formatter:off - WorkflowRun run = runPipeline("" + + // @formatter:off + WorkflowRun run = runPipeline( "node('" + AGENT_NAME + "') {\n" + " sh 'echo $MAVEN_HOME'\n" + " withMaven(traceability: true) {\n" + " sh \"mvn --version\"\n" + " }\n" + - "}"); - //@formatter:on + "}" + ); + // @formatter:on jenkinsRule.assertLogContains("Apache Maven 3.6.0", run); - jenkinsRule.assertLogContains("using Maven installation provided by the build agent with the environment variable 
MAVEN_HOME=/usr/share/maven", + jenkinsRule.assertLogContains( + "using Maven installation provided by the build agent with the environment variable MAVEN_HOME=/usr/share/maven", run); } } @@ -179,13 +190,17 @@ private void registerAgentForContainer(GenericContainer slaveContainer) throw } private void addTestSshCredentials() { - Credentials credentials = new UsernamePasswordCredentialsImpl(CredentialsScope.GLOBAL, SSH_CREDENTIALS_ID, null, "test", "test"); + Credentials credentials = + new UsernamePasswordCredentialsImpl(CredentialsScope.GLOBAL, SSH_CREDENTIALS_ID, null, "test", "test"); - SystemCredentialsProvider.getInstance().getDomainCredentialsMap().put(Domain.global(), Collections.singletonList(credentials)); + SystemCredentialsProvider.getInstance() + .getDomainCredentialsMap() + .put(Domain.global(), Collections.singletonList(credentials)); } private void registerAgentForSlaveContainer(GenericContainer slaveContainer) throws Exception { - SSHLauncher sshLauncher = new SSHLauncher(slaveContainer.getHost(), slaveContainer.getMappedPort(22), SSH_CREDENTIALS_ID); + SSHLauncher sshLauncher = + new SSHLauncher(slaveContainer.getHost(), slaveContainer.getMappedPort(22), SSH_CREDENTIALS_ID); DumbSlave agent = new DumbSlave(AGENT_NAME, SLAVE_BASE_PATH, sshLauncher); agent.setNumExecutors(1); @@ -208,7 +223,11 @@ private void updateAvailableMavenVersions() throws Exception { } private String getLatestMavenVersion() throws Exception { - return getMavenDownloadable().getData().getJSONArray("list").getJSONObject(0).getString("id"); + return getMavenDownloadable() + .getData() + .getJSONArray("list") + .getJSONObject(0) + .getString("id"); } private DownloadService.Downloadable getMavenDownloadable() { @@ -218,9 +237,10 @@ private DownloadService.Downloadable getMavenDownloadable() { private void registerMavenVersionAsGlobalTool(String version) throws Exception { String mavenHome = "maven-" + version.replace(".", "-"); - InstallSourceProperty installSourceProperty = new 
InstallSourceProperty(Collections.singletonList(new Maven.MavenInstaller(version))); - MavenInstallation mavenInstallation = new MavenInstallation(MAVEN_GLOBAL_TOOL_NAME, mavenHome, Collections.singletonList(installSourceProperty)); + InstallSourceProperty installSourceProperty = + new InstallSourceProperty(Collections.singletonList(new Maven.MavenInstaller(version))); + MavenInstallation mavenInstallation = new MavenInstallation( + MAVEN_GLOBAL_TOOL_NAME, mavenHome, Collections.singletonList(installSourceProperty)); jenkinsRule.jenkins.getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(mavenInstallation); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepNoOptionalsTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepNoOptionalsTest.java index 868c5338..1439e756 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepNoOptionalsTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepNoOptionalsTest.java @@ -1,12 +1,21 @@ package org.jenkinsci.plugins.pipeline.maven; +import hudson.FilePath; +import hudson.model.Result; +import hudson.model.Slave; +import hudson.tasks.Maven; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; - +import jenkins.model.Jenkins; +import jenkins.mvn.DefaultGlobalSettingsProvider; +import jenkins.mvn.DefaultSettingsProvider; +import jenkins.mvn.GlobalMavenConfig; +import jenkins.plugins.git.GitSampleRepoRule; +import jenkins.scm.impl.mock.GitSampleRepoRuleUtils; import org.jenkinsci.plugins.pipeline.maven.publishers.PipelineGraphPublisher; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; @@ -18,17 +27,6 @@ import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.RealJenkinsRule; 
-import hudson.FilePath; -import hudson.model.Result; -import hudson.model.Slave; -import hudson.tasks.Maven; -import jenkins.model.Jenkins; -import jenkins.mvn.DefaultGlobalSettingsProvider; -import jenkins.mvn.DefaultSettingsProvider; -import jenkins.mvn.GlobalMavenConfig; -import jenkins.plugins.git.GitSampleRepoRule; -import jenkins.scm.impl.mock.GitSampleRepoRuleUtils; - // Migrate to full JUnit5 impossible because of RealJenkinsRule public class WithMavenStepNoOptionalsTest { @@ -39,14 +37,34 @@ public class WithMavenStepNoOptionalsTest { public GitSampleRepoRule gitRepoRule = new GitSampleRepoRule(); @Rule - public RealJenkinsRule jenkinsRule = new RealJenkinsRule().omitPlugins("commons-lang3-api", "mysql-api", "postgresql-api", "maven-plugin", - "flaky-test-handler", "htmlpublisher", "jacoco", "jgiven", "junit", "junit-attachments", "matrix-project", "maven-invoker-plugin", - "pipeline-build-step", "findbugs", "tasks"); + public RealJenkinsRule jenkinsRule = new RealJenkinsRule() + .omitPlugins( + "commons-lang3-api", + "mysql-api", + "postgresql-api", + "maven-plugin", + "flaky-test-handler", + "htmlpublisher", + "jacoco", + "jgiven", + "junit", + "junit-attachments", + "matrix-project", + "maven-invoker-plugin", + "pipeline-build-step", + "findbugs", + "tasks"); @Test public void maven_build_jar_project_on_master_succeeds() throws Throwable { loadMavenJarProjectInGitRepo(gitRepoRule); - jenkinsRule.extraEnv("MAVEN_ZIP_PATH", Paths.get("target", "apache-maven-3.6.3-bin.zip").toAbsolutePath().toString()).extraEnv("MAVEN_VERSION", "3.6.3") + jenkinsRule + .extraEnv( + "MAVEN_ZIP_PATH", + Paths.get("target", "apache-maven-3.6.3-bin.zip") + .toAbsolutePath() + .toString()) + .extraEnv("MAVEN_VERSION", "3.6.3") .then(WithMavenStepNoOptionalsTest::setup, new Build(gitRepoRule.toString())); } @@ -60,7 +78,7 @@ private static class Build implements RealJenkinsRule.Step { @Override public void run(JenkinsRule r) throws Throwable { - //@formatter:off + // 
@formatter:off String pipelineScript = "node('mock') {\n" + " git($/" + repoUrl + "/$)\n" + " withMaven() {\n" + @@ -71,7 +89,7 @@ public void run(JenkinsRule r) throws Throwable { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = r.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -85,14 +103,16 @@ private static void setup(final JenkinsRule r) throws Throwable { r.waitOnline(agent); String mavenVersion = "3.6.3"; - FilePath buildDirectory = agent.getRootPath(); // No need of MasterToSlaveCallable because agent is a dumb, thus sharing file - // system with controller + FilePath buildDirectory = + agent.getRootPath(); // No need of MasterToSlaveCallable because agent is a dumb, thus sharing file + // system with controller FilePath mvnHome = buildDirectory.child("apache-maven-" + mavenVersion); FilePath mvn = buildDirectory.createTempFile("maven", "zip"); - mvn.copyFrom(new URL( - "https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/" + mavenVersion + "/apache-maven-" + mavenVersion + "-bin.tar.gz")); + mvn.copyFrom(new URL("https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/" + mavenVersion + + "/apache-maven-" + mavenVersion + "-bin.tar.gz")); mvn.untar(buildDirectory, FilePath.TarCompression.GZIP); - Maven.MavenInstallation mavenInstallation = new Maven.MavenInstallation("default", mvnHome.getRemote(), JenkinsRule.NO_PROPERTIES); + Maven.MavenInstallation mavenInstallation = + new Maven.MavenInstallation("default", mvnHome.getRemote(), JenkinsRule.NO_PROPERTIES); Jenkins.get().getDescriptorByType(Maven.DescriptorImpl.class).setInstallations(mavenInstallation); GlobalMavenConfig globalMavenConfig = r.get(GlobalMavenConfig.class); @@ -109,12 +129,12 @@ private static void setup(final JenkinsRule r) throws Throwable { private void loadMavenJarProjectInGitRepo(GitSampleRepoRule gitRepo) throws Exception { gitRepo.init(); - Path 
mavenProjectRoot = Paths - .get(WithMavenStepOnMasterTest.class.getResource("/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_project/").toURI()); + Path mavenProjectRoot = Paths.get(WithMavenStepOnMasterTest.class + .getResource("/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_project/") + .toURI()); if (!Files.exists(mavenProjectRoot)) { throw new IllegalStateException("Folder '" + mavenProjectRoot + "' not found"); } GitSampleRepoRuleUtils.addFilesAndCommit(mavenProjectRoot, gitRepo); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepOnMasterTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepOnMasterTest.java index c7422366..83991c0e 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepOnMasterTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepOnMasterTest.java @@ -26,13 +26,21 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.condition.OS.LINUX; +import com.cloudbees.hudson.plugins.folder.Folder; +import hudson.model.Result; +import hudson.plugins.tasks.TasksResultAction; +import hudson.tasks.junit.TestResultAction; +import hudson.tasks.junit.pipeline.JUnitResultsStepTest; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.logging.Level; import java.util.logging.Logger; - +import java.util.stream.Stream; +import jenkins.mvn.FilePathGlobalSettingsProvider; +import jenkins.mvn.FilePathSettingsProvider; +import jenkins.mvn.GlobalMavenConfig; import org.apache.commons.io.FileUtils; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.configfiles.GlobalConfigFiles; @@ -48,6 +56,8 @@ import org.jenkinsci.plugins.pipeline.maven.publishers.JGivenTestsPublisher; import 
org.jenkinsci.plugins.pipeline.maven.publishers.JacocoReportPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher; +import org.jenkinsci.plugins.pipeline.maven.publishers.MavenLinkerPublisher2; +import org.jenkinsci.plugins.pipeline.maven.publishers.PipelineGraphPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.SpotBugsAnalysisPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.TasksScannerPublisher; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; @@ -57,18 +67,11 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import org.jvnet.hudson.test.Issue; -import com.cloudbees.hudson.plugins.folder.Folder; - -import hudson.model.Result; -import hudson.plugins.tasks.TasksResultAction; -import hudson.tasks.junit.TestResultAction; -import hudson.tasks.junit.pipeline.JUnitResultsStepTest; -import jenkins.mvn.FilePathGlobalSettingsProvider; -import jenkins.mvn.FilePathSettingsProvider; -import jenkins.mvn.GlobalMavenConfig; - /** * TODO migrate to {@link WithMavenStepTest} once we have implemented a * GitRepoRule that can be used on remote agents @@ -96,7 +99,7 @@ public void after() { public void maven_build_on_master_with_specified_maven_installation_succeeds() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@// @formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(traceability: true, maven: 'apache-maven-3.6.3') {\n" + @@ -107,7 +110,7 @@ public void maven_build_on_master_with_specified_maven_installation_succeeds() t " }\n" + " }\n" + "}"; - //@// @formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, 
"build-on-master-with-tool-provided-maven"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -119,17 +122,21 @@ public void maven_build_on_master_with_specified_maven_installation_succeeds() t // verify .pom is archived and fingerprinted // "[withMaven] Archive ... under // jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom" - jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", + build); // verify .jar is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", + build); } @Test public void maven_build_on_master_with_missing_specified_maven_installation_fails() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@// @formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(maven: 'install-does-not-exist') {\n" + @@ -140,7 +147,7 @@ public void maven_build_on_master_with_missing_specified_maven_installation_fail " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-tool-provided-maven"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -152,7 +159,7 @@ public void maven_build_on_master_with_missing_specified_maven_installation_fail public void maven_build_no_traceability() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + 
gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -163,7 +170,7 @@ public void maven_build_no_traceability() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -177,16 +184,27 @@ public void maven_build_no_traceability() throws Exception { // verify .pom is archived and fingerprinted // "[withMaven] Archive ... under // jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom" - jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", + build); // verify .jar is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", + build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); - assertThat(artifactsFileNames).contains("mono-module-maven-app-0.1-SNAPSHOT.pom", "mono-module-maven-app-0.1-SNAPSHOT.jar"); - - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom"); + assertThat(artifactsFileNames) + .contains("mono-module-maven-app-0.1-SNAPSHOT.pom", "mono-module-maven-app-0.1-SNAPSHOT.jar"); + + verifyFileIsFingerPrinted( + pipeline, + build, + 
"jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom"); // verify Junit Archiver is called for maven-surefire-plugin jenkinsRule.assertLogContains( @@ -205,7 +223,8 @@ public void maven_build_no_traceability() throws Exception { build); // verify Task Scanner is called for jenkins.mvn.test:mono-module-maven-app - jenkinsRule.assertLogContains("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT", + jenkinsRule.assertLogContains( + "[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT", build); TasksResultAction tasksResultAction = build.getAction(TasksResultAction.class); assertThat(tasksResultAction.getProjectActions()).hasSize(1); @@ -219,7 +238,7 @@ public void maven_build_global_traceability() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -230,7 +249,7 @@ public void maven_build_global_traceability() throws Exception { " }\n" + " }\n" + "}"; - //@// @formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -250,7 +269,7 @@ public void maven_build_global_traceability_disabled_for_one_step() throws Excep loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(traceability: false) {\n" + @@ -261,7 +280,7 @@ public void maven_build_global_traceability_disabled_for_one_step() throws Excep " }\n" + " }\n" + "}"; - 
//@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -277,7 +296,7 @@ public void maven_build_global_traceability_disabled_for_one_step() throws Excep public void maven_build_jar_project_on_master_succeeds() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(traceability: true) {\n" + @@ -288,7 +307,7 @@ public void maven_build_jar_project_on_master_succeeds() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -302,16 +321,27 @@ public void maven_build_jar_project_on_master_succeeds() throws Exception { // verify .pom is archived and fingerprinted // "[withMaven] Archive ... 
under // jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom" - jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", + build); // verify .jar is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", + build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); - assertThat(artifactsFileNames).contains("mono-module-maven-app-0.1-SNAPSHOT.pom", "mono-module-maven-app-0.1-SNAPSHOT.jar"); - - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom"); + assertThat(artifactsFileNames) + .contains("mono-module-maven-app-0.1-SNAPSHOT.pom", "mono-module-maven-app-0.1-SNAPSHOT.jar"); + + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom"); // verify Junit Archiver is called for maven-surefire-plugin jenkinsRule.assertLogContains( @@ -330,7 +360,8 @@ public void maven_build_jar_project_on_master_succeeds() throws Exception { build); // verify Task Scanner is called for jenkins.mvn.test:mono-module-maven-app - jenkinsRule.assertLogContains("[withMaven] openTasksPublisher - Scan Tasks for 
Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT", + jenkinsRule.assertLogContains( + "[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT", build); TasksResultAction tasksResultAction = build.getAction(TasksResultAction.class); assertThat(tasksResultAction.getProjectActions()).hasSize(1); @@ -341,7 +372,7 @@ public void maven_build_jar_project_on_master_succeeds() throws Exception { public void maven_build_jar_project_with_whitespace_char_in_name() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(traceability: true) {\n" + @@ -364,8 +395,9 @@ public void maven_build_jar_project_with_whitespace_char_in_name() throws Except " \n" + " \n" + "\n"; - //@formatter:on - MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test", "maven-config-test", "", mavenSettings, false, null); + // @formatter:on + MavenSettingsConfig mavenSettingsConfig = + new MavenSettingsConfig("maven-config-test", "maven-config-test", "", mavenSettings, false, null); GlobalConfigFiles.get().save(mavenSettingsConfig); GlobalMavenConfig.get().setSettingsProvider(new MvnSettingsProvider(mavenSettingsConfig.id)); @@ -374,66 +406,34 @@ public void maven_build_jar_project_with_whitespace_char_in_name() throws Except WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build on master with spaces"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); - jenkinsRule.assertLogContains("[withMaven] using Maven settings provided by the Jenkins global configuration", build); + jenkinsRule.assertLogContains( + "[withMaven] using Maven settings provided by the Jenkins global configuration", build); 
jenkinsRule.assertLogContains("id-settings-test-through-config-file-provider", build); } finally { GlobalMavenConfig.get().setSettingsProvider(null); } } - @Test - public void maven_build_jar_project_on_master_findbugs_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new FindbugsAnalysisPublisher.DescriptorImpl(), "findbugsPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_spotbugs_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new SpotBugsAnalysisPublisher.DescriptorImpl(), "spotbugsPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_tasks_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new TasksScannerPublisher.DescriptorImpl(), "openTasksPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_concordion_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new ConcordionTestsPublisher.DescriptorImpl(), "concordionPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_dependencies_fingerprint_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new DependenciesFingerprintPublisher.DescriptorImpl(), - "dependenciesFingerprintPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_generated_artifacts_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new GeneratedArtifactsPublisher.DescriptorImpl(), "artifactsPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_invoker_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new InvokerRunsPublisher.DescriptorImpl(), 
"invokerPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_jacoco_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new JacocoReportPublisher.DescriptorImpl(), "jacocoPublisher"); - } - - @Test - public void maven_build_jar_project_on_master_jgiven_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new JGivenTestsPublisher.DescriptorImpl(), "jgivenPublisher"); + private static Stream mavenPublisherDescriptors() { + return Stream.of( + Arguments.of(new FindbugsAnalysisPublisher.DescriptorImpl(), "findbugsPublisher"), + Arguments.of(new SpotBugsAnalysisPublisher.DescriptorImpl(), "spotbugsPublisher"), + Arguments.of(new TasksScannerPublisher.DescriptorImpl(), "openTasksPublisher"), + Arguments.of(new ConcordionTestsPublisher.DescriptorImpl(), "concordionPublisher"), + Arguments.of(new DependenciesFingerprintPublisher.DescriptorImpl(), "dependenciesFingerprintPublisher"), + Arguments.of(new GeneratedArtifactsPublisher.DescriptorImpl(), "artifactsPublisher"), + Arguments.of(new InvokerRunsPublisher.DescriptorImpl(), "invokerPublisher"), + Arguments.of(new JacocoReportPublisher.DescriptorImpl(), "jacocoPublisher"), + Arguments.of(new JGivenTestsPublisher.DescriptorImpl(), "jgivenPublisher"), + Arguments.of(new JunitTestsPublisher.DescriptorImpl(), "junitPublisher"), + Arguments.of(new MavenLinkerPublisher2.DescriptorImpl(), "mavenLinkerPublisher"), + Arguments.of(new PipelineGraphPublisher.DescriptorImpl(), "pipelineGraphPublisher")); } - @Test - public void maven_build_jar_project_on_master_junit_publisher_desactivation() throws Exception { - maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new JunitTestsPublisher.DescriptorImpl(), "junitPublisher"); - } - - private void maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(MavenPublisher.DescriptorImpl descriptor, 
String symbol) - throws Exception { + @ParameterizedTest + @MethodSource("mavenPublisherDescriptors") + public void maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds( + MavenPublisher.DescriptorImpl descriptor, String symbol) throws Exception { Logger logger = Logger.getLogger(MavenSpyLogProcessor.class.getName()); Level level = logger.getLevel(); logger.setLevel(Level.FINE); @@ -442,25 +442,26 @@ private void maven_build_jar_project_on_master_with_disabled_publisher_param_suc Symbol symbolAnnotation = descriptor.getClass().getAnnotation(Symbol.class); String[] symbols = symbolAnnotation.value(); - assertThat(new String[] { symbol }).isEqualTo(symbols); + assertThat(new String[] {symbol}).isEqualTo(symbols); loadMavenJarProjectInGitRepo(this.gitRepoRule); for (Boolean disabled : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) { - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + - " git($/" + gitRepoRule.toString() + "/$)\n" + - " withMaven(options:[" + symbol + "(disabled:" + disabled + ")]) {\n" + - " if (isUnix()) {\n" + - " sh 'mvn package verify'\n" + - " } else {\n" + - " bat 'mvn package verify'\n" + - " }\n" + - " }\n" + - "}"; - //@formatter:on - - WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-" + symbol + "-publisher-disabled-" + disabled); + " git($/" + gitRepoRule.toString() + "/$)\n" + + " withMaven(options:[" + symbol + "(disabled:" + disabled + ")]) {\n" + + " if (isUnix()) {\n" + + " sh 'mvn package verify'\n" + + " } else {\n" + + " bat 'mvn package verify'\n" + + " }\n" + + " }\n" + + "}"; + // @formatter:on + + WorkflowJob pipeline = jenkinsRule.createProject( + WorkflowJob.class, "build-on-master-" + symbol + "-publisher-disabled-" + disabled); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); @@ -484,11 +485,11 @@ public void 
maven_build_jar_project_on_master_with_open_task_scanner_config_succ Symbol symbolAnnotation = descriptor.getClass().getAnnotation(Symbol.class); String[] symbols = symbolAnnotation.value(); - assertThat(new String[] { "openTasksPublisher" }).isEqualTo(symbols); + assertThat(new String[] {"openTasksPublisher"}).isEqualTo(symbols); loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(options:[openTasksPublisher(" + @@ -503,9 +504,10 @@ public void maven_build_jar_project_on_master_with_open_task_scanner_config_succ " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on - WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-openTasksPublisher-publisher-config"); + WorkflowJob pipeline = + jenkinsRule.createProject(WorkflowJob.class, "build-on-master-openTasksPublisher-publisher-config"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); @@ -515,9 +517,11 @@ public void maven_build_jar_project_on_master_with_open_task_scanner_config_succ @Test public void maven_build_maven_jar_with_flatten_pom_project_on_master_succeeds() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_with_flatten_pom_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_with_flatten_pom_project/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(traceability: true) {\n" + @@ -528,13 +532,15 @@ public void maven_build_maven_jar_with_flatten_pom_project_on_master_succeeds() " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on - WorkflowJob pipeline = 
jenkinsRule.createProject(WorkflowJob.class, "build-jar-with-flatten-pom-project-on-master"); + WorkflowJob pipeline = + jenkinsRule.createProject(WorkflowJob.class, "build-jar-with-flatten-pom-project-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); - jenkinsRule.assertLogNotContains("[jenkins-maven-event-spy] WARNING: unexpected Maven project file name '.flattened-pom.xml', problems may occur", + jenkinsRule.assertLogNotContains( + "[jenkins-maven-event-spy] WARNING: unexpected Maven project file name '.flattened-pom.xml', problems may occur", build); // verify Maven installation provided by the build agent is used @@ -543,16 +549,29 @@ public void maven_build_maven_jar_with_flatten_pom_project_on_master_succeeds() jenkinsRule.assertLogContains("[withMaven] using Maven installation provided by the build agent with", build); // verify .pom is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom", + build); // verify .jar is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar", + build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); - assertThat(artifactsFileNames).contains("maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom", "maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar"); - - verifyFileIsFingerPrinted(pipeline, build, 
"jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom"); + assertThat(artifactsFileNames) + .contains( + "maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom", + "maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar"); + + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom"); // verify Junit Archiver is called for // jenkins.mvn.test:maven-jar-with-flattened-pom @@ -569,9 +588,10 @@ public void maven_build_maven_jar_with_flatten_pom_project_on_master_succeeds() @Test public void maven_build_maven_hpi_project_on_master_succeeds() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_hpi_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_hpi_project/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(traceability: true) {\n" + @@ -582,7 +602,7 @@ public void maven_build_maven_hpi_project_on_master_succeeds() throws Exception " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -594,26 +614,38 @@ public void maven_build_maven_hpi_project_on_master_succeeds() throws Exception jenkinsRule.assertLogContains("[withMaven] using Maven installation provided by the build agent with", build); // verify .pom is 
archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.pom", build); // verify .jar and .hpi is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.hpi", build); - jenkinsRule.assertLogContains("under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.hpi", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.jar", build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); - assertThat(artifactsFileNames).contains("test-jenkins-hpi-0.1-SNAPSHOT.pom", "test-jenkins-hpi-0.1-SNAPSHOT.jar", "test-jenkins-hpi-0.1-SNAPSHOT.hpi"); - - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.hpi"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.pom"); + assertThat(artifactsFileNames) + .contains( + "test-jenkins-hpi-0.1-SNAPSHOT.pom", + "test-jenkins-hpi-0.1-SNAPSHOT.jar", + "test-jenkins-hpi-0.1-SNAPSHOT.hpi"); + + verifyFileIsFingerPrinted( + pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.hpi"); + verifyFileIsFingerPrinted( + pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, build, 
"jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.pom"); // verify Junit Archiver is called for jenkins.mvn.test:test-jenkins-hpi jenkinsRule.assertLogContains( - "[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:test-jenkins-hpi:hpi:0.1-SNAPSHOT generated by", build); + "[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:test-jenkins-hpi:hpi:0.1-SNAPSHOT generated by", + build); // verify Task Scanner is called for jenkins.mvn.test:test-jenkins-hpi jenkinsRule.assertLogContains( - "[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:test-jenkins-hpi:hpi:0.1-SNAPSHOT in source directory", build); + "[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:test-jenkins-hpi:hpi:0.1-SNAPSHOT in source directory", + build); } @Issue("JENKINS-43678") @@ -621,7 +653,7 @@ public void maven_build_maven_hpi_project_on_master_succeeds() throws Exception public void maven_build_on_master_with_no_generated_jar_succeeds() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -632,14 +664,16 @@ public void maven_build_on_master_with_no_generated_jar_succeeds() throws Except " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "test-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); // don't try to archive the artifact as it has not been generated - jenkinsRule.assertLogNotContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build); + jenkinsRule.assertLogNotContains( + "under 
jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", + build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); assertThat(artifactsFileNames).contains("mono-module-maven-app-0.1-SNAPSHOT.pom"); @@ -651,23 +685,24 @@ public void maven_build_on_master_with_no_generated_jar_succeeds() throws Except public void maven_global_settings_path_defined_through_jenkins_global_config() throws Exception { File mavenGlobalSettingsFile = new File(jenkinsRule.jenkins.getRootDir(), "maven-global-settings.xml"); - //@formatter:off + // @formatter:off String mavenGlobalSettings = "\n" + "\n" + " \n" + - " \n" + - " id-global-settings-test\n" + - " \n" + + " \n" + + " id-global-settings-test\n" + + " \n" + " \n" + "\n"; - //@formatter:on + // @formatter:on FileUtils.writeStringToFile(mavenGlobalSettingsFile, mavenGlobalSettings, StandardCharsets.UTF_8); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" + " writeFile file: 'pom.xml', text: '''\n" + "id-global-settings-test", build); } finally { GlobalMavenConfig.get().setGlobalSettingsProvider(null); @@ -704,27 +743,29 @@ public void maven_global_settings_path_defined_through_jenkins_global_config() t @Test @EnabledOnOs(LINUX) // bat step get stuck on Windows 2019 CI agents - public void maven_global_settings_defined_through_jenkins_global_config_and_config_file_provider() throws Exception { + public void maven_global_settings_defined_through_jenkins_global_config_and_config_file_provider() + throws Exception { - //@formatter:off + // @formatter:off String mavenGlobalSettings = "\n" + "\n" + " \n" + - " \n" + - " id-global-settings-test-from-config-file-provider\n" + - " \n" + + " \n" + + " id-global-settings-test-from-config-file-provider\n" + + " \n" + " \n" + "\n"; - //@formatter:on + // @formatter:on - GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new 
GlobalMavenSettingsConfig("maven-global-config-test", "maven-global-config-test", "", - mavenGlobalSettings); + GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig( + "maven-global-config-test", "maven-global-config-test", "", mavenGlobalSettings); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" + " writeFile file: 'pom.xml', text: '''\n" + "id-global-settings-test-from-config-file-provider", build); } finally { GlobalMavenConfig.get().setGlobalSettingsProvider(null); @@ -767,7 +810,7 @@ public void maven_global_settings_defined_through_jenkins_global_config_and_conf @EnabledOnOs(LINUX) // bat step get stuck on Windows 2019 CI agents public void maven_global_settings_defined_through_folder_config_and_config_file_provider() throws Exception { - //@formatter:off + // @formatter:off String mavenGlobalSettings = "\n" + "\n" + " \n" + "\n"; - //@formatter:on + // @formatter:on - GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig("maven-global-config-test-folder", - "maven-global-config-test-folder", "", mavenGlobalSettings); + GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig( + "maven-global-config-test-folder", "maven-global-config-test-folder", "", mavenGlobalSettings); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" + " writeFile file: 'pom.xml', text: '''\n" + "id-global-settings-test-from-config-file-provider-on-a-folder", build); + jenkinsRule.assertLogContains( + "id-global-settings-test-from-config-file-provider-on-a-folder", build); } finally { GlobalMavenConfig.get().setGlobalSettingsProvider(null); GlobalConfigFiles.get().remove(mavenGlobalSettingsConfig.id); @@ -838,7 +884,7 @@ public void maven_global_settings_defined_through_folder_config_and_config_file_ @EnabledOnOs(LINUX) // bat step get stuck on Windows 
2019 CI agents public void maven_global_settings_path_defined_through_pipeline_attribute() throws Exception { - //@formatter:off + // @formatter:off String pipelineScript = "node () {\n" + " writeFile file: 'maven-global-settings.xml', text: '''\n" + "\n" + " \n" + - " \n" + - " id-global-settings-test\n" + - " \n" + + " \n" + + " id-global-settings-test\n" + + " \n" + " \n" + "'''\n" + "\n" + @@ -872,9 +918,10 @@ public void maven_global_settings_path_defined_through_pipeline_attribute() thro " }\n" + " }\n" + "}\n"; - //@formatter:on + // @formatter:on - WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-global-settings-defined-in-pipeline"); + WorkflowJob pipeline = jenkinsRule.createProject( + WorkflowJob.class, "build-on-master-with-maven-global-settings-defined-in-pipeline"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); jenkinsRule.assertLogContains("[withMaven] using Maven global settings provided on the build agent", build); @@ -886,7 +933,7 @@ public void maven_global_settings_path_defined_through_pipeline_attribute() thro @EnabledOnOs(LINUX) // bat step get stuck on Windows 2019 CI agents public void maven_settings_path_defined_through_pipeline_attribute() throws Exception { - //@formatter:off + // @formatter:off String pipelineScript = "node () {\n" + " writeFile file: 'maven-settings.xml', text: '''\n" + "\n" + " \n" + - " \n" + - " id-settings-test\n" + - " \n" + + " \n" + + " id-settings-test\n" + + " \n" + " \n" + "'''\n" + "\n" + @@ -920,9 +967,10 @@ public void maven_settings_path_defined_through_pipeline_attribute() throws Exce " }\n" + " }\n" + "}\n"; - //@formatter:on + // @formatter:on - WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-settings-defined-in-pipeline"); + WorkflowJob pipeline = + jenkinsRule.createProject(WorkflowJob.class, 
"build-on-master-with-maven-settings-defined-in-pipeline"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); jenkinsRule.assertLogContains("[withMaven] using Maven settings provided on the build agent", build); @@ -934,23 +982,24 @@ public void maven_settings_path_defined_through_pipeline_attribute() throws Exce public void maven_settings_defined_through_jenkins_global_config() throws Exception { File mavenSettingsFile = new File(jenkinsRule.jenkins.getRootDir(), "maven-settings.xml"); - //@formatter:off + // @formatter:off String mavenSettings = "\n" + "\n" + " \n" + - " \n" + - " id-settings-test\n" + - " \n" + + " \n" + + " id-settings-test\n" + + " \n" + " \n" + "\n"; - //@formatter:on + // @formatter:on FileUtils.writeStringToFile(mavenSettingsFile, mavenSettings, StandardCharsets.UTF_8); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" + " writeFile file: 'pom.xml', text: '''\n" + "id-settings-test", build); } finally { GlobalMavenConfig.get().setSettingsProvider(null); @@ -989,7 +1040,7 @@ public void maven_settings_defined_through_jenkins_global_config() throws Except @EnabledOnOs(LINUX) // bat step get stuck on Windows 2019 CI agents public void maven_settings_defined_through_jenkins_global_config_and_config_file_provider() throws Exception { - //@formatter:off + // @formatter:off String mavenSettings = "\n" + "\n" + " \n" + "\n"; - //@formatter:on - MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test", "maven-config-test", "", mavenSettings, false, null); + // @formatter:on + MavenSettingsConfig mavenSettingsConfig = + new MavenSettingsConfig("maven-config-test", "maven-config-test", "", mavenSettings, false, null); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" 
+ " writeFile file: 'pom.xml', text: '''\n" + "id-settings-test-through-config-file-provider", build); } finally { GlobalMavenConfig.get().setSettingsProvider(null); @@ -1046,7 +1101,7 @@ public void maven_settings_defined_through_jenkins_global_config_and_config_file @EnabledOnOs(LINUX) // bat step get stuck on Windows 2019 CI agents public void maven_settings_defined_through_folder_config_and_config_file_provider() throws Exception { - //@formatter:off + // @formatter:off String mavenSettings = "\n" + "\n" + " \n" + "\n"; - //@formatter:on - MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test-folder", "maven-config-test-folder", "", mavenSettings, false, - null); + // @formatter:on + MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig( + "maven-config-test-folder", "maven-config-test-folder", "", mavenSettings, false, null); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" + " writeFile file: 'pom.xml', text: '''\n" + "\n" + " \n" + - " \n" + - " id-settings-test-from-pipeline-attribute-and-config-file-provider\n" + - " \n" + + " \n" + + " id-settings-test-from-pipeline-attribute-and-config-file-provider\n" + + " \n" + " \n" + "\n"; - //@formatter:on - - MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test-from-pipeline-attribute", - "maven-config-test-from-pipeline-attribute", "", mavenSettings, false, null); + // @formatter:on + + MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig( + "maven-config-test-from-pipeline-attribute", + "maven-config-test-from-pipeline-attribute", + "", + mavenSettings, + false, + null); - //@formatter:off - String pipelineScript = "node () {\n" + + // @formatter:off + String pipelineScript = + "node () {\n" + " writeFile file: 'pom.xml', text: '''\n" + "id-settings-test-from-pipeline-attribute-and-config-file-provider", build); + "[withMaven] using Maven settings 
provided by the Jenkins Managed Configuration File 'maven-config-test-from-pipeline-attribute'", + build); + jenkinsRule.assertLogContains( + "id-settings-test-from-pipeline-attribute-and-config-file-provider", build); } finally { GlobalConfigFiles.get().remove(mavenSettingsConfig.id); } @@ -1174,7 +1240,7 @@ public void maven_settings_defined_through_pipeline_attribute_and_config_file_pr public void maven_build_test_results_by_stage_and_branch() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); - //@formatter:off + // @formatter:off String pipelineScript = "stage('first') {\n" + " parallel(a: {\n" + " node() {\n" + @@ -1201,7 +1267,7 @@ public void maven_build_test_results_by_stage_and_branch() throws Exception { " }\n" + " })\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -1216,5 +1282,4 @@ public void maven_build_test_results_by_stage_and_branch() throws Exception { JUnitResultsStepTest.assertBranchResults(build, 2, 2, 0, "a", "first", null); JUnitResultsStepTest.assertBranchResults(build, 2, 2, 0, "b", "first", null); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepRestartTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepRestartTest.java index 88ed7089..8859f06a 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepRestartTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepRestartTest.java @@ -30,7 +30,6 @@ import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; - import org.apache.commons.io.FileUtils; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; @@ -98,7 +97,8 @@ public void resume() throws Throwable { 
WorkflowRun b = p.getBuildByNumber(1); @SuppressWarnings("deprecation") Class deprecatedClass = org.jenkinsci.plugins.workflow.support.pickles.FilePathPickle.class; - assertThat(FileUtils.readFileToString(new File(b.getRootDir(), "program.dat"), StandardCharsets.ISO_8859_1)).doesNotContain(deprecatedClass.getName()); + assertThat(FileUtils.readFileToString(new File(b.getRootDir(), "program.dat"), StandardCharsets.ISO_8859_1)) + .doesNotContain(deprecatedClass.getName()); SemaphoreStep.success("wait/1", null); jenkinsRule.assertBuildStatusSuccess(jenkinsRule.waitForCompletion(b)); SemaphoreStep.success("wait/2", null); diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepTest.java index 80ae51e0..41bae2e4 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepTest.java @@ -26,13 +26,25 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.params.provider.Arguments.arguments; +import com.cloudbees.plugins.credentials.Credentials; +import com.cloudbees.plugins.credentials.CredentialsScope; +import com.cloudbees.plugins.credentials.SystemCredentialsProvider; +import com.cloudbees.plugins.credentials.domains.Domain; +import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl; +import hudson.model.Fingerprint; +import hudson.model.FingerprintMap; +import hudson.model.JDK; +import hudson.model.Result; +import hudson.plugins.sshslaves.SSHLauncher; +import hudson.slaves.DumbSlave; +import hudson.slaves.RetentionStrategy; +import hudson.tools.ToolLocationNodeProperty; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.Collections; import java.util.Objects; import java.util.stream.Stream; - import 
org.apache.commons.io.FileUtils; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; @@ -46,21 +58,6 @@ import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.utility.MountableFile; -import com.cloudbees.plugins.credentials.Credentials; -import com.cloudbees.plugins.credentials.CredentialsScope; -import com.cloudbees.plugins.credentials.SystemCredentialsProvider; -import com.cloudbees.plugins.credentials.domains.Domain; -import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl; - -import hudson.model.Fingerprint; -import hudson.model.FingerprintMap; -import hudson.model.JDK; -import hudson.model.Result; -import hudson.plugins.sshslaves.SSHLauncher; -import hudson.slaves.DumbSlave; -import hudson.slaves.RetentionStrategy; -import hudson.tools.ToolLocationNodeProperty; - @Testcontainers(disabledWithoutDocker = true) // Testcontainers does not support docker on Windows 2019 servers public class WithMavenStepTest extends AbstractIntegrationTest { @@ -72,7 +69,8 @@ public class WithMavenStepTest extends AbstractIntegrationTest { @Issue("SECURITY-441") @Test public void testMavenBuildOnRemoteAgentWithSettingsFileOnMasterFails() throws Exception { - try (GenericContainer mavenContainerRule = new GenericContainer<>("localhost/pipeline-maven/java-maven-git").withExposedPorts(22)) { + try (GenericContainer mavenContainerRule = + new GenericContainer<>("localhost/pipeline-maven/java-maven-git").withExposedPorts(22)) { mavenContainerRule.start(); registerAgentForContainer(mavenContainerRule); @@ -80,14 +78,16 @@ public void testMavenBuildOnRemoteAgentWithSettingsFileOnMasterFails() throws Ex String secret = "secret content on master"; FileUtils.writeStringToFile(onMaster, secret, StandardCharsets.UTF_8); - //@formatter:off - WorkflowRun run = runPipeline(Result.FAILURE, "" + + // @formatter:off + WorkflowRun run = runPipeline( + Result.FAILURE, "node('remote') {\n" + 
" withMaven(mavenSettingsFilePath: '" + onMaster + "') {\n" + " echo readFile(MVN_SETTINGS)\n" + " }\n" + - "}"); - //@formatter:on + "}" + ); + // @formatter:on jenkinsRule.assertLogNotContains(secret, run); } @@ -95,20 +95,23 @@ public void testMavenBuildOnRemoteAgentWithSettingsFileOnMasterFails() throws Ex @Test public void testDisableAllPublishers() throws Exception { - try (GenericContainer mavenContainerRule = new GenericContainer<>("localhost/pipeline-maven/java-maven-git").withExposedPorts(22)) { + try (GenericContainer mavenContainerRule = + new GenericContainer<>("localhost/pipeline-maven/java-maven-git").withExposedPorts(22)) { mavenContainerRule.start(); registerAgentForContainer(mavenContainerRule); loadMonoDependencyMavenProjectInGitRepo(this.gitRepoRule); - //@formatter:off - runPipeline(Result.SUCCESS, "" + + // @formatter:off + runPipeline( + Result.SUCCESS, "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(publisherStrategy: 'EXPLICIT') {\n" + " sh 'mvn package'\n" + " }\n" + - "}"); - //@formatter:on + "}" + ); + // @formatter:on assertFingerprintDoesNotExist(COMMONS_LANG3_FINGERPRINT); } @@ -123,33 +126,41 @@ private static Stream jdkMapProvider() { @MethodSource("jdkMapProvider") @Issue("JENKINS-71949") public void tesWithDifferentJavasForBuild(String jdkName, String jdkPath) throws Exception { - try (GenericContainer javasContainerRule = new GenericContainer<>("localhost/pipeline-maven/javas").withExposedPorts(22)) { + try (GenericContainer javasContainerRule = + new GenericContainer<>("localhost/pipeline-maven/javas").withExposedPorts(22)) { javasContainerRule.start(); loadMonoDependencyMavenProjectInGitRepo(this.gitRepoRule); String gitRepoPath = this.gitRepoRule.toString(); javasContainerRule.copyFileToContainer(MountableFile.forHostPath(gitRepoPath), "/tmp/gitrepo"); javasContainerRule.execInContainer("chmod", "-R", "777", "/tmp/gitrepo"); registerAgentForContainer(javasContainerRule); - 
ToolLocationNodeProperty.ToolLocation toolLocation = new ToolLocationNodeProperty.ToolLocation(new JDK.DescriptorImpl(), jdkName, jdkPath); + ToolLocationNodeProperty.ToolLocation toolLocation = + new ToolLocationNodeProperty.ToolLocation(new JDK.DescriptorImpl(), jdkName, jdkPath); ToolLocationNodeProperty toolLocationNodeProperty = new ToolLocationNodeProperty(toolLocation); - Objects.requireNonNull(jenkinsRule.jenkins.getNode(AGENT_NAME)).getNodeProperties().add(toolLocationNodeProperty); + Objects.requireNonNull(jenkinsRule.jenkins.getNode(AGENT_NAME)) + .getNodeProperties() + .add(toolLocationNodeProperty); jenkinsRule.jenkins.getJDKs().add(new JDK(jdkName, jdkPath)); - //@formatter:off - WorkflowRun run = runPipeline(Result.SUCCESS, + // @formatter:off + WorkflowRun run = runPipeline( + Result.SUCCESS, "node('" + AGENT_NAME + "') {\n" + " git('/tmp/gitrepo')\n" + " withMaven(jdk: '" + jdkName + "') {\n" + " sh 'mvn package'\n" + " }\n" + - "}"); - //@formatter:on - jenkinsRule.assertLogContains("artifactsPublisher - Archive artifact target/mono-dependency-maven-project-0.1-SNAPSHOT.jar", run); + "}" + ); + // @formatter:on + jenkinsRule.assertLogContains( + "artifactsPublisher - Archive artifact target/mono-dependency-maven-project-0.1-SNAPSHOT.jar", run); Collection archives = run.pickArtifactManager().root().list("**/**.jar", "", true); assertThat(archives).hasSize(1); - assertThat(archives.iterator().next().endsWith("mono-dependency-maven-project-0.1-SNAPSHOT.jar")).isTrue(); + assertThat(archives.iterator().next().endsWith("mono-dependency-maven-project-0.1-SNAPSHOT.jar")) + .isTrue(); } } @@ -172,18 +183,21 @@ private void registerAgentForContainer(GenericContainer container) throws Exc } private void addTestSshCredentials() { - Credentials credentials = new UsernamePasswordCredentialsImpl(CredentialsScope.GLOBAL, SSH_CREDENTIALS_ID, null, "test", "test"); + Credentials credentials = + new UsernamePasswordCredentialsImpl(CredentialsScope.GLOBAL, 
SSH_CREDENTIALS_ID, null, "test", "test"); - SystemCredentialsProvider.getInstance().getDomainCredentialsMap().put(Domain.global(), Collections.singletonList(credentials)); + SystemCredentialsProvider.getInstance() + .getDomainCredentialsMap() + .put(Domain.global(), Collections.singletonList(credentials)); } private void registerAgentForSlaveContainer(GenericContainer slaveContainer) throws Exception { - SSHLauncher sshLauncher = new SSHLauncher(slaveContainer.getHost(), slaveContainer.getMappedPort(22), SSH_CREDENTIALS_ID); + SSHLauncher sshLauncher = + new SSHLauncher(slaveContainer.getHost(), slaveContainer.getMappedPort(22), SSH_CREDENTIALS_ID); DumbSlave agent = new DumbSlave(AGENT_NAME, SLAVE_BASE_PATH, sshLauncher); agent.setNumExecutors(1); agent.setRetentionStrategy(RetentionStrategy.INSTANCE); jenkinsRule.jenkins.addNode(agent); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelperTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelperTest.java index 5af41a12..01bb14e7 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelperTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelperTest.java @@ -5,7 +5,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; - import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.junit.jupiter.api.Test; @@ -19,8 +18,8 @@ public void isSameCause_singleArtifact_noBaseVersion_sameSnapshot_false() { MavenArtifact firstArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-SNAPSHOT"); MavenArtifact secondArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-SNAPSHOT"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), - new MavenDependencyTestCause(secondArtifact)); + List matchingArtifacts = 
MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(secondArtifact)); assertThat(matchingArtifacts).isEmpty(); } @@ -30,8 +29,8 @@ public void isSameCause_singleArtifact_noBaseVersion_false() { MavenArtifact firstArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-SNAPSHOT"); MavenArtifact secondArtifact = new MavenArtifact("com.example:my-jar:jar:1.1-SNAPSHOT"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), - new MavenDependencyTestCause(secondArtifact)); + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(secondArtifact)); assertThat(matchingArtifacts).isEmpty(); } @@ -43,8 +42,8 @@ public void isSameCause_singleArtifact_withBaseVersion_true() { MavenArtifact secondArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-20100529-1213-1"); secondArtifact.setBaseVersion("1.0-SNAPSHOT"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), - new MavenDependencyTestCause(secondArtifact)); + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(secondArtifact)); assertThat(matchingArtifacts).isNotEmpty(); } @@ -56,8 +55,8 @@ public void isSameCause_singleArtifact_withBaseVersion_false() { MavenArtifact secondArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-20100530-2101-3"); secondArtifact.setBaseVersion("1.0-SNAPSHOT"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), - new MavenDependencyTestCause(secondArtifact)); + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(secondArtifact)); assertThat(matchingArtifacts).isEmpty(); } @@ -68,8 +67,8 @@ 
public void isSameCause_singleArtifact_mixedBaseVersion_false() { firstArtifact.setBaseVersion("1.0-SNAPSHOT"); MavenArtifact secondArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-20100530-2101-1"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), - new MavenDependencyTestCause(secondArtifact)); + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(secondArtifact)); assertThat(matchingArtifacts).isEmpty(); } @@ -85,7 +84,8 @@ public void isSameCause_singleArtifact_multiClassifiers_on_firstCause_withBaseVe MavenArtifact secondArtifact = new MavenArtifact("com.example:my-jar:jar:1.0-20100529-1213-1"); secondArtifact.setBaseVersion("1.0-SNAPSHOT"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact, firstArtifactSources), + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact, firstArtifactSources), new MavenDependencyTestCause(secondArtifact)); assertThat(matchingArtifacts).isNotEmpty(); @@ -102,7 +102,8 @@ public void isSameCause_singleArtifact_multiClassifiers_on_secondCause_withBaseV secondArtifactSources.setBaseVersion("1.0-SNAPSHOT"); secondArtifactSources.setClassifier("sources"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(secondArtifact, secondArtifactSources)); assertThat(matchingArtifacts).isNotEmpty(); @@ -123,7 +124,8 @@ public void isSameCause_multiArtifact_multiClassifiers_on_firstCause_withBaseVer sameAsFirstArtifact.setBaseVersion("1.0-SNAPSHOT"); List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( - new MavenDependencyTestCause(firstArtifact, 
firstArtifactSources, secondArtifact), new MavenDependencyTestCause(sameAsFirstArtifact)); + new MavenDependencyTestCause(firstArtifact, firstArtifactSources, secondArtifact), + new MavenDependencyTestCause(sameAsFirstArtifact)); assertThat(matchingArtifacts).isNotEmpty(); } @@ -143,7 +145,8 @@ public void isSameCause_multiArtifact_multiClassifiers_on_secondCause_withBaseVe sameAsFirstArtifactSources.setBaseVersion("1.0-SNAPSHOT"); sameAsFirstArtifactSources.setClassifier("sources"); - List matchingArtifacts = MavenDependencyCauseHelper.isSameCause(new MavenDependencyTestCause(firstArtifact), + List matchingArtifacts = MavenDependencyCauseHelper.isSameCause( + new MavenDependencyTestCause(firstArtifact), new MavenDependencyTestCause(sameAsFirstArtifact, sameAsFirstArtifactSources, secondArtifact)); assertThat(matchingArtifacts).isNotEmpty(); diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilterTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilterTest.java index 68a0eb10..d76461b7 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilterTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilterTest.java @@ -1,8 +1,12 @@ package org.jenkinsci.plugins.pipeline.maven.console; +import com.cloudbees.plugins.credentials.CredentialsProvider; +import hudson.ExtensionList; +import hudson.model.Result; import java.util.ArrayList; import java.util.List; - +import jenkins.model.Jenkins; +import jenkins.mvn.GlobalMavenConfig; import org.jenkinsci.plugins.configfiles.GlobalConfigFiles; import org.jenkinsci.plugins.configfiles.maven.GlobalMavenSettingsConfig; import org.jenkinsci.plugins.configfiles.maven.job.MvnGlobalSettingsProvider; @@ -16,20 +20,13 @@ import org.junit.jupiter.params.provider.ValueSource; import 
org.jvnet.hudson.test.Issue; -import com.cloudbees.plugins.credentials.CredentialsProvider; - -import hudson.ExtensionList; -import hudson.model.Result; -import jenkins.model.Jenkins; -import jenkins.mvn.GlobalMavenConfig; - public class MaskPasswordsConsoleLogFilterTest extends AbstractIntegrationTest { @Issue("SECURITY-3257") @ParameterizedTest - @ValueSource(booleans = { true, false }) + @ValueSource(booleans = {true, false}) public void should_hide_server_username_and_password(boolean usernameIsSecret) throws Exception { - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " withMaven(traceability: true, globalMavenSettingsConfig: 'maven-global-config-test') {\n" + " if (isUnix()) {\n" + @@ -41,7 +38,7 @@ public void should_hide_server_username_and_password(boolean usernameIsSecret) t "}"; String serverId = "server-id"; - //@formatter:off + // @formatter:off String mavenGlobalSettings = "\n" + "\n" + " \n" + "\n"; - //@formatter:on + // @formatter:on String credentialsId = "creds-id"; - ExtensionList extensionList = Jenkins.getInstance().getExtensionList(CredentialsProvider.class); - extensionList.add(extensionList.size(), new FakeCredentialsProvider(credentialsId, "aUser", "aPass", usernameIsSecret)); + ExtensionList extensionList = + Jenkins.getInstance().getExtensionList(CredentialsProvider.class); + extensionList.add( + extensionList.size(), new FakeCredentialsProvider(credentialsId, "aUser", "aPass", usernameIsSecret)); List mappings = new ArrayList<>(); mappings.add(new ServerCredentialMapping(serverId, credentialsId)); - GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig("maven-global-config-test", "maven-global-config-test", "", - mavenGlobalSettings, true, mappings); + GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig( + "maven-global-config-test", "maven-global-config-test", "", mavenGlobalSettings, true, mappings); 
GlobalConfigFiles.get().save(mavenGlobalSettingsConfig); GlobalMavenConfig.get().setGlobalSettingsProvider(new MvnGlobalSettingsProvider(mavenGlobalSettingsConfig.id)); @@ -86,5 +85,4 @@ public void should_hide_server_username_and_password(boolean usernameIsSecret) t GlobalMavenConfig.get().setSettingsProvider(null); } } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecoratorTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecoratorTest.java index 949a67ca..b36ecfc8 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecoratorTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecoratorTest.java @@ -5,7 +5,6 @@ import java.util.Arrays; import java.util.List; import java.util.stream.Stream; - import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -35,7 +34,8 @@ public void setupMockDelegateAndDecorator() { @ParameterizedTest @MethodSource - public void testHandlingOfCustomJarTypes(String type, String extension, List additionalExpectedReportedTypes) { + public void testHandlingOfCustomJarTypes( + String type, String extension, List additionalExpectedReportedTypes) { recordGeneratedArtifact(type, extension); verifyRecordGeneratedArtifactCalled(type, extension); @@ -49,21 +49,29 @@ public void testHandlingOfCustomJarTypes(String type, String extension, List testHandlingOfCustomJarTypes() { return Stream.of( // simple cases - createTestParameters("pom", "pom"), createTestParameters("jar", "jar"), createTestParameters("war", "war"), createTestParameters("ear", "ear"), + createTestParameters("pom", "pom"), + createTestParameters("jar", "jar"), + createTestParameters("war", "war"), + createTestParameters("ear", "ear"), 
createTestParameters("rar", "rar"), // known types with different extension - createTestParameters("test-jar", "jar"), createTestParameters("maven-plugin", "jar"), createTestParameters("ejb", "jar"), - createTestParameters("ejb-client", "jar"), createTestParameters("java-source", "jar"), createTestParameters("javadoc", "jar"), + createTestParameters("test-jar", "jar"), + createTestParameters("maven-plugin", "jar"), + createTestParameters("ejb", "jar"), + createTestParameters("ejb-client", "jar"), + createTestParameters("java-source", "jar"), + createTestParameters("javadoc", "jar"), // unknown types with different extension createTestParameters("nbm", "jar", "jar"), // JENKINS-52303 createTestParameters("bundle", "jar", "jar"), // JENKINS-47069 createTestParameters("docker-info", "jar", "jar") // JENKINS-59500 - ); + ); } - private static Arguments createTestParameters(String type, String extension, String... additionalExpectedReportedTypes) { + private static Arguments createTestParameters( + String type, String extension, String... 
additionalExpectedReportedTypes) { return Arguments.of(type, extension, Arrays.asList(additionalExpectedReportedTypes)); } @@ -76,8 +84,17 @@ private void verifyRecordGeneratedArtifactCalled(String type, String extension) } private void recordGeneratedArtifact(PipelineMavenPluginDao dao, String type, String extension) { - dao.recordGeneratedArtifact(JOB_FULL_NAME, BUILD_NUMBER, GROUP_ID, ARTIFACT_ID, VERSION, type, BASE_VERSION, REPOSITORY_URL, SKIP_DOWNSTREAM_TRIGGERS, - extension, CLASSIFIER); + dao.recordGeneratedArtifact( + JOB_FULL_NAME, + BUILD_NUMBER, + GROUP_ID, + ARTIFACT_ID, + VERSION, + type, + BASE_VERSION, + REPOSITORY_URL, + SKIP_DOWNSTREAM_TRIGGERS, + extension, + CLASSIFIER); } - -} \ No newline at end of file +} diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecoratorTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecoratorTest.java index a5811abe..c9629ed3 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecoratorTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecoratorTest.java @@ -14,7 +14,8 @@ public class MonitoringPipelineMavenPluginDaoDecoratorTest { private PipelineMavenPluginDao delegate = mock(PipelineMavenPluginDao.class); - private MonitoringPipelineMavenPluginDaoDecorator decorator = new MonitoringPipelineMavenPluginDaoDecorator(delegate); + private MonitoringPipelineMavenPluginDaoDecorator decorator = + new MonitoringPipelineMavenPluginDaoDecorator(delegate); @AfterEach public void checkDelegate() { @@ -156,5 +157,4 @@ public void shoudIncrementWriteWhenCleanup() { verify(delegate).cleanup(); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemoryTest.java 
b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemoryTest.java index f352ea68..0b352501 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemoryTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemoryTest.java @@ -7,7 +7,6 @@ import static org.mockito.Mockito.when; import java.util.Map; - import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/docker/JavaGitContainerTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/docker/JavaGitContainerTest.java index 47753afa..7a578567 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/docker/JavaGitContainerTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/docker/JavaGitContainerTest.java @@ -36,12 +36,14 @@ public class JavaGitContainerTest extends AbstractIntegrationTest { @Container - public GenericContainer containerRule = new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22); + public GenericContainer containerRule = + new GenericContainer<>("localhost/pipeline-maven/java-git").withExposedPorts(22); @Test public void smokes() throws Exception { - assertThat(containerRule.execInContainer("java", "-version").getStderr()).contains("openjdk version \"11"); - assertThat(containerRule.execInContainer("git", "--version").getStdout()).contains("git version 2."); + assertThat(containerRule.execInContainer("java", "-version").getStderr()) + .contains("openjdk version \"11"); + assertThat(containerRule.execInContainer("git", "--version").getStdout()) + .contains("git version 2."); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelperTest.java 
b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelperTest.java index bb4e91ad..c77928c2 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelperTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelperTest.java @@ -11,7 +11,6 @@ import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; - import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListenerTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListenerTest.java index 58c36a0e..b7f39697 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListenerTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListenerTest.java @@ -4,9 +4,9 @@ import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; +import hudson.model.Result; import java.util.Arrays; import java.util.Collections; - import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.WithMavenStep; import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao; @@ -21,8 +21,6 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import hudson.model.Result; - @ExtendWith(MockitoExtension.class) public class DatabaseSyncRunListenerTest { diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerIntegrationTest.java 
b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerIntegrationTest.java index f5a026e4..f440ee4d 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerIntegrationTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerIntegrationTest.java @@ -3,10 +3,11 @@ import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThat; +import hudson.ExtensionList; +import hudson.model.Result; import java.io.File; import java.util.ArrayList; import java.util.List; - import org.jenkinsci.plugins.pipeline.maven.AbstractIntegrationTest; import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; @@ -21,9 +22,6 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import hudson.ExtensionList; -import hudson.model.Result; - /** * We need some tests. Unfortunately, it is very hard to do unit tests because * Jenkins APIs are almost impossible to mock. 
@@ -37,7 +35,8 @@ public class DownstreamPipelineTriggerRunListenerIntegrationTest extends Abstrac @BeforeEach public void setup() throws Exception { - ExtensionList.lookupSingleton(GlobalPipelineMavenConfig.class).setDaoClass(PipelineMavenPluginH2Dao.class.getName()); + ExtensionList.lookupSingleton(GlobalPipelineMavenConfig.class) + .setDaoClass(PipelineMavenPluginH2Dao.class.getName()); String jdbcUrl = "jdbc:h2:file:" + new File("target", getClass().getName() + "-h2").getAbsolutePath() + ";" + "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE"; ExtensionList.lookupSingleton(GlobalPipelineMavenConfig.class).setJdbcUrl(jdbcUrl); @@ -62,8 +61,10 @@ public void setup() throws Exception { @Test public void test_infinite_loop() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/multi_module_maven_project/"); - //@formatter:off + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/multi_module_maven_project/"); + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -74,7 +75,7 @@ public void test_infinite_loop() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline1 = jenkinsRule.createProject(WorkflowJob.class, "pipeline-1"); pipeline1.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -85,19 +86,28 @@ public void test_infinite_loop() throws Exception { WorkflowRun pipeline2Build1 = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline2.scheduleBuild2(0)); for (WorkflowRun run : asList(pipeline1Build1, pipeline2Build1)) { - List dependencies = GlobalPipelineMavenConfig.get().getDao().listDependencies(run.getParent().getFullName(), run.number); - assertThat(dependencies).contains(dep("jenkins.mvn.test.multimodule", "shared-core", "jar", "0.0.1-SNAPSHOT", "compile"), - 
dep("junit", "junit", "jar", "4.13.1", "test"), dep("org.hamcrest", "hamcrest-core", "jar", "1.3", "test")); + List dependencies = GlobalPipelineMavenConfig.get() + .getDao() + .listDependencies(run.getParent().getFullName(), run.number); + assertThat(dependencies) + .contains( + dep("jenkins.mvn.test.multimodule", "shared-core", "jar", "0.0.1-SNAPSHOT", "compile"), + dep("junit", "junit", "jar", "4.13.1", "test"), + dep("org.hamcrest", "hamcrest-core", "jar", "1.3", "test")); - List generatedArtifacts = GlobalPipelineMavenConfig.get().getDao().getGeneratedArtifacts(run.getParent().getFullName(), run.number); - assertThat(generatedArtifacts).contains(artifact("jenkins.mvn.test.multimodule:demo-1:jar:0.0.1-SNAPSHOT"), - artifact("jenkins.mvn.test.multimodule:demo-1:pom:0.0.1-SNAPSHOT"), artifact("jenkins.mvn.test.multimodule:demo-2:jar:0.0.1-SNAPSHOT"), - artifact("jenkins.mvn.test.multimodule:demo-2:pom:0.0.1-SNAPSHOT"), - artifact("jenkins.mvn.test.multimodule:multimodule-parent:pom:0.0.1-SNAPSHOT"), - artifact("jenkins.mvn.test.multimodule:shared-core:jar:0.0.1-SNAPSHOT"), - artifact("jenkins.mvn.test.multimodule:shared-core:pom:0.0.1-SNAPSHOT")); + List generatedArtifacts = GlobalPipelineMavenConfig.get() + .getDao() + .getGeneratedArtifacts(run.getParent().getFullName(), run.number); + assertThat(generatedArtifacts) + .contains( + artifact("jenkins.mvn.test.multimodule:demo-1:jar:0.0.1-SNAPSHOT"), + artifact("jenkins.mvn.test.multimodule:demo-1:pom:0.0.1-SNAPSHOT"), + artifact("jenkins.mvn.test.multimodule:demo-2:jar:0.0.1-SNAPSHOT"), + artifact("jenkins.mvn.test.multimodule:demo-2:pom:0.0.1-SNAPSHOT"), + artifact("jenkins.mvn.test.multimodule:multimodule-parent:pom:0.0.1-SNAPSHOT"), + artifact("jenkins.mvn.test.multimodule:shared-core:jar:0.0.1-SNAPSHOT"), + artifact("jenkins.mvn.test.multimodule:shared-core:pom:0.0.1-SNAPSHOT")); } - } private MavenDependency dep(String groupId, String artifactId, String type, String version, String scope) { @@ -115,5 +125,4 
@@ private MavenArtifact artifact(String identifier) { result.setBaseVersion(result.getVersion()); return result; } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerTest.java index 7764839b..6ffba773 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListenerTest.java @@ -11,6 +11,14 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import hudson.model.Cause; +import hudson.model.Job; +import hudson.model.Queue; +import hudson.model.Queue.Item; +import hudson.model.Queue.Task; +import hudson.model.Result; +import hudson.model.TaskListener; +import hudson.model.queue.ScheduleResult; import java.io.PrintStream; import java.util.Arrays; import java.util.Collections; @@ -18,7 +26,7 @@ import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; - +import jenkins.model.Jenkins; import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.WithMavenStep; @@ -42,16 +50,6 @@ import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import hudson.model.Cause; -import hudson.model.Job; -import hudson.model.Queue; -import hudson.model.Result; -import hudson.model.TaskListener; -import hudson.model.Queue.Item; -import hudson.model.Queue.Task; -import hudson.model.queue.ScheduleResult; -import jenkins.model.Jenkins; - @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class DownstreamPipelineTriggerRunListenerTest { @@ -251,8 +249,10 @@ 
public void test_wanted_result_with_downstream_without_job() { when(build.getResult()).thenReturn(Result.SUCCESS); when(build.getParent()).thenReturn(job); when(build.getNumber()).thenReturn(42); - when(dao.listDownstreamJobsByArtifact("pipeline", 42)).thenReturn( - Collections.singletonMap(new MavenArtifact("groupId:artifactId:jar:version"), new TreeSet<>(Collections.singleton("downstream")))); + when(dao.listDownstreamJobsByArtifact("pipeline", 42)) + .thenReturn(Collections.singletonMap( + new MavenArtifact("groupId:artifactId:jar:version"), + new TreeSet<>(Collections.singleton("downstream")))); listener.onCompleted(build, taskListener); @@ -277,8 +277,10 @@ public void test_wanted_result_with_downstream_without_artifacts() { when(build.getResult()).thenReturn(Result.SUCCESS); when(build.getParent()).thenReturn(job); when(build.getNumber()).thenReturn(42); - when(dao.listDownstreamJobsByArtifact("pipeline", 42)).thenReturn( - Collections.singletonMap(new MavenArtifact("groupId:artifactId:jar:version"), new TreeSet<>(Collections.singleton("downstream")))); + when(dao.listDownstreamJobsByArtifact("pipeline", 42)) + .thenReturn(Collections.singletonMap( + new MavenArtifact("groupId:artifactId:jar:version"), + new TreeSet<>(Collections.singleton("downstream")))); when(jenkins.getItemByFullName("downstream", Job.class)).thenReturn(downstream); listener.onCompleted(build, taskListener); @@ -312,15 +314,20 @@ public void test_wanted_result_with_downstream() { when(build.getResult()).thenReturn(Result.SUCCESS); when(build.getParent()).thenReturn(job); when(build.getNumber()).thenReturn(42); - when(dao.listDownstreamJobsByArtifact("pipeline", 42)).thenReturn( - Collections.singletonMap(new MavenArtifact("groupId:upstreamArtifactId:jar:version"), new TreeSet<>(Collections.singleton("downstream")))); + when(dao.listDownstreamJobsByArtifact("pipeline", 42)) + .thenReturn(Collections.singletonMap( + new MavenArtifact("groupId:upstreamArtifactId:jar:version"), + new 
TreeSet<>(Collections.singleton("downstream")))); when(jenkins.getItemByFullName("downstream", Job.class)).thenReturn(downstream); when(dao.getGeneratedArtifacts("downstream", 4242)) - .thenReturn(Collections.singletonList(new MavenArtifact("groupId:downstreamArtifactId:jar:version"))); + .thenReturn( + Collections.singletonList(new MavenArtifact("groupId:downstreamArtifactId:jar:version"))); when(dao.listDownstreamJobsByArtifact("downstream", 4242)) - .thenReturn(Collections.singletonMap(new MavenArtifact("groupId:downstreamArtifactId:jar:version"), new TreeSet<>())); + .thenReturn(Collections.singletonMap( + new MavenArtifact("groupId:downstreamArtifactId:jar:version"), new TreeSet<>())); when(service.isDownstreamVisibleByUpstreamBuildAuth(downstream)).thenReturn(true); - when(service.isUpstreamBuildVisibleByDownstreamBuildAuth(job, downstream)).thenReturn(true); + when(service.isUpstreamBuildVisibleByDownstreamBuildAuth(job, downstream)) + .thenReturn(true); listener.onCompleted(build, taskListener); @@ -347,8 +354,12 @@ public void test_wanted_result_with_multiple_downstreams() { when(queueResult.getItem()).thenReturn(queuedItem); Map> downstreamJobs = new HashMap<>(); - downstreamJobs.put(new MavenArtifact("groupId:upstreamArtifactId1:jar:version"), new TreeSet<>(Collections.singleton("downstream"))); - downstreamJobs.put(new MavenArtifact("groupId:upstreamArtifactId2:jar:version"), new TreeSet<>(Collections.singleton("downstream"))); + downstreamJobs.put( + new MavenArtifact("groupId:upstreamArtifactId1:jar:version"), + new TreeSet<>(Collections.singleton("downstream"))); + downstreamJobs.put( + new MavenArtifact("groupId:upstreamArtifactId2:jar:version"), + new TreeSet<>(Collections.singleton("downstream"))); MavenDependencyUpstreamCause cause = mock(MavenDependencyUpstreamCause.class); WorkflowJob job = mock(WorkflowJob.class); when(job.getFullName()).thenReturn("pipeline"); @@ -364,11 +375,14 @@ public void test_wanted_result_with_multiple_downstreams() { 
when(dao.listDownstreamJobsByArtifact("pipeline", 42)).thenReturn(downstreamJobs); when(jenkins.getItemByFullName("downstream", Job.class)).thenReturn(downstream); when(dao.getGeneratedArtifacts("downstream", 4242)) - .thenReturn(Collections.singletonList(new MavenArtifact("groupId:downstreamArtifactId:jar:version"))); + .thenReturn( + Collections.singletonList(new MavenArtifact("groupId:downstreamArtifactId:jar:version"))); when(dao.listDownstreamJobsByArtifact("downstream", 4242)) - .thenReturn(Collections.singletonMap(new MavenArtifact("groupId:downstreamArtifactId:jar:version"), new TreeSet<>())); + .thenReturn(Collections.singletonMap( + new MavenArtifact("groupId:downstreamArtifactId:jar:version"), new TreeSet<>())); when(service.isDownstreamVisibleByUpstreamBuildAuth(downstream)).thenReturn(true); - when(service.isUpstreamBuildVisibleByDownstreamBuildAuth(job, downstream)).thenReturn(true); + when(service.isUpstreamBuildVisibleByDownstreamBuildAuth(job, downstream)) + .thenReturn(true); listener.onCompleted(build, taskListener); @@ -385,5 +399,4 @@ public void test_wanted_result_with_multiple_downstreams() { verifyNoMoreInteractions(dao, service, trigger, queue); } } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesListerTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesListerTest.java index d7478b5c..78151326 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesListerTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesListerTest.java @@ -4,9 +4,7 @@ import java.io.InputStream; import java.util.List; - import javax.xml.parsers.DocumentBuilderFactory; - import org.jenkinsci.plugins.pipeline.maven.MavenDependency; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -31,9 +29,13 @@ public void listArtifactDependencies() throws 
Exception { List mavenArtifacts = DependenciesLister.listDependencies(doc.getDocumentElement(), null); assertThat(mavenArtifacts).hasSize(2); - assertThat(mavenArtifacts).anyMatch(dep -> "spring-test".equals(dep.getArtifactId()) - && "/path/to/spring-petclinic/spring-test/3.2.16.RELEASE/spring-test-3.2.16.RELEASE.jar".equals(dep.getFile())); - assertThat(mavenArtifacts).anyMatch(dep -> "spring-core".equals(dep.getArtifactId()) - && "/path/to/spring-petclinic/3.2.16.RELEASE/spring-core-3.2.16.RELEASE.jar".equals(dep.getFile())); + assertThat(mavenArtifacts) + .anyMatch(dep -> "spring-test".equals(dep.getArtifactId()) + && "/path/to/spring-petclinic/spring-test/3.2.16.RELEASE/spring-test-3.2.16.RELEASE.jar" + .equals(dep.getFile())); + assertThat(mavenArtifacts) + .anyMatch(dep -> "spring-core".equals(dep.getArtifactId()) + && "/path/to/spring-petclinic/3.2.16.RELEASE/spring-core-3.2.16.RELEASE.jar" + .equals(dep.getFile())); } } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsReporterTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsReporterTest.java index e77b0dc2..6005fdd8 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsReporterTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/GeneratedArtifactsReporterTest.java @@ -2,12 +2,11 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.FilePath; import java.io.File; import java.io.InputStream; import java.util.List; - import javax.xml.parsers.DocumentBuilderFactory; - import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; import org.junit.jupiter.api.BeforeEach; @@ -15,8 +14,6 @@ import org.w3c.dom.Document; import org.w3c.dom.Element; -import hudson.FilePath; - /** * @author Cyrille Le Clerc */ @@ -39,20 +36,24 @@ 
public class GeneratedArtifactsReporterTest { public void before() throws Exception { { String mavenSpyLogsOnMacOSXPath = "org/jenkinsci/plugins/pipeline/maven/maven-spy.xml"; - InputStream inMacOSX = Thread.currentThread().getContextClassLoader().getResourceAsStream(mavenSpyLogsOnMacOSXPath); - mavenSpyLogsOnMacOSX = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(inMacOSX); + InputStream inMacOSX = + Thread.currentThread().getContextClassLoader().getResourceAsStream(mavenSpyLogsOnMacOSXPath); + mavenSpyLogsOnMacOSX = + DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(inMacOSX); } { String mavenSpyLogsOnWindowsPath = "org/jenkinsci/plugins/pipeline/maven/maven-spy-windows.xml"; - InputStream inWindows = Thread.currentThread().getContextClassLoader().getResourceAsStream(mavenSpyLogsOnWindowsPath); - mavenSpyLogsOnWindows = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(inWindows); - + InputStream inWindows = + Thread.currentThread().getContextClassLoader().getResourceAsStream(mavenSpyLogsOnWindowsPath); + mavenSpyLogsOnWindows = + DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(inWindows); } } @Test public void testListArtifactsMacOSX() throws Exception { - List mavenArtifacts = XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnMacOSX.getDocumentElement(), false); + List mavenArtifacts = + XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnMacOSX.getDocumentElement(), false); System.out.println(mavenArtifacts); assertThat(mavenArtifacts).hasSize(2); @@ -61,22 +62,28 @@ public void testListArtifactsMacOSX() throws Exception { assertThat(pomArtifact.getFile()).isEqualTo("/path/to/spring-petclinic/pom.xml"); assertThat(pomArtifact.getFileName()).isEqualTo("spring-petclinic-" + PETCLINIC_VERSION + ".pom"); - Element projectStartedElt = XmlUtils.getExecutionEvents(this.mavenSpyLogsOnMacOSX.getDocumentElement(), "ProjectStarted").get(0); - String workspace = XmlUtils.getUniqueChildElement(projectStartedElt, 
"project").getAttribute("baseDir"); + Element projectStartedElt = XmlUtils.getExecutionEvents( + this.mavenSpyLogsOnMacOSX.getDocumentElement(), "ProjectStarted") + .get(0); + String workspace = + XmlUtils.getUniqueChildElement(projectStartedElt, "project").getAttribute("baseDir"); - String pomPathInWorkspace = XmlUtils.getPathInWorkspace(pomArtifact.getFile(), new FilePath(new File(workspace))); + String pomPathInWorkspace = + XmlUtils.getPathInWorkspace(pomArtifact.getFile(), new FilePath(new File(workspace))); System.out.println("workspace: " + workspace); System.out.println("pomPathInWorkspace: " + pomPathInWorkspace); MavenArtifact mavenArtifact = mavenArtifacts.get(1); assertThat(mavenArtifact.getArtifactId()).isEqualTo("spring-petclinic"); - assertThat(mavenArtifact.getFile()).isEqualTo("/path/to/spring-petclinic/target/spring-petclinic-" + PETCLINIC_VERSION + ".jar"); + assertThat(mavenArtifact.getFile()) + .isEqualTo("/path/to/spring-petclinic/target/spring-petclinic-" + PETCLINIC_VERSION + ".jar"); assertThat(mavenArtifact.getFileName()).isEqualTo("spring-petclinic-" + PETCLINIC_VERSION + ".jar"); } @Test public void testListArtifactsWindows() throws Exception { - List mavenArtifacts = XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnWindows.getDocumentElement(), false); + List mavenArtifacts = + XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnWindows.getDocumentElement(), false); System.out.println(mavenArtifacts); assertThat(mavenArtifacts).hasSize(2); @@ -85,39 +92,48 @@ public void testListArtifactsWindows() throws Exception { assertThat(pomArtifact.getFile()).isEqualTo("C:\\path\\to\\spring-petclinic\\pom.xml"); assertThat(pomArtifact.getFileName()).isEqualTo("spring-petclinic-" + PETCLINIC_VERSION + ".pom"); - Element projectStartedElt = XmlUtils.getExecutionEvents(this.mavenSpyLogsOnWindows.getDocumentElement(), "ProjectStarted").get(0); - String workspace = XmlUtils.getUniqueChildElement(projectStartedElt, "project").getAttribute("baseDir"); 
+ Element projectStartedElt = XmlUtils.getExecutionEvents( + this.mavenSpyLogsOnWindows.getDocumentElement(), "ProjectStarted") + .get(0); + String workspace = + XmlUtils.getUniqueChildElement(projectStartedElt, "project").getAttribute("baseDir"); - String pomPathInWorkspace = XmlUtils.getPathInWorkspace(pomArtifact.getFile(), new FilePath(new File(workspace))); + String pomPathInWorkspace = + XmlUtils.getPathInWorkspace(pomArtifact.getFile(), new FilePath(new File(workspace))); System.out.println("workspace: " + workspace); System.out.println("pomPathInWorkspace: " + pomPathInWorkspace); MavenArtifact mavenArtifact = mavenArtifacts.get(1); assertThat(mavenArtifact.getArtifactId()).isEqualTo("spring-petclinic"); - assertThat(mavenArtifact.getFile()).isEqualTo("C:\\path\\to\\spring-petclinic\\target\\spring-petclinic-" + PETCLINIC_VERSION + ".jar"); + assertThat(mavenArtifact.getFile()) + .isEqualTo("C:\\path\\to\\spring-petclinic\\target\\spring-petclinic-" + PETCLINIC_VERSION + ".jar"); assertThat(mavenArtifact.getFileName()).isEqualTo("spring-petclinic-" + PETCLINIC_VERSION + ".jar"); } @Test public void testListAttachedArtifactsMacOSX() throws Exception { - List mavenArtifacts = XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnMacOSX.getDocumentElement(), true); + List mavenArtifacts = + XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnMacOSX.getDocumentElement(), true); assertThat(mavenArtifacts).hasSize(3); MavenArtifact mavenArtifact = mavenArtifacts.get(2); // 1st is pom, 2nd is jar, 3rd is sources System.out.println(mavenArtifacts); assertThat(mavenArtifact.getArtifactId()).isEqualTo("spring-petclinic"); assertThat(mavenArtifact.getClassifier()).isEqualTo("sources"); - assertThat(mavenArtifact.getFile()).isEqualTo("/path/to/spring-petclinic/target/spring-petclinic-" + PETCLINIC_VERSION + "-sources.jar"); + assertThat(mavenArtifact.getFile()) + .isEqualTo("/path/to/spring-petclinic/target/spring-petclinic-" + PETCLINIC_VERSION + "-sources.jar"); } @Test 
public void testListAttachedArtifactsWindows() throws Exception { - List mavenArtifacts = XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnWindows.getDocumentElement(), true); + List mavenArtifacts = + XmlUtils.listGeneratedArtifacts(this.mavenSpyLogsOnWindows.getDocumentElement(), true); assertThat(mavenArtifacts).hasSize(3); MavenArtifact mavenArtifact = mavenArtifacts.get(2); // 1st is pom, 2nd is jar, 3rd is sources System.out.println(mavenArtifacts); assertThat(mavenArtifact.getArtifactId()).isEqualTo("spring-petclinic"); assertThat(mavenArtifact.getClassifier()).isEqualTo("sources"); - assertThat(mavenArtifact.getFile()).isEqualTo("C:\\path\\to\\spring-petclinic\\target\\spring-petclinic-" + PETCLINIC_VERSION + "-sources.jar"); + assertThat(mavenArtifact.getFile()) + .isEqualTo("C:\\path\\to\\spring-petclinic\\target\\spring-petclinic-" + PETCLINIC_VERSION + + "-sources.jar"); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherInternalTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherInternalTest.java index 917e3fce..13eab53f 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherInternalTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherInternalTest.java @@ -2,35 +2,35 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.FilePath; +import hudson.model.TaskListener; +import hudson.util.StreamTaskListener; import java.io.File; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; - import javax.xml.parsers.DocumentBuilderFactory; - import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils; import org.junit.jupiter.api.Test; import org.w3c.dom.Document; import org.w3c.dom.Element; -import hudson.FilePath; -import 
hudson.model.TaskListener; -import hudson.util.StreamTaskListener; - public class InvokerRunsPublisherInternalTest { @Test public void test_relative_path_and_absolute_path_and_variabilized_path_run_goal() throws Exception { String mavenSpyLogs = "org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-invoker-plugin-run.xml"; - test_relative_path_and_absolute_path_and_variabilized_path_run_goal(mavenSpyLogs, InvokerRunsPublisher.RUN_GOAL); + test_relative_path_and_absolute_path_and_variabilized_path_run_goal( + mavenSpyLogs, InvokerRunsPublisher.RUN_GOAL); } @Test public void test_relative_path_and_absolute_path_and_variabilized_path_integration_test_goal() throws Exception { - String mavenSpyLogs = "org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-invoker-plugin-integration-test.xml"; - test_relative_path_and_absolute_path_and_variabilized_path_run_goal(mavenSpyLogs, InvokerRunsPublisher.INTEGRATION_TEST_GOAL); + String mavenSpyLogs = + "org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-invoker-plugin-integration-test.xml"; + test_relative_path_and_absolute_path_and_variabilized_path_run_goal( + mavenSpyLogs, InvokerRunsPublisher.INTEGRATION_TEST_GOAL); } /** @@ -38,12 +38,17 @@ public void test_relative_path_and_absolute_path_and_variabilized_path_integrati * /path/to/khmarbaise/maui/src/main/resources/mp-it-1/target/it -> absolute * path reportsDirectory = ${invoker.reportsDirectory} -> variabilized path */ - private void test_relative_path_and_absolute_path_and_variabilized_path_run_goal(String mavenSpyLogs, String goal) throws Exception { + private void test_relative_path_and_absolute_path_and_variabilized_path_run_goal(String mavenSpyLogs, String goal) + throws Exception { InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(mavenSpyLogs); Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); InvokerRunsPublisher invokerRunsPublisher = new InvokerRunsPublisher(); - List invokerRunEvents = 
XmlUtils.getExecutionEventsByPlugin(doc.getDocumentElement(), InvokerRunsPublisher.GROUP_ID, - InvokerRunsPublisher.ARTIFACT_ID, goal, "MojoSucceeded"); + List invokerRunEvents = XmlUtils.getExecutionEventsByPlugin( + doc.getDocumentElement(), + InvokerRunsPublisher.GROUP_ID, + InvokerRunsPublisher.ARTIFACT_ID, + goal, + "MojoSucceeded"); FilePath workspace = new FilePath(new File("/path/to/khmarbaise/maui/src/main/resources/mp-it-1")); TaskListener listener = new StreamTaskListener(System.out, StandardCharsets.UTF_8); @@ -64,15 +69,14 @@ private void test_relative_path_and_absolute_path_and_variabilized_path_run_goal Element cloneProjectsToElt = XmlUtils.getUniqueChildElementOrNull(pluginElt, "cloneProjectsTo"); Element projectsDirectoryElt = XmlUtils.getUniqueChildElementOrNull(pluginElt, "projectsDirectory"); - String reportsDirectory = invokerRunsPublisher.expandAndRelativize(reportsDirectoryElt, "reportsDirectory", invokerRunSucceedEvent, projectElt, - workspace, listener); + String reportsDirectory = invokerRunsPublisher.expandAndRelativize( + reportsDirectoryElt, "reportsDirectory", invokerRunSucceedEvent, projectElt, workspace, listener); assertThat(reportsDirectory).isEqualTo("target" + File.separator + "invoker-reports"); - String projectsDirectory = invokerRunsPublisher.expandAndRelativize(projectsDirectoryElt, "projectsDirectory", invokerRunSucceedEvent, projectElt, - workspace, listener); + String projectsDirectory = invokerRunsPublisher.expandAndRelativize( + projectsDirectoryElt, "projectsDirectory", invokerRunSucceedEvent, projectElt, workspace, listener); assertThat(projectsDirectory).isEqualTo("src" + File.separator + "it"); - String cloneProjectsTo = invokerRunsPublisher.expandAndRelativize(cloneProjectsToElt, "cloneProjectsTo", invokerRunSucceedEvent, projectElt, workspace, - listener); + String cloneProjectsTo = invokerRunsPublisher.expandAndRelativize( + cloneProjectsToElt, "cloneProjectsTo", invokerRunSucceedEvent, projectElt, workspace, 
listener); assertThat(cloneProjectsTo).isEqualTo("target" + File.separator + "it"); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherTest.java index 42cd77b9..bc1a8d93 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/InvokerRunsPublisherTest.java @@ -2,8 +2,8 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.model.Result; import java.util.Collection; - import org.jenkinsci.plugins.pipeline.maven.AbstractIntegrationTest; import org.jenkinsci.plugins.pipeline.maven.TestUtils; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; @@ -12,16 +12,16 @@ import org.junit.jupiter.api.Test; import org.jvnet.hudson.test.Issue; -import hudson.model.Result; - public class InvokerRunsPublisherTest extends AbstractIntegrationTest { @Issue("JENKINS-70561") @Test public void maven_build_maven_plugin_project_on_master_succeeds() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_plugin_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_plugin_project/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -32,23 +32,32 @@ public void maven_build_maven_plugin_project_on_master_succeeds() throws Excepti " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = 
jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); // verify .pom is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom", build); // verify .jar is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar", build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); - assertThat(artifactsFileNames).contains("hello-maven-plugin-1.0-SNAPSHOT.pom", "hello-maven-plugin-1.0-SNAPSHOT.jar"); - - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom"); + assertThat(artifactsFileNames) + .contains("hello-maven-plugin-1.0-SNAPSHOT.pom", "hello-maven-plugin-1.0-SNAPSHOT.jar"); + + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom"); // verify Invoker Archiver is called jenkinsRule.assertLogContains( @@ -59,10 +68,11 @@ public void maven_build_maven_plugin_project_on_master_succeeds() throws Excepti @Issue("JENKINS-70561") @Test public void maven_build_maven_plugin_project_with_invoker_as_junit_on_master_succeeds() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, + loadSourceCodeInGitRepository( + 
this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_plugin_project_with_invoker_as_junit/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -73,28 +83,36 @@ public void maven_build_maven_plugin_project_with_invoker_as_junit_on_master_suc " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); // verify .pom is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom", build); // verify .jar is archived and fingerprinted - jenkinsRule.assertLogContains("under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar", build); + jenkinsRule.assertLogContains( + "under jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar", build); Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); - assertThat(artifactsFileNames).contains("hello-maven-plugin-1.0-SNAPSHOT.pom", "hello-maven-plugin-1.0-SNAPSHOT.jar"); - - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom"); + assertThat(artifactsFileNames) + .contains("hello-maven-plugin-1.0-SNAPSHOT.pom", "hello-maven-plugin-1.0-SNAPSHOT.jar"); + + verifyFileIsFingerPrinted( + pipeline, + build, + 
"jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, + build, + "jenkins/mvn/test/hello-maven-plugin/1.0-SNAPSHOT/hello-maven-plugin-1.0-SNAPSHOT.pom"); // verify Invoker Archiver is called jenkinsRule.assertLogContains( "[withMaven] invokerPublisher - Archive test results for Maven artifact jenkins.mvn.test:hello-maven-plugin:maven-plugin:1.0-SNAPSHOT generated by", build); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisherTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisherTest.java index 7d3f9d2f..24ac7551 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisherTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JacocoReportPublisherTest.java @@ -2,9 +2,11 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.model.Result; +import hudson.plugins.jacoco.JacocoBuildAction; +import hudson.tasks.junit.TestResultAction; import java.util.Collection; import java.util.List; - import org.jenkinsci.plugins.pipeline.maven.AbstractIntegrationTest; import org.jenkinsci.plugins.pipeline.maven.TestUtils; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; @@ -12,17 +14,15 @@ import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.junit.jupiter.api.Test; -import hudson.model.Result; -import hudson.plugins.jacoco.JacocoBuildAction; -import hudson.tasks.junit.TestResultAction; - public class JacocoReportPublisherTest extends AbstractIntegrationTest { @Test public void maven_build_jar_with_jacoco_succeeds() throws Exception { - loadSourceCodeInGitRepository(this.gitRepoRule, "/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_with_jacoco_project/"); + loadSourceCodeInGitRepository( + this.gitRepoRule, + 
"/org/jenkinsci/plugins/pipeline/maven/test/test_maven_projects/maven_jar_with_jacoco_project/"); - //@formatter:off + // @formatter:off String pipelineScript = "node() {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + @@ -33,7 +33,7 @@ public void maven_build_jar_with_jacoco_succeeds() throws Exception { " }\n" + " }\n" + "}"; - //@formatter:on + // @formatter:on WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "jar-with-jacoco"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); @@ -42,8 +42,10 @@ public void maven_build_jar_with_jacoco_succeeds() throws Exception { Collection artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); assertThat(artifactsFileNames).contains("jar-with-jacoco-0.1-SNAPSHOT.pom", "jar-with-jacoco-0.1-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/jar-with-jacoco/0.1-SNAPSHOT/jar-with-jacoco-0.1-SNAPSHOT.jar"); - verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/jar-with-jacoco/0.1-SNAPSHOT/jar-with-jacoco-0.1-SNAPSHOT.pom"); + verifyFileIsFingerPrinted( + pipeline, build, "jenkins/mvn/test/jar-with-jacoco/0.1-SNAPSHOT/jar-with-jacoco-0.1-SNAPSHOT.jar"); + verifyFileIsFingerPrinted( + pipeline, build, "jenkins/mvn/test/jar-with-jacoco/0.1-SNAPSHOT/jar-with-jacoco-0.1-SNAPSHOT.pom"); List testResultActions = build.getActions(TestResultAction.class); assertThat(testResultActions).hasSize(1); @@ -56,5 +58,4 @@ public void maven_build_jar_with_jacoco_succeeds() throws Exception { JacocoBuildAction jacocoBuildAction = jacocoBuildActions.get(0); assertThat(jacocoBuildAction.getProjectActions()).hasSize(1); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisherTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisherTest.java index cc1d4b2b..ba91dc79 100644 --- 
a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisherTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/publishers/JunitTestsPublisherTest.java @@ -6,50 +6,50 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.google.jenkins.flakyTestHandler.junit.FlakyTestResult; +import com.google.jenkins.flakyTestHandler.plugin.FlakyTestResultCollector; +import hudson.EnvVars; +import hudson.FilePath; +import hudson.Launcher; +import hudson.model.Run; +import hudson.remoting.Channel; +import hudson.tasks.junit.TestResultAction; import java.io.File; import java.io.InputStream; import java.util.Calendar; - import javax.xml.parsers.DocumentBuilderFactory; - import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.jenkinsci.plugins.workflow.steps.StepContext; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.w3c.dom.Document; -import com.google.jenkins.flakyTestHandler.junit.FlakyTestResult; -import com.google.jenkins.flakyTestHandler.plugin.FlakyTestResultCollector; - -import hudson.EnvVars; -import hudson.FilePath; -import hudson.Launcher; -import hudson.model.Run; -import hudson.remoting.Channel; -import hudson.tasks.junit.TestResultAction; - /* java.lang.IllegalStateException: Expected 1 instance of io.jenkins.plugins.junit.storage.JunitTestResultStorageConfiguration but got 0 - at hudson.ExtensionList.lookupSingleton(ExtensionList.java:451) - at io.jenkins.plugins.junit.storage.JunitTestResultStorageConfiguration.get(JunitTestResultStorageConfiguration.java:44) - at io.jenkins.plugins.junit.storage.JunitTestResultStorage.find(JunitTestResultStorage.java:62) - at hudson.tasks.junit.TestResultAction.(TestResultAction.java:89) - at hudson.tasks.junit.JUnitResultArchiver.parseAndAttach(JUnitResultArchiver.java:188) - at 
org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher.executeReporter(JunitTestsPublisher.java:329) - at org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher.process(JunitTestsPublisher.java:215) - at org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisherTest.test_surefire_plugin(JunitTestsPublisherTest.java:52) + at hudson.ExtensionList.lookupSingleton(ExtensionList.java:451) + at io.jenkins.plugins.junit.storage.JunitTestResultStorageConfiguration.get(JunitTestResultStorageConfiguration.java:44) + at io.jenkins.plugins.junit.storage.JunitTestResultStorage.find(JunitTestResultStorage.java:62) + at hudson.tasks.junit.TestResultAction.(TestResultAction.java:89) + at hudson.tasks.junit.JUnitResultArchiver.parseAndAttach(JUnitResultArchiver.java:188) + at org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher.executeReporter(JunitTestsPublisher.java:329) + at org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher.process(JunitTestsPublisher.java:215) + at org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisherTest.test_surefire_plugin(JunitTestsPublisherTest.java:52) */ -@Disabled("TODO adapt to https://github.com/jenkinsci/junit-plugin/pull/155 (mock JunitTestResultStorageConfiguration.get) or stop using mock frameworks") +@Disabled( + "TODO adapt to https://github.com/jenkinsci/junit-plugin/pull/155 (mock JunitTestResultStorageConfiguration.get) or stop using mock frameworks") public class JunitTestsPublisherTest { @Test public void test_surefire_plugin() throws Exception { - final InputStream in = Thread.currentThread().getContextClassLoader() + final InputStream in = Thread.currentThread() + .getContextClassLoader() .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-surefire-plugin.xml"); - final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); + final Document doc = + 
DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); final Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis( - new File("src/test/resources/org/jenkinsci/plugins/pipeline/maven/surefire-reports/TEST-some.groupid.AnArtifactTest.xml").lastModified()); + calendar.setTimeInMillis(new File( + "src/test/resources/org/jenkinsci/plugins/pipeline/maven/surefire-reports/TEST-some.groupid.AnArtifactTest.xml") + .lastModified()); final StepContext context = mock(StepContext.class); final Run run = mock(Run.class); final Launcher launcher = mock(Launcher.class); @@ -64,7 +64,8 @@ public void test_surefire_plugin() throws Exception { when(run.getEnvironment(any())).thenReturn(envvars); when(run.getTimestamp()).thenReturn(calendar); when(run.getRootDir()).thenReturn(new File(getProperty("java.io.tmpdir"))); - when(envvars.expand(any())).thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/surefire-reports/*.xml"); + when(envvars.expand(any())) + .thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/surefire-reports/*.xml"); when(launcher.getChannel()).thenReturn(channel); when(channel.call(any(FlakyTestResultCollector.class))).thenReturn(new FlakyTestResult()); @@ -75,12 +76,15 @@ public void test_surefire_plugin() throws Exception { @Test public void test_failsafe_plugin() throws Exception { - final InputStream in = Thread.currentThread().getContextClassLoader() + final InputStream in = Thread.currentThread() + .getContextClassLoader() .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-failsafe-plugin.xml"); - final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); + final Document doc = + DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); final Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis( - new 
File("src/test/resources/org/jenkinsci/plugins/pipeline/maven/failsafe-reports/TEST-some.groupid.AnArtifactTest.xml").lastModified()); + calendar.setTimeInMillis(new File( + "src/test/resources/org/jenkinsci/plugins/pipeline/maven/failsafe-reports/TEST-some.groupid.AnArtifactTest.xml") + .lastModified()); final StepContext context = mock(StepContext.class); final Run run = mock(Run.class); final Launcher launcher = mock(Launcher.class); @@ -95,7 +99,8 @@ public void test_failsafe_plugin() throws Exception { when(run.getEnvironment(any())).thenReturn(envvars); when(run.getTimestamp()).thenReturn(calendar); when(run.getRootDir()).thenReturn(new File(getProperty("java.io.tmpdir"))); - when(envvars.expand(any())).thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/failsafe-reports/*.xml"); + when(envvars.expand(any())) + .thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/failsafe-reports/*.xml"); when(launcher.getChannel()).thenReturn(channel); when(channel.call(any(FlakyTestResultCollector.class))).thenReturn(new FlakyTestResult()); @@ -106,11 +111,15 @@ public void test_failsafe_plugin() throws Exception { @Test public void test_karma_plugin() throws Exception { - final InputStream in = Thread.currentThread().getContextClassLoader() + final InputStream in = Thread.currentThread() + .getContextClassLoader() .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-karma-plugin.xml"); - final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); + final Document doc = + DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); final Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis(new File("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/TEST-karma.xml").lastModified()); + calendar.setTimeInMillis( + new File("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/TEST-karma.xml") + .lastModified()); final StepContext 
context = mock(StepContext.class); final Run run = mock(Run.class); final Launcher launcher = mock(Launcher.class); @@ -125,7 +134,8 @@ public void test_karma_plugin() throws Exception { when(run.getEnvironment(any())).thenReturn(envvars); when(run.getTimestamp()).thenReturn(calendar); when(run.getRootDir()).thenReturn(new File(getProperty("java.io.tmpdir"))); - when(envvars.expand(any())).thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/*.xml"); + when(envvars.expand(any())) + .thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/*.xml"); when(launcher.getChannel()).thenReturn(channel); when(channel.call(any(FlakyTestResultCollector.class))).thenReturn(new FlakyTestResult()); @@ -136,11 +146,15 @@ public void test_karma_plugin() throws Exception { @Test public void test_frontend_plugin() throws Exception { - final InputStream in = Thread.currentThread().getContextClassLoader() + final InputStream in = Thread.currentThread() + .getContextClassLoader() .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-maven-frontend-plugin.xml"); - final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); + final Document doc = + DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in); final Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis(new File("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/TEST-karma.xml").lastModified()); + calendar.setTimeInMillis( + new File("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/TEST-karma.xml") + .lastModified()); final StepContext context = mock(StepContext.class); final Run run = mock(Run.class); final Launcher launcher = mock(Launcher.class); @@ -155,7 +169,8 @@ public void test_frontend_plugin() throws Exception { when(run.getEnvironment(any())).thenReturn(envvars); when(run.getTimestamp()).thenReturn(calendar); when(run.getRootDir()).thenReturn(new 
File(getProperty("java.io.tmpdir"))); - when(envvars.expand(any())).thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/*.xml"); + when(envvars.expand(any())) + .thenReturn("src/test/resources/org/jenkinsci/plugins/pipeline/maven/karma-reports/*.xml"); when(launcher.getChannel()).thenReturn(channel); when(channel.call(any(FlakyTestResultCollector.class))).thenReturn(new FlakyTestResult()); diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTriggerTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTriggerTest.java index e8980d58..5919422a 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTriggerTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/trigger/WorkflowJobDependencyTriggerTest.java @@ -2,10 +2,10 @@ import static org.assertj.core.api.Assertions.assertThat; +import hudson.triggers.Trigger; import java.io.File; import java.io.IOException; import java.util.List; - import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; @@ -21,8 +21,6 @@ import org.jvnet.hudson.test.TemporaryDirectoryAllocator; import org.jvnet.hudson.test.junit.jupiter.WithJenkins; -import hudson.triggers.Trigger; - @WithJenkins @TestMethodOrder(OrderAnnotation.class) public class WorkflowJobDependencyTriggerTest { @@ -48,13 +46,14 @@ public void configureJenkins(JenkinsRule r) throws Throwable { @Order(1) public void jobConfiguration() throws Exception { WorkflowJob p = rule.jenkins.createProject(WorkflowJob.class, "p"); - //@formatter:off + // @formatter:off p.setDefinition(new CpsFlowDefinition( "node {\n" + "semaphore 'config'\n" + "properties([ pipelineTriggers([ snapshotDependencies() ]) ])\n" + - "}", true)); - //@formatter:on + "}", + true)); + 
// @formatter:on WorkflowRun b = p.scheduleBuild2(0).waitForStart(); SemaphoreStep.waitForStart("config/1", b); } @@ -69,7 +68,8 @@ public void jobPersisted() throws Exception { SemaphoreStep.success("config/1", null); rule.waitForCompletion(b); rule.assertBuildStatusSuccess(b); - final List> triggers = p.getProperty(PipelineTriggersJobProperty.class).getTriggers(); + final List> triggers = + p.getProperty(PipelineTriggersJobProperty.class).getTriggers(); assertThat(triggers).hasSize(1); assertThat(triggers.get(0)).isExactlyInstanceOf(WorkflowJobDependencyTrigger.class); } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FakeCredentialsProvider.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FakeCredentialsProvider.java index dbec5380..c71faf4c 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FakeCredentialsProvider.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FakeCredentialsProvider.java @@ -3,16 +3,13 @@ import static com.cloudbees.plugins.credentials.CredentialsScope.GLOBAL; import static java.util.Arrays.asList; -import java.util.List; - -import org.acegisecurity.Authentication; - import com.cloudbees.plugins.credentials.Credentials; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.domains.DomainRequirement; import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl; - import hudson.model.ItemGroup; +import java.util.List; +import org.acegisecurity.Authentication; public class FakeCredentialsProvider extends CredentialsProvider { @@ -34,7 +31,10 @@ public boolean isEnabled(Object context) { } @Override - public List getCredentials(Class type, ItemGroup itemGroup, Authentication authentication, + public List getCredentials( + Class type, + ItemGroup itemGroup, + Authentication authentication, List domainRequirements) { UsernamePasswordCredentialsImpl creds 
= new UsernamePasswordCredentialsImpl(GLOBAL, id, "", username, password); creds.setUsernameSecret(usernameIsSecret); @@ -42,8 +42,8 @@ public List getCredentials(Class type, ItemGroup i } @Override - public List getCredentials(Class type, ItemGroup itemGroup, Authentication authentication) { + public List getCredentials( + Class type, ItemGroup itemGroup, Authentication authentication) { return getCredentials(type, itemGroup, authentication, null); } - } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtilsTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtilsTest.java index d0390dcc..0e3ad48c 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtilsTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/FileUtilsTest.java @@ -31,6 +31,7 @@ public void test_isAbsolutePath_with_linux_relative_path() { @Test public void test_isAbsolutePath_with_windows_unc_absolute_path() { - assertThat(FileUtils.isAbsolutePath("\\\\myserver\\jenkins\\workspace\\")).isTrue(); + assertThat(FileUtils.isAbsolutePath("\\\\myserver\\jenkins\\workspace\\")) + .isTrue(); } } diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/WorkflowMultibranchProjectTestsUtils.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/WorkflowMultibranchProjectTestsUtils.java index cda04f71..907de942 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/WorkflowMultibranchProjectTestsUtils.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/WorkflowMultibranchProjectTestsUtils.java @@ -2,12 +2,10 @@ import static org.assertj.core.api.Assertions.fail; -import org.jenkinsci.plugins.workflow.job.WorkflowJob; -import org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject; - import 
com.cloudbees.hudson.plugins.folder.computed.FolderComputation; - import edu.umd.cs.findbugs.annotations.NonNull; +import org.jenkinsci.plugins.workflow.job.WorkflowJob; +import org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject; /** * @author Cyrille Le Clerc @@ -18,7 +16,8 @@ public class WorkflowMultibranchProjectTestsUtils { * @see org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProjectTest#scheduleAndFindBranchProject */ @NonNull - public static WorkflowJob scheduleAndFindBranchProject(@NonNull WorkflowMultiBranchProject mp, @NonNull String name) throws Exception { + public static WorkflowJob scheduleAndFindBranchProject(@NonNull WorkflowMultiBranchProject mp, @NonNull String name) + throws Exception { mp.scheduleBuild2(0).getFuture().get(); return findBranchProject(mp, name); } @@ -27,7 +26,8 @@ public static WorkflowJob scheduleAndFindBranchProject(@NonNull WorkflowMultiBra * @see org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProjectTest#findBranchProject */ @NonNull - public static WorkflowJob findBranchProject(@NonNull WorkflowMultiBranchProject mp, @NonNull String name) throws Exception { + public static WorkflowJob findBranchProject(@NonNull WorkflowMultiBranchProject mp, @NonNull String name) + throws Exception { WorkflowJob p = mp.getItem(name); showIndexing(mp); if (p == null) { diff --git a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtilsTest.java b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtilsTest.java index 4089205e..b15872da 100644 --- a/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtilsTest.java +++ b/pipeline-maven/src/test/java/org/jenkinsci/plugins/pipeline/maven/util/XmlUtilsTest.java @@ -4,16 +4,16 @@ import static org.assertj.core.api.Assertions.catchThrowable; import static org.assertj.core.api.Assertions.fail; +import hudson.FilePath; import java.io.File; import java.io.IOException; import 
java.io.InputStream; import java.io.StringReader; import java.util.Arrays; import java.util.List; - import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; - +import junit.framework.AssertionFailedError; import org.jenkinsci.plugins.pipeline.maven.MavenArtifact; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -22,9 +22,6 @@ import org.xml.sax.InputSource; import org.xml.sax.SAXException; -import hudson.FilePath; -import junit.framework.AssertionFailedError; - /** * @author Cyrille Le Clerc */ @@ -80,7 +77,8 @@ public void test_getExecutionEvents_search_one_type() throws Exception { public void test_getExecutionEvents_search_two_types() throws Exception { String xml = "" + "" + ""; Element documentElement = toXml(xml); - List actualElements = XmlUtils.getExecutionEvents(documentElement, "ProjectSucceeded", "ProjectFailed"); + List actualElements = + XmlUtils.getExecutionEvents(documentElement, "ProjectSucceeded", "ProjectFailed"); assertThat(actualElements.size()).isEqualTo(1); } @@ -96,7 +94,8 @@ public void test_getExecutionEvents_return_empty_searching_one_type() throws Exc public void test_getExecutionEvents_return_empty_searching_two_types() throws Exception { String xml = "" + "" + ""; Element documentElement = toXml(xml); - List actualElements = XmlUtils.getExecutionEvents(documentElement, "ProjectSucceeded", "ProjectFailed"); + List actualElements = + XmlUtils.getExecutionEvents(documentElement, "ProjectSucceeded", "ProjectFailed"); assertThat(actualElements.size()).isEqualTo(0); } @@ -162,8 +161,10 @@ public void test_getPathInWorkspace_windows_with_mixed_separators_ok() { @Test public void test_getPathInWorkspace_windows_mixed_case_ok_JENKINS_45221() { // lowercase versus uppercase "d:\" - String workspace = "d:\\jenkins\\workspace\\d.admin_feature_Jenkinsfile-SCSMHLROYAGBAWY5ZNNG6ALR77MVLEH3F3EFF3O7XN3RO5BL6AMA"; - String absolutePath = 
"D:\\jenkins\\workspace\\d.admin_feature_Jenkinsfile-SCSMHLROYAGBAWY5ZNNG6ALR77MVLEH3F3EFF3O7XN3RO5BL6AMA\\admin\\xyz\\target\\pad-admin-xyz-2.4.0-SNAPSHOT-tests.jar"; + String workspace = + "d:\\jenkins\\workspace\\d.admin_feature_Jenkinsfile-SCSMHLROYAGBAWY5ZNNG6ALR77MVLEH3F3EFF3O7XN3RO5BL6AMA"; + String absolutePath = + "D:\\jenkins\\workspace\\d.admin_feature_Jenkinsfile-SCSMHLROYAGBAWY5ZNNG6ALR77MVLEH3F3EFF3O7XN3RO5BL6AMA\\admin\\xyz\\target\\pad-admin-xyz-2.4.0-SNAPSHOT-tests.jar"; String actual = XmlUtils.getPathInWorkspace(absolutePath, new FilePath(new File(workspace))); String expected = "admin\\xyz\\target\\pad-admin-xyz-2.4.0-SNAPSHOT-tests.jar"; assertThat(actual).isEqualTo(expected); @@ -227,8 +228,10 @@ public void test_getPathInWorkspace_macosx_edge_case() { // relatively to // '/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven' - String workspace = "/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven"; - String absolutePath = "/private/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven/pom.xml"; + String workspace = + "/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven"; + String absolutePath = + "/private/var/folders/lq/50t8n2nx7l316pwm8gc_2rt40000gn/T/jenkinsTests.tmp/jenkins3845105900446934883test/workspace/build-on-master-with-tool-provided-maven/pom.xml"; String actual = XmlUtils.getPathInWorkspace(absolutePath, new FilePath(new File(workspace))); String expected = "pom.xml"; assertThat(actual).isEqualTo(expected); @@ -236,35 +239,58 @@ public void test_getPathInWorkspace_macosx_edge_case() { @Test public void test_getExecutedLifecyclePhases() throws Exception { - 
InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-package-jar.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-package-jar.xml"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List executedLifecyclePhases = XmlUtils.getExecutedLifecyclePhases(mavenSpyLogs); System.out.println(executedLifecyclePhases); - assertThat(executedLifecyclePhases).contains("process-resources", "compile", "process-test-resources", "test-compile", "test", "package"); + assertThat(executedLifecyclePhases) + .contains("process-resources", "compile", "process-test-resources", "test-compile", "test", "package"); } @Test public void test_getArtifactDeployedEvent() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List artifactDeployedEvents = XmlUtils.getArtifactDeployedEvents(mavenSpyLogs); assertThat(artifactDeployedEvents.size()).isEqualTo(3); - Element artifactDeployedEvent = XmlUtils.getArtifactDeployedEvent(artifactDeployedEvents, "/path/to/my-jar/target/my-jar-0.5-SNAPSHOT.jar"); - String repositoryUrl = 
XmlUtils.getUniqueChildElement(artifactDeployedEvent, "repository").getAttribute("url"); + Element artifactDeployedEvent = XmlUtils.getArtifactDeployedEvent( + artifactDeployedEvents, "/path/to/my-jar/target/my-jar-0.5-SNAPSHOT.jar"); + String repositoryUrl = XmlUtils.getUniqueChildElement(artifactDeployedEvent, "repository") + .getAttribute("url"); assertThat(repositoryUrl).isEqualTo("https://nexus.beescloud.com/content/repositories/snapshots/"); } @Test public void test_getExecutionEventsByPlugin() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); - - List executionEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogs, "org.apache.maven.plugins", "maven-deploy-plugin", "deploy", - "MojoSucceeded", "MojoFailed"); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); + + List executionEvents = XmlUtils.getExecutionEventsByPlugin( + mavenSpyLogs, + "org.apache.maven.plugins", + "maven-deploy-plugin", + "deploy", + "MojoSucceeded", + "MojoFailed"); assertThat(executionEvents.size()).isEqualTo(1); Element deployExecutionEvent = executionEvents.get(0); @@ -273,9 +299,14 @@ public void test_getExecutionEventsByPlugin() throws Exception { @Test public void test_listGeneratedArtifacts() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + 
.getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, false); System.out.println(generatedArtifacts); assertThat(generatedArtifacts.size()).isEqualTo(2); // a jar file and a pom file are generated @@ -297,9 +328,14 @@ public void test_listGeneratedArtifacts() throws Exception { @Test public void test_listGeneratedArtifacts_deploy_2_8() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-2.8.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-2.8.xml"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, false); @@ -326,9 +362,14 @@ public void test_listGeneratedArtifacts_deploy_2_8() throws Exception { @Test public void test_listGeneratedArtifacts_deploy_3_0() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-3.0.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-3.0.xml"); in.getClass(); // check non null - Element mavenSpyLogs = 
DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, false); @@ -355,9 +396,14 @@ public void test_listGeneratedArtifacts_deploy_3_0() throws Exception { @Test public void test_listGeneratedArtifacts_including_generated_artifacts() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); + InputStream in = Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-deploy-jar.xml"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, true); System.out.println(generatedArtifacts); assertThat(generatedArtifacts.size()).isEqualTo(3); // a jar file and a pom file are generated @@ -382,10 +428,14 @@ public void test_listGeneratedArtifacts_including_generated_artifacts() throws E @Test public void test_listGeneratedArtifacts_includeAttachedArtifacts() throws Exception { - InputStream in = Thread.currentThread().getContextClassLoader() + InputStream in = Thread.currentThread() + .getContextClassLoader() .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/maven-spy-include-attached-artifacts.log"); in.getClass(); // check non null - Element mavenSpyLogs = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in).getDocumentElement(); + Element mavenSpyLogs = DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(in) + .getDocumentElement(); List 
generatedArtifacts = XmlUtils.listGeneratedArtifacts(mavenSpyLogs, true); System.out.println(generatedArtifacts); assertThat(generatedArtifacts.size()).isEqualTo(2); // pom artifact plus 1 attachment diff --git a/pom.xml b/pom.xml index 4c2d0c08..8b060468 100644 --- a/pom.xml +++ b/pom.xml @@ -72,10 +72,10 @@ + pipeline-maven pipeline-maven-api pipeline-maven-database pipeline-maven-spy - pipeline-maven @@ -94,7 +94,6 @@ 5.0.0 1.2.2 1.4.199 - 3.3.2 0.15.1 2.5 3.22 @@ -107,26 +106,18 @@ 3.2.0 3.3.9 0.3.5 - 3.3.0 3.4.2 3.8.8 3.5.1 1.4.13 3.1.0 2.0.9 + false 1.19.0 - - - - - org.codehaus.plexus - plexus-utils - ${plexus-utils.version} - io.jenkins.tools.bom @@ -135,12 +126,21 @@ pom import - - - - + + org.junit + junit-bom + ${junit.version} + pom + import + + + org.testcontainers + testcontainers-bom + ${testcontainers.version} + pom + import + + org.apache.maven maven-artifact @@ -156,14 +156,11 @@ plexus-utils ${plexus-utils.version} - - org.junit - junit-bom - ${junit.version} - pom - import + + org.jenkins-ci.plugins + pipeline-maven-api + ${project.version} - org.slf4j slf4j-api @@ -175,25 +172,12 @@ ${slf4j.version} - - org.jenkins-ci.plugins - pipeline-maven-api - ${project.version} - org.assertj assertj-core ${assertj.version} test - - - org.testcontainers - testcontainers-bom - ${testcontainers.version} - pom - import - @@ -261,47 +245,42 @@ - - com.github.ekryd.sortpom - sortpom-maven-plugin - ${maven-plugin-sortpom.version} - - false - ${project.build.sourceEncoding} - false - true - \n - false - 2 - scope,groupId,artifactId - groupId,artifactId - true - - com.spotify dockerfile-maven-plugin ${plugin-dockerfile.version} - org.codehaus.mojo - exec-maven-plugin - ${plugin-exec.version} + org.codehaus.mojo + exec-maven-plugin + ${plugin-exec.version} - com.github.ekryd.sortpom - sortpom-maven-plugin - - - - sort - - verify - - + com.diffplug.spotless + spotless-maven-plugin + + + + @formatter:off + @formatter:on + + + + + true + true + true + 
groupId,artifactId + + + @formatter:off + @formatter:on + + + org.apache.maven.plugins