listTransitiveUpstreamJobs(
+ String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) {
return delegate.listTransitiveUpstreamJobs(jobFullName, buildNumber, upstreamMemory);
}
@@ -123,8 +177,14 @@ public String toPrettyString() {
}
@Override
- public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) {
- delegate.updateBuildOnCompletion(jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis);
+ public void updateBuildOnCompletion(
+ @NonNull String jobFullName,
+ int buildNumber,
+ int buildResultOrdinal,
+ long startTimeInMillis,
+ long durationInMillis) {
+ delegate.updateBuildOnCompletion(
+ jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis);
}
@Override
diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java
index 134ec160..48f12c2a 100644
--- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java
+++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CacheStats.java
@@ -25,4 +25,4 @@ public int getHits() {
public int getMisses() {
return misses;
}
-}
\ No newline at end of file
+}
diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java
index 5f3563fd..1d421a4c 100644
--- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java
+++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/CustomTypePipelineMavenPluginDaoDecorator.java
@@ -18,14 +18,8 @@ public class CustomTypePipelineMavenPluginDaoDecorator extends AbstractPipelineM
*
* See https://maven.apache.org/ref/3.8.4/maven-core/artifact-handlers.html for more details.
*/
- private static final List KNOWN_JAR_TYPES_WITH_DIFFERENT_EXTENSION = Arrays.asList(
- "test-jar",
- "maven-plugin",
- "ejb",
- "ejb-client",
- "java-source",
- "javadoc"
- );
+ private static final List KNOWN_JAR_TYPES_WITH_DIFFERENT_EXTENSION =
+ Arrays.asList("test-jar", "maven-plugin", "ejb", "ejb-client", "java-source", "javadoc");
private final Logger LOGGER = Logger.getLogger(getClass().getName());
@@ -34,12 +28,48 @@ public CustomTypePipelineMavenPluginDaoDecorator(@NonNull PipelineMavenPluginDao
}
@Override
- public void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion, @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) {
- super.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, type, baseVersion, repositoryUrl, skipDownstreamTriggers, extension, classifier);
+ public void recordGeneratedArtifact(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @NonNull String baseVersion,
+ @Nullable String repositoryUrl,
+ boolean skipDownstreamTriggers,
+ String extension,
+ String classifier) {
+ super.recordGeneratedArtifact(
+ jobFullName,
+ buildNumber,
+ groupId,
+ artifactId,
+ version,
+ type,
+ baseVersion,
+ repositoryUrl,
+ skipDownstreamTriggers,
+ extension,
+ classifier);
if (shouldReportAgainWithExtensionAsType(type, extension)) {
- LOGGER.log(Level.FINE, "Recording generated artifact " + groupId + ":" + artifactId + ":" + version + " as " + extension + " (in addition to " + type + ")");
- super.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, extension, baseVersion, repositoryUrl, skipDownstreamTriggers, extension, classifier);
+ LOGGER.log(
+ Level.FINE,
+ "Recording generated artifact " + groupId + ":" + artifactId + ":" + version + " as " + extension
+ + " (in addition to " + type + ")");
+ super.recordGeneratedArtifact(
+ jobFullName,
+ buildNumber,
+ groupId,
+ artifactId,
+ version,
+ extension,
+ baseVersion,
+ repositoryUrl,
+ skipDownstreamTriggers,
+ extension,
+ classifier);
}
}
@@ -50,5 +80,4 @@ private boolean shouldReportAgainWithExtensionAsType(String type, String extensi
return type != null && !type.equals(extension);
}
-
}
diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java
index 47ed96b4..31630b6e 100644
--- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java
+++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/MonitoringPipelineMavenPluginDaoDecorator.java
@@ -1,13 +1,9 @@
package org.jenkinsci.plugins.pipeline.maven.dao;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
+import static java.util.Optional.ofNullable;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
-
-import static java.util.Optional.ofNullable;
-
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
@@ -17,10 +13,12 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
+import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
public class MonitoringPipelineMavenPluginDaoDecorator extends AbstractPipelineMavenPluginDaoDecorator {
- private final static List> CACHE_STATS_SUPPLIERS = new ArrayList<>();
+ private static final List> CACHE_STATS_SUPPLIERS = new ArrayList<>();
public static void registerCacheStatsSupplier(Supplier supplier) {
CACHE_STATS_SUPPLIERS.add(supplier);
@@ -36,23 +34,72 @@ public MonitoringPipelineMavenPluginDaoDecorator(@NonNull PipelineMavenPluginDao
}
@Override
- public void recordDependency(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String scope, boolean ignoreUpstreamTriggers, String classifier) {
- executeMonitored(() -> super.recordDependency(jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers, classifier));
- }
-
- @Override
- public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) {
- executeMonitored(() -> super.recordParentProject(jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers));
- }
-
- @Override
- public void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber, @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion, @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) {
- executeMonitored(() -> super.recordGeneratedArtifact(jobFullName, buildNumber, groupId, artifactId, version, type, baseVersion, repositoryUrl, skipDownstreamTriggers, extension, classifier));
- }
-
- @Override
- public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) {
- executeMonitored(() -> super.recordBuildUpstreamCause(upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber));
+ public void recordDependency(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @NonNull String scope,
+ boolean ignoreUpstreamTriggers,
+ String classifier) {
+ executeMonitored(() -> super.recordDependency(
+ jobFullName,
+ buildNumber,
+ groupId,
+ artifactId,
+ version,
+ type,
+ scope,
+ ignoreUpstreamTriggers,
+ classifier));
+ }
+
+ @Override
+ public void recordParentProject(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String parentGroupId,
+ @NonNull String parentArtifactId,
+ @NonNull String parentVersion,
+ boolean ignoreUpstreamTriggers) {
+ executeMonitored(() -> super.recordParentProject(
+ jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers));
+ }
+
+ @Override
+ public void recordGeneratedArtifact(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @NonNull String baseVersion,
+ @Nullable String repositoryUrl,
+ boolean skipDownstreamTriggers,
+ String extension,
+ String classifier) {
+ executeMonitored(() -> super.recordGeneratedArtifact(
+ jobFullName,
+ buildNumber,
+ groupId,
+ artifactId,
+ version,
+ type,
+ baseVersion,
+ repositoryUrl,
+ skipDownstreamTriggers,
+ extension,
+ classifier));
+ }
+
+ @Override
+ public void recordBuildUpstreamCause(
+ String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) {
+ executeMonitored(() -> super.recordBuildUpstreamCause(
+ upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber));
}
@Override
@@ -91,14 +138,17 @@ public List listDownstreamJobs(@NonNull String jobFullName, int buildNum
@NonNull
@Override
- public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) {
+ public Map> listDownstreamJobsByArtifact(
+ @NonNull String jobFullName, int buildNumber) {
return executeMonitored(() -> super.listDownstreamJobsByArtifact(jobFullName, buildNumber));
}
@NonNull
@Override
- public SortedSet listDownstreamJobs(String groupId, String artifactId, String version, String baseVersion, String type, String classifier) {
- return executeMonitored(() -> super.listDownstreamJobs(groupId, artifactId, version, baseVersion, type, classifier));
+ public SortedSet listDownstreamJobs(
+ String groupId, String artifactId, String version, String baseVersion, String type, String classifier) {
+ return executeMonitored(
+ () -> super.listDownstreamJobs(groupId, artifactId, version, baseVersion, type, classifier));
}
@Override
@@ -119,16 +169,29 @@ public void cleanup() {
}
@Override
- public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) {
- executeMonitored(() -> super.updateBuildOnCompletion(jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis));
+ public void updateBuildOnCompletion(
+ @NonNull String jobFullName,
+ int buildNumber,
+ int buildResultOrdinal,
+ long startTimeInMillis,
+ long durationInMillis) {
+ executeMonitored(() -> super.updateBuildOnCompletion(
+ jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis));
}
@Override
public String toPrettyString() {
- StringBuilder builder = new StringBuilder(ofNullable(super.toPrettyString()).orElse(""));
+ StringBuilder builder =
+ new StringBuilder(ofNullable(super.toPrettyString()).orElse(""));
builder.append("\r\n Performances: ");
- builder.append("\r\n\t find: totalDurationInMs=").append(TimeUnit.NANOSECONDS.toMillis(findDurationInNanos.get())).append(", count=").append(findCount.get());
- builder.append("\r\n\t write: totalDurationInMs=").append(TimeUnit.NANOSECONDS.toMillis(writeDurationInNanos.get())).append(", count=").append(writeCount.get());
+ builder.append("\r\n\t find: totalDurationInMs=")
+ .append(TimeUnit.NANOSECONDS.toMillis(findDurationInNanos.get()))
+ .append(", count=")
+ .append(findCount.get());
+ builder.append("\r\n\t write: totalDurationInMs=")
+ .append(TimeUnit.NANOSECONDS.toMillis(writeDurationInNanos.get()))
+ .append(", count=")
+ .append(writeCount.get());
builder.append("\r\n Caches: ");
CACHE_STATS_SUPPLIERS.forEach(s -> builder.append("\r\n\t ").append(cachePrettyString(s.get())));
return builder.toString();
@@ -177,5 +240,4 @@ private interface CallableWithResult {
private interface CallableWithoutResult {
void call();
}
-
}
diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java
index ac962556..6653e33a 100644
--- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java
+++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginDao.java
@@ -24,19 +24,18 @@
package org.jenkinsci.plugins.pipeline.maven.dao;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.model.Item;
import hudson.model.Run;
import hudson.util.FormValidation;
-import org.apache.maven.artifact.Artifact;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
import java.io.Closeable;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
+import org.apache.maven.artifact.Artifact;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
+import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
/**
* @author Cyrille Le Clerc
@@ -55,9 +54,16 @@ public interface PipelineMavenPluginDao extends Closeable {
* @param ignoreUpstreamTriggers see PipelineGraphPublisher#isIgnoreUpstreamTriggers()
* @param classifier Maven dependency classifier
*/
- void recordDependency(@NonNull String jobFullName, int buildNumber,
- @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String scope,
- boolean ignoreUpstreamTriggers, String classifier);
+ void recordDependency(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @NonNull String scope,
+ boolean ignoreUpstreamTriggers,
+ String classifier);
/**
* Record a Maven parent project of a pom processed by this build of a build.
@@ -69,9 +75,13 @@ void recordDependency(@NonNull String jobFullName, int buildNumber,
* @param parentVersion Maven dependency version
* @param ignoreUpstreamTriggers see PipelineGraphPublisher#isIgnoreUpstreamTriggers()
*/
- void recordParentProject(@NonNull String jobFullName, int buildNumber,
- @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion,
- boolean ignoreUpstreamTriggers);
+ void recordParentProject(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String parentGroupId,
+ @NonNull String parentArtifactId,
+ @NonNull String parentVersion,
+ boolean ignoreUpstreamTriggers);
/**
* Record a Maven artifact generated in a build.
* @param jobFullName see {@link Item#getFullName()}
@@ -79,19 +89,28 @@ void recordParentProject(@NonNull String jobFullName, int buildNumber,
* @param groupId Maven artifact groupId
* @param artifactId Maven artifact artifactId
* @param version Maven artifact version, the "expanded version" for snapshots who have been "mvn deploy" or equivalent
-* (e.g. "1.1-20170808.155524-66" for "1.1-SNAPSHOT" deployed to a repo)
+ * (e.g. "1.1-20170808.155524-66" for "1.1-SNAPSHOT" deployed to a repo)
* @param type Maven artifact type (e.g. "jar", "war", "pom", hpi"...)
* @param baseVersion Maven artifact version, the NOT "expanded version" for snapshots who have been "mvn deploy" or equivalent
-* (e.g. baseVersion is "1.1-SNAPSHOT" for a "1.1-SNAPSHOT" artifact that has been deployed to a repo and expanded
-* to "1.1-20170808.155524-66")
+ * (e.g. baseVersion is "1.1-SNAPSHOT" for a "1.1-SNAPSHOT" artifact that has been deployed to a repo and expanded
+ * to "1.1-20170808.155524-66")
* @param repositoryUrl URL of the Maven repository on which the artifact is deployed ("mvn deploy"). {@code null} if the artifact was not deployed
* @param skipDownstreamTriggers see PipelineGraphPublisher#isSkipDownstreamTriggers()
* @param extension
* @param classifier
*/
- void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber,
- @NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @NonNull String baseVersion,
- @Nullable String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier);
+ void recordGeneratedArtifact(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @NonNull String baseVersion,
+ @Nullable String repositoryUrl,
+ boolean skipDownstreamTriggers,
+ String extension,
+ String classifier);
/**
* TODO add {@link MavenArtifact} as org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyUpstreamCause gives these details
@@ -100,7 +119,8 @@ void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber,
* @param downstreamJobName Job that is triggered. See {@link Item#getFullName()}.
* @param downstreamBuildNumber Job that is triggered. See {@link Run#getNumber()}.
*/
- void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber);
+ void recordBuildUpstreamCause(
+ String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber);
/**
* Return the dependencies registered by the given build.
@@ -190,7 +210,12 @@ void recordGeneratedArtifact(@NonNull String jobFullName, int buildNumber,
* @see Item#getFullName()
*/
@NonNull
- default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type) {
+ default SortedSet listDownstreamJobs(
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @Nullable String baseVersion,
+ @NonNull String type) {
return listDownstreamJobs(groupId, artifactId, version, baseVersion, type, null);
}
@@ -207,7 +232,13 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S
* @see Item#getFullName()
*/
@NonNull
- SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type, @Nullable String classifier);
+ SortedSet listDownstreamJobs(
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @Nullable String baseVersion,
+ @NonNull String type,
+ @Nullable String classifier);
/**
* List the upstream jobs who generate an artifact that the given build depends on
@@ -222,7 +253,7 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S
*/
@NonNull
Map listUpstreamJobs(@NonNull String jobFullName, int buildNumber);
-
+
/**
* List the upstream jobs who generate an artifact that the given build depends
* on, including transitive dependencies (build identified by the given
@@ -240,7 +271,7 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S
* List the upstream jobs who generate an artifact that the given build depends
* on, including transitive dependencies (build identified by the given
* {@code jobFullName}, {@code buildNumber})
- *
+ *
* Use a memory for already known upstreams to boost performance
*
* @param jobFullName see {@link Item#getFullName()}
@@ -250,7 +281,8 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S
* @see Item#getFullName()
*/
@NonNull
- Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory);
+ Map listTransitiveUpstreamJobs(
+ @NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory);
/**
* Routine task to cleanup the database and reclaim disk space (if possible in the underlying database).
@@ -264,7 +296,6 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S
String getDescription();
-
/**
* Update the database with build result details.
*
@@ -274,7 +305,12 @@ default SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull S
* @param startTimeInMillis see {@link Run#getStartTimeInMillis()}
* @param durationInMillis see {@link Run#getDuration()}
*/
- void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis);
+ void updateBuildOnCompletion(
+ @NonNull String jobFullName,
+ int buildNumber,
+ int buildResultOrdinal,
+ long startTimeInMillis,
+ long durationInMillis);
/**
* Indicates if the underlying database is production grade enough for the workload.
@@ -328,7 +364,6 @@ public Config() {
PipelineMavenPluginDao build(Config config);
FormValidation validateConfiguration(Config config);
-
}
Builder getBuilder();
diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java
index 31212042..a5f68102 100644
--- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java
+++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/PipelineMavenPluginNullDao.java
@@ -24,13 +24,9 @@
package org.jenkinsci.plugins.pipeline.maven.dao;
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import hudson.util.FormValidation;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-
import java.io.IOException;
import java.util.Collections;
import java.util.List;
@@ -39,6 +35,8 @@
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
+import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
/**
* @author Cyrille Le Clerc
@@ -68,9 +66,22 @@ public FormValidation validateConfiguration(Config config) {
}
@Override
- public void recordDependency(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String scope, boolean ignoreUpstreamTriggers, String classifier) {
- LOGGER.log(Level.FINEST, "NOT recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})",
- new Object[]{jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers});
+ public void recordDependency(
+ String jobFullName,
+ int buildNumber,
+ String groupId,
+ String artifactId,
+ String version,
+ String type,
+ String scope,
+ boolean ignoreUpstreamTriggers,
+ String classifier) {
+ LOGGER.log(
+ Level.FINEST,
+                "NOT recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7})",
+ new Object[] {
+ jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers
+ });
}
@NonNull
@@ -80,41 +91,69 @@ public List listDependencies(@NonNull String jobFullName, int b
}
@Override
- public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) {
- LOGGER.log(Level.FINEST, "NOT recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})",
- new Object[]{jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers});
-
- }
-
- @Override
- public void recordGeneratedArtifact(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String baseVersion, String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) {
- LOGGER.log(Level.FINEST, "NOT recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})",
- new Object[]{jobFullName, buildNumber, groupId, artifactId, baseVersion, type, version, repositoryUrl, skipDownstreamTriggers});
-
- }
-
- @Override
- public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) {
- LOGGER.log(Level.FINEST, "NOT recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})",
- new Object[]{upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber});
+ public void recordParentProject(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String parentGroupId,
+ @NonNull String parentArtifactId,
+ @NonNull String parentVersion,
+ boolean ignoreUpstreamTriggers) {
+        LOGGER.log(Level.FINEST, "NOT recordParentProject({0}#{1}, {2}:{3}:{4}, ignoreUpstreamTriggers:{5})", new Object[] {
+ jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers
+ });
+ }
+
+ @Override
+ public void recordGeneratedArtifact(
+ String jobFullName,
+ int buildNumber,
+ String groupId,
+ String artifactId,
+ String version,
+ String type,
+ String baseVersion,
+ String repositoryUrl,
+ boolean skipDownstreamTriggers,
+ String extension,
+ String classifier) {
+ LOGGER.log(
+ Level.FINEST,
+ "NOT recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})",
+ new Object[] {
+ jobFullName,
+ buildNumber,
+ groupId,
+ artifactId,
+ baseVersion,
+ type,
+ version,
+ repositoryUrl,
+ skipDownstreamTriggers
+ });
+ }
+
+ @Override
+ public void recordBuildUpstreamCause(
+ String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) {
+ LOGGER.log(
+ Level.FINEST,
+ "NOT recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})",
+ new Object[] {upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber});
}
@Override
public void renameJob(String oldFullName, String newFullName) {
- LOGGER.log(Level.FINEST, "NOT renameJob({0}, {1})", new Object[]{oldFullName, newFullName});
-
+ LOGGER.log(Level.FINEST, "NOT renameJob({0}, {1})", new Object[] {oldFullName, newFullName});
}
@Override
public void deleteJob(String jobFullName) {
- LOGGER.log(Level.FINEST, "NOT deleteJob({0})", new Object[]{jobFullName});
-
+ LOGGER.log(Level.FINEST, "NOT deleteJob({0})", new Object[] {jobFullName});
}
@Override
public void deleteBuild(String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINEST, "NOT deleteBuild({0}#{1})", new Object[]{jobFullName, buildNumber});
-
+ LOGGER.log(Level.FINEST, "NOT deleteBuild({0}#{1})", new Object[] {jobFullName, buildNumber});
}
@NonNull
@@ -126,13 +165,15 @@ public List listDownstreamJobs(@NonNull String jobFullName, int buildNum
@NonNull
@Override
- public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) {
+ public Map> listDownstreamJobsByArtifact(
+ @NonNull String jobFullName, int buildNumber) {
return Collections.emptyMap();
}
@NonNull
@Override
- public SortedSet listDownstreamJobs(String groupId, String artifactId, String version, String baseVersion, String type, String classifier) {
+ public SortedSet listDownstreamJobs(
+ String groupId, String artifactId, String version, String baseVersion, String type, String classifier) {
return new TreeSet<>();
}
@@ -150,8 +191,8 @@ public Map listTransitiveUpstreamJobs(String jobFullName, int b
@NonNull
@Override
- public Map listTransitiveUpstreamJobs(String jobFullName, int buildNumber,
- UpstreamMemory upstreamMemory) {
+ public Map listTransitiveUpstreamJobs(
+ String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) {
return Collections.emptyMap();
}
@@ -167,9 +208,16 @@ public List getGeneratedArtifacts(@NonNull String jobFullName, in
}
@Override
- public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) {
- LOGGER.log(Level.FINEST, "NOOT updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}",
- new Object[]{jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis});
+ public void updateBuildOnCompletion(
+ @NonNull String jobFullName,
+ int buildNumber,
+ int buildResultOrdinal,
+ long startTimeInMillis,
+ long durationInMillis) {
+ LOGGER.log(
+ Level.FINEST,
+                "NOT updateBuildOnCompletion({0}, {1}, result: {2}, startTime: {3}, duration: {4})",
+ new Object[] {jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis});
}
@Override
@@ -186,5 +234,4 @@ public boolean isEnoughProductionGradeForTheWorkload() {
public void close() throws IOException {
// no op
}
-
}
diff --git a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java
index 2d18f963..b079c7fa 100644
--- a/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java
+++ b/pipeline-maven-api/src/main/java/org/jenkinsci/plugins/pipeline/maven/dao/UpstreamMemory.java
@@ -20,7 +20,8 @@ public class UpstreamMemory {
private static final AtomicInteger MISSES = new AtomicInteger();
static {
- MonitoringPipelineMavenPluginDaoDecorator.registerCacheStatsSupplier(() -> new CacheStats("listUpstreamJobs", HITS.get(), MISSES.get()));
+ MonitoringPipelineMavenPluginDaoDecorator.registerCacheStatsSupplier(
+ () -> new CacheStats("listUpstreamJobs", HITS.get(), MISSES.get()));
}
// remember the already known upstreams
@@ -35,5 +36,4 @@ public Map listUpstreamJobs(PipelineMavenPluginDao dao, String
}
return upstreams.computeIfAbsent(key, k -> dao.listUpstreamJobs(jobFullName, buildNumber));
}
-
}
diff --git a/pipeline-maven-database/pom.xml b/pipeline-maven-database/pom.xml
index 4640ae7e..48cacf8f 100644
--- a/pipeline-maven-database/pom.xml
+++ b/pipeline-maven-database/pom.xml
@@ -7,18 +7,10 @@
${changelist}
pipeline-maven-database
- Pipeline Maven Plugin Database
hpi
+ Pipeline Maven Plugin Database
-
- org.jenkins-ci.plugins
- pipeline-maven-api
-
-
- org.jenkins-ci.plugins
- credentials
-
com.zaxxer
HikariCP
@@ -54,6 +46,14 @@
${jenkins-plugin-postgresql.version}
true
+
+ org.jenkins-ci.plugins
+ credentials
+
+
+ org.jenkins-ci.plugins
+ pipeline-maven-api
+
org.slf4j
slf4j-simple
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java
index d726522e..7c976e6b 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/AbstractPipelineMavenPluginDao.java
@@ -29,6 +29,8 @@
import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.ExtensionList;
import hudson.model.Item;
@@ -37,21 +39,6 @@
import hudson.security.ACL;
import hudson.util.FormValidation;
import hudson.util.Secret;
-import jenkins.model.Jenkins;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.h2.api.ErrorCode;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
-import org.jenkinsci.plugins.pipeline.maven.dao.*;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-import org.jenkinsci.plugins.pipeline.maven.db.util.ClassUtils;
-import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeIoException;
-import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-import javax.sql.DataSource;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
@@ -71,13 +58,26 @@
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
+import javax.sql.DataSource;
+import jenkins.model.Jenkins;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.h2.api.ErrorCode;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
+import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
+import org.jenkinsci.plugins.pipeline.maven.dao.*;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
+import org.jenkinsci.plugins.pipeline.maven.db.util.ClassUtils;
+import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeIoException;
+import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException;
/**
* @author Cyrille Le Clerc
*/
public abstract class AbstractPipelineMavenPluginDao implements PipelineMavenPluginDao {
- private static final int OPTIMIZATION_MAX_RECURSION_DEPTH = Integer.getInteger("org.jenkinsci.plugins.pipeline.PipelineMavenPluginDao.OPTIMIZATION_MAX_RECURSION_DEPTH",3);
+ private static final int OPTIMIZATION_MAX_RECURSION_DEPTH = Integer.getInteger(
+ "org.jenkinsci.plugins.pipeline.PipelineMavenPluginDao.OPTIMIZATION_MAX_RECURSION_DEPTH", 3);
protected final Logger LOGGER = Logger.getLogger(getClass().getName());
@NonNull
@@ -118,38 +118,40 @@ public PipelineMavenPluginDao build(Config config) {
PipelineMavenPluginDao dao;
try {
String jdbcUrl = config.getJdbcUrl();
- if(StringUtils.isBlank(jdbcUrl)) {
+ if (StringUtils.isBlank(jdbcUrl)) {
// some dao such h2 can use default jdbc ur
Optional optionalPipelineMavenPluginDao =
- ExtensionList.lookup(PipelineMavenPluginDao.class)
- .stream()
- .filter(pipelineMavenPluginDao -> pipelineMavenPluginDao.getClass().getName().equals(pipelineMavenPluginDaoClass.getName()))
- .findFirst();
- if(optionalPipelineMavenPluginDao.isPresent()){
+ ExtensionList.lookup(PipelineMavenPluginDao.class).stream()
+ .filter(pipelineMavenPluginDao -> pipelineMavenPluginDao
+ .getClass()
+ .getName()
+ .equals(pipelineMavenPluginDaoClass.getName()))
+ .findFirst();
+ if (optionalPipelineMavenPluginDao.isPresent()) {
jdbcUrl = optionalPipelineMavenPluginDao.get().getDefaultJdbcUrl();
}
}
String jdbcUserName, jdbcPassword;
- if (StringUtils.isBlank(config.getCredentialsId()) && !AbstractPipelineMavenPluginDao.this.acceptNoCredentials())
+ if (StringUtils.isBlank(config.getCredentialsId())
+ && !AbstractPipelineMavenPluginDao.this.acceptNoCredentials())
throw new IllegalStateException("No credentials defined for JDBC URL '" + jdbcUrl + "'");
UsernamePasswordCredentials jdbcCredentials = null;
if (!StringUtils.isBlank(config.getCredentialsId())) {
jdbcCredentials = (UsernamePasswordCredentials) CredentialsMatchers.firstOrNull(
- CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, j,
- ACL.SYSTEM, Collections.EMPTY_LIST),
+ CredentialsProvider.lookupCredentials(
+ UsernamePasswordCredentials.class, j, ACL.SYSTEM, Collections.EMPTY_LIST),
CredentialsMatchers.withId(config.getCredentialsId()));
}
if (jdbcCredentials == null && pipelineMavenPluginDaoClass == PipelineMavenPluginH2Dao.class) {
jdbcUserName = "sa";
jdbcPassword = "sa";
- }
- else if (jdbcCredentials == null) {
- throw new IllegalStateException("Credentials '" + config.getCredentialsId() + "' defined for JDBC URL '" + jdbcUrl + "' NOT found");
- }
- else {
+ } else if (jdbcCredentials == null) {
+ throw new IllegalStateException("Credentials '" + config.getCredentialsId()
+ + "' defined for JDBC URL '" + jdbcUrl + "' NOT found");
+ } else {
jdbcUserName = jdbcCredentials.getUsername();
jdbcPassword = Secret.toString(jdbcCredentials.getPassword());
}
@@ -157,7 +159,8 @@ else if (jdbcCredentials == null) {
HikariConfig dsConfig = createHikariConfig(config.getProperties(), jdbcUrl, jdbcUserName, jdbcPassword);
dsConfig.setAutoCommit(false);
- // TODO cleanup this quick fix for JENKINS-54587, we should have a better solution with the JDBC driver loaded by the DAO itself
+ // TODO cleanup this quick fix for JENKINS-54587, we should have a better solution with the JDBC driver
+ // loaded by the DAO itself
try {
DriverManager.getDriver(jdbcUrl);
} catch (SQLException e) {
@@ -173,13 +176,15 @@ else if (jdbcCredentials == null) {
try {
Class.forName("com.mysql.cj.jdbc.Driver");
} catch (ClassNotFoundException cnfe) {
- throw new RuntimeException("MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver");
+ throw new RuntimeException(
+ "MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver");
}
} else if (jdbcUrl.startsWith("jdbc:postgresql:")) {
try {
Class.forName("org.postgresql.Driver");
} catch (ClassNotFoundException cnfe) {
- throw new RuntimeException("PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL Database Plugin' to install the PostgreSQL driver");
+ throw new RuntimeException(
+ "PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL Database Plugin' to install the PostgreSQL driver");
}
} else {
throw new IllegalArgumentException("Unsupported database type in JDBC URL " + jdbcUrl);
@@ -190,18 +195,21 @@ else if (jdbcCredentials == null) {
}
}
- LOGGER.log(Level.INFO, "Connect to database {0} with username {1}", new Object[]{jdbcUrl, jdbcUserName});
+ LOGGER.log(
+ Level.INFO, "Connect to database {0} with username {1}", new Object[] {jdbcUrl, jdbcUserName});
DataSource ds = new HikariDataSource(dsConfig);
try {
- dao = new MonitoringPipelineMavenPluginDaoDecorator(
- new CustomTypePipelineMavenPluginDaoDecorator((PipelineMavenPluginDao)pipelineMavenPluginDaoClass
+ dao = new MonitoringPipelineMavenPluginDaoDecorator(new CustomTypePipelineMavenPluginDaoDecorator(
+ (PipelineMavenPluginDao) pipelineMavenPluginDaoClass
.getConstructor(DataSource.class)
.newInstance(ds)));
} catch (Exception e) {
throw new SQLException(
- "Exception connecting to '" + jdbcUrl + "' with credentials '" + config.getCredentialsId() + "' (" +
- jdbcUserName + "/***) and DAO " + getClass().getSimpleName(), e);
+ "Exception connecting to '" + jdbcUrl + "' with credentials '" + config.getCredentialsId()
+ + "' (" + jdbcUserName + "/***) and DAO "
+ + getClass().getSimpleName(),
+ e);
}
} catch (RuntimeException | SQLException e) {
LOGGER.log(Level.WARNING, "Exception creating database dao, skip", e);
@@ -234,9 +242,15 @@ public FormValidation validateConfiguration(Config config) {
Class.forName(driverClass);
} catch (ClassNotFoundException e) {
if ("com.mysql.cj.jdbc.Driver".equals(driverClass)) {
- return FormValidation.error("MySQL JDBC driver '" + driverClass + "' not found, please install the Jenkins 'MySQL API Plugin'", e);
+ return FormValidation.error(
+ "MySQL JDBC driver '" + driverClass
+ + "' not found, please install the Jenkins 'MySQL API Plugin'",
+ e);
} else if ("org.postgresql.Driver".equals(driverClass)) {
- return FormValidation.error("PostgreSQL JDBC driver '" + driverClass + "' not found, please install the Jenkins 'PostgreSQL API Plugin'" + jdbcUrl, e);
+ return FormValidation.error(
+ "PostgreSQL JDBC driver '" + driverClass
+ + "' not found, please install the Jenkins 'PostgreSQL API Plugin'" + jdbcUrl,
+ e);
} else {
throw e;
}
@@ -253,12 +267,17 @@ public FormValidation validateConfiguration(Config config) {
return FormValidation.error("No credentials specified for JDBC url '" + jdbcUrl + "'");
}
} else {
- UsernamePasswordCredentials jdbcCredentials = (UsernamePasswordCredentials) CredentialsMatchers.firstOrNull(
- CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, Jenkins.get(),
- ACL.SYSTEM, Collections.EMPTY_LIST),
- CredentialsMatchers.withId(jdbcCredentialsId));
+ UsernamePasswordCredentials jdbcCredentials =
+ (UsernamePasswordCredentials) CredentialsMatchers.firstOrNull(
+ CredentialsProvider.lookupCredentials(
+ UsernamePasswordCredentials.class,
+ Jenkins.get(),
+ ACL.SYSTEM,
+ Collections.EMPTY_LIST),
+ CredentialsMatchers.withId(jdbcCredentialsId));
if (jdbcCredentials == null) {
- return FormValidation.error("Credentials '" + jdbcCredentialsId + "' defined for JDBC URL '" + jdbcUrl + "' not found");
+ return FormValidation.error("Credentials '" + jdbcCredentialsId + "' defined for JDBC URL '"
+ + jdbcUrl + "' not found");
}
jdbcUserName = jdbcCredentials.getUsername();
jdbcPassword = Secret.toString(jdbcCredentials.getPassword());
@@ -273,34 +292,39 @@ public FormValidation validateConfiguration(Config config) {
// * MySQL: "8.0.13"
// * Amazon Aurora: "5.6.10"
// * MariaDB: "5.5.5-10.2.20-MariaDB", "5.5.5-10.3.11-MariaDB-1:10.3.11+maria~bionic"
- String databaseVersionDescription = metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
+ String databaseVersionDescription =
+ metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
LOGGER.log(Level.INFO, "Checking JDBC connection against " + databaseVersionDescription);
- String databaseRequirement = "MySQL Server up to 8.1 or Amazon Aurora MySQL 5.6+ or MariaDB up to 11.1 or PostgreSQL up to 16 is required";
+ String databaseRequirement =
+ "MySQL Server up to 8.1 or Amazon Aurora MySQL 5.6+ or MariaDB up to 11.1 or PostgreSQL up to 16 is required";
if ("MariaDB".equals(metaData.getDatabaseProductName())) {
@Nullable
- String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion(metaData.getDatabaseProductVersion());
+ String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion(
+ metaData.getDatabaseProductVersion());
if (mariaDbVersion == null || !mariaDbVersion.matches("^(10|11)\\..*")) {
- return FormValidation.warning("Non tested MariaDB version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
+ return FormValidation.warning("Non tested MariaDB version "
+ + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
}
} else if ("MySQL".equals(metaData.getDatabaseProductName())) {
- @Nullable
- String amazonAuroraVersion;
+ @Nullable String amazonAuroraVersion;
try (Statement stmt = cnn.createStatement()) {
try (ResultSet rst = stmt.executeQuery("select AURORA_VERSION()")) {
rst.next();
amazonAuroraVersion = rst.getString(1);
databaseVersionDescription += " / Aurora " + rst.getString(1);
} catch (SQLException e) {
- if (e.getErrorCode() == 1305) { // com.mysql.cj.exceptions.MysqlErrorNumbers.ER_SP_DOES_NOT_EXIST
+ if (e.getErrorCode()
+ == 1305) { // com.mysql.cj.exceptions.MysqlErrorNumbers.ER_SP_DOES_NOT_EXIST
amazonAuroraVersion = null;
} else {
- LOGGER.log(Level.WARNING,"Exception checking Amazon Aurora version", e);
+ LOGGER.log(Level.WARNING, "Exception checking Amazon Aurora version", e);
amazonAuroraVersion = null;
}
}
}
@Nullable
- String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion(metaData.getDatabaseProductVersion());
+ String mariaDbVersion = PipelineMavenPluginMySqlDao.extractMariaDbVersion(
+ metaData.getDatabaseProductVersion());
switch (metaData.getDatabaseMajorVersion()) {
case 8:
@@ -314,24 +338,33 @@ public FormValidation validateConfiguration(Config config) {
case 6:
if (amazonAuroraVersion == null) {
// see JENKINS-54784
- return FormValidation.warning("Non validated MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
+ return FormValidation.warning("Non validated MySQL version "
+ + metaData.getDatabaseProductVersion() + ". "
+ + databaseRequirement);
} else {
// we have successfully tested on Amazon Aurora MySQL 5.6.10a
break;
}
case 5:
if (mariaDbVersion == null) {
- return FormValidation.warning("Non validated MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
+ return FormValidation.warning("Non validated MySQL version "
+ + metaData.getDatabaseProductVersion() + ". "
+ + databaseRequirement);
} else {
// JENKINS-55378 have successfully tested with "5.5.5-10.2.20-MariaDB"
- return FormValidation.ok("MariaDB version " + mariaDbVersion + " detected. Please ensure that your MariaDB version is at least version 10.2+");
+ return FormValidation.ok(
+ "MariaDB version " + mariaDbVersion
+ + " detected. Please ensure that your MariaDB version is at least version 10.2+");
}
default:
- return FormValidation.error("Non supported MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
+ return FormValidation.error("Non supported MySQL version "
+ + metaData.getDatabaseProductVersion() + ". "
+ + databaseRequirement);
}
break;
default:
- return FormValidation.error("Non supported MySQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
+ return FormValidation.error("Non supported MySQL version "
+ + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
}
} else if ("PostgreSQL".equals(metaData.getDatabaseProductName())) {
try (Statement stmt = cnn.createStatement()) {
@@ -342,7 +375,7 @@ public FormValidation validateConfiguration(Config config) {
} catch (SQLException e) {
// org.postgresql.util.PSQLState.UNDEFINED_FUNCTION.getState()
if (!"42883".equals(e.getSQLState())) {
- LOGGER.log(Level.WARNING,"Exception checking Amazon Aurora version", e);
+ LOGGER.log(Level.WARNING, "Exception checking Amazon Aurora version", e);
}
}
}
@@ -359,10 +392,12 @@ public FormValidation validateConfiguration(Config config) {
// OK
break;
default:
- return FormValidation.warning("Non tested PostgreSQL version " + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
+ return FormValidation.warning("Non tested PostgreSQL version "
+ + metaData.getDatabaseProductVersion() + ". " + databaseRequirement);
}
} else {
- return FormValidation.warning("Non production grade database. For production workloads, " + databaseRequirement);
+ return FormValidation.warning(
+ "Non production grade database. For production workloads, " + databaseRequirement);
}
try (Statement stmt = cnn.createStatement()) {
try (ResultSet rst = stmt.executeQuery("select 1")) {
@@ -371,14 +406,15 @@ public FormValidation validateConfiguration(Config config) {
}
}
return FormValidation.ok(databaseVersionDescription + " is a supported database");
- } catch (SQLException e ){
+ } catch (SQLException e) {
return FormValidation.error(e, "Failure to connect to the database " + jdbcUrl);
}
}
} catch (RuntimeException e) {
return FormValidation.error(e, "Failed to test JDBC connection '" + jdbcUrl + "'");
} catch (ClassNotFoundException e) {
- return FormValidation.error(e, "Failed to load JDBC driver '" + driverClass + "' for JDBC connection '" + jdbcUrl + "'");
+ return FormValidation.error(
+ e, "Failed to load JDBC driver '" + driverClass + "' for JDBC connection '" + jdbcUrl + "'");
}
}
}
@@ -388,7 +424,8 @@ public Builder getBuilder() {
return new JDBCDaoBuilder(getClass());
}
- private static HikariConfig createHikariConfig(String properties, String jdbcUrl, String jdbcUserName, String jdbcPassword) {
+ private static HikariConfig createHikariConfig(
+ String properties, String jdbcUrl, String jdbcUserName, String jdbcPassword) {
Properties p = new Properties();
// todo refactor the DAO to inject config defaults in the DAO
if (jdbcUrl.startsWith("jdbc:mysql")) {
@@ -407,7 +444,8 @@ private static HikariConfig createHikariConfig(String properties, String jdbcUrl
} else if (jdbcUrl.startsWith("jdbc:postgresql")) {
// no tuning recommendations found for postgresql
} else if (jdbcUrl.startsWith("jdbc:h2")) {
- // dsConfig.setDataSourceClassName("org.h2.jdbcx.JdbcDataSource"); don't specify the datasource due to a classloading issue
+ // dsConfig.setDataSourceClassName("org.h2.jdbcx.JdbcDataSource"); don't specify the datasource due to a
+ // classloading issue
} else {
// unsupported config
}
@@ -419,7 +457,8 @@ private static HikariConfig createHikariConfig(String properties, String jdbcUrl
throw new IllegalStateException("Failed to read properties.", e);
}
}
- Logger.getLogger(AbstractPipelineMavenPluginDao.class.getName()).log(Level.INFO, "Applied pool properties {0}", p);
+ Logger.getLogger(AbstractPipelineMavenPluginDao.class.getName())
+ .log(Level.INFO, "Applied pool properties {0}", p);
HikariConfig dsConfig = new HikariConfig(p);
dsConfig.setJdbcUrl(jdbcUrl);
dsConfig.setUsername(jdbcUserName);
@@ -432,15 +471,29 @@ private static HikariConfig createHikariConfig(String properties, String jdbcUrl
protected abstract void registerJdbcDriver();
@Override
- public void recordDependency(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String scope, boolean ignoreUpstreamTriggers, String classifier) {
- LOGGER.log(Level.FINE, "recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})",
- new Object[]{jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers});
+ public void recordDependency(
+ String jobFullName,
+ int buildNumber,
+ String groupId,
+ String artifactId,
+ String version,
+ String type,
+ String scope,
+ boolean ignoreUpstreamTriggers,
+ String classifier) {
+ LOGGER.log(
+ Level.FINE,
+ "recordDependency({0}#{1}, {2}:{3}:{4}:{5}, {6}, ignoreUpstreamTriggers:{7}})",
+ new Object[] {
+ jobFullName, buildNumber, groupId, artifactId, version, type, scope, ignoreUpstreamTriggers
+ });
long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber);
long artifactPrimaryKey = getOrCreateArtifactPrimaryKey(groupId, artifactId, version, type, classifier);
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO MAVEN_DEPENDENCY(ARTIFACT_ID, BUILD_ID, SCOPE, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?, ?)")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO MAVEN_DEPENDENCY(ARTIFACT_ID, BUILD_ID, SCOPE, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?, ?)")) {
stmt.setLong(1, artifactPrimaryKey);
stmt.setLong(2, buildPrimaryKey);
stmt.setString(3, scope);
@@ -456,16 +509,17 @@ public void recordDependency(String jobFullName, int buildNumber, String groupId
@NonNull
@Override
public List listDependencies(@NonNull String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINER, "listDependencies({0}, {1})", new Object[]{jobFullName, buildNumber});
- String dependenciesSql = "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_DEPENDENCY.scope " +
- " FROM MAVEN_ARTIFACT " +
- " INNER JOIN MAVEN_DEPENDENCY ON MAVEN_ARTIFACT.ID = MAVEN_DEPENDENCY.ARTIFACT_ID" +
- " INNER JOIN JENKINS_BUILD ON MAVEN_DEPENDENCY.BUILD_ID = JENKINS_BUILD.ID " +
- " INNER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID " +
- " WHERE " +
- " JENKINS_JOB.FULL_NAME = ? AND" +
- " JENKINS_JOB.JENKINS_MASTER_ID = ? AND" +
- " JENKINS_BUILD.NUMBER = ? ";
+ LOGGER.log(Level.FINER, "listDependencies({0}, {1})", new Object[] {jobFullName, buildNumber});
+ String dependenciesSql =
+ "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_DEPENDENCY.scope "
+ + " FROM MAVEN_ARTIFACT "
+ + " INNER JOIN MAVEN_DEPENDENCY ON MAVEN_ARTIFACT.ID = MAVEN_DEPENDENCY.ARTIFACT_ID"
+ + " INNER JOIN JENKINS_BUILD ON MAVEN_DEPENDENCY.BUILD_ID = JENKINS_BUILD.ID "
+ + " INNER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID "
+ + " WHERE "
+ + " JENKINS_JOB.FULL_NAME = ? AND"
+ + " JENKINS_JOB.JENKINS_MASTER_ID = ? AND"
+ + " JENKINS_BUILD.NUMBER = ? ";
List results = new ArrayList<>();
try (Connection cnn = this.ds.getConnection()) {
@@ -497,15 +551,24 @@ public List listDependencies(@NonNull String jobFullName, int b
}
@Override
- public void recordParentProject(@NonNull String jobFullName, int buildNumber, @NonNull String parentGroupId, @NonNull String parentArtifactId, @NonNull String parentVersion, boolean ignoreUpstreamTriggers) {
- LOGGER.log(Level.FINE, "recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})",
- new Object[]{jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers});
+ public void recordParentProject(
+ @NonNull String jobFullName,
+ int buildNumber,
+ @NonNull String parentGroupId,
+ @NonNull String parentArtifactId,
+ @NonNull String parentVersion,
+ boolean ignoreUpstreamTriggers) {
+ LOGGER.log(Level.FINE, "recordParentProject({0}#{1}, {2}:{3} ignoreUpstreamTriggers:{5}})", new Object[] {
+ jobFullName, buildNumber, parentGroupId, parentArtifactId, parentVersion, ignoreUpstreamTriggers
+ });
long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber);
- long parentArtifactPrimaryKey = getOrCreateArtifactPrimaryKey(parentGroupId, parentArtifactId, parentVersion, "pom", null);
+ long parentArtifactPrimaryKey =
+ getOrCreateArtifactPrimaryKey(parentGroupId, parentArtifactId, parentVersion, "pom", null);
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO MAVEN_PARENT_PROJECT(ARTIFACT_ID, BUILD_ID, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?)")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO MAVEN_PARENT_PROJECT(ARTIFACT_ID, BUILD_ID, IGNORE_UPSTREAM_TRIGGERS) VALUES (?, ?, ?)")) {
stmt.setLong(1, parentArtifactPrimaryKey);
stmt.setLong(2, buildPrimaryKey);
stmt.setBoolean(3, ignoreUpstreamTriggers);
@@ -518,15 +581,39 @@ public void recordParentProject(@NonNull String jobFullName, int buildNumber, @N
}
@Override
- public void recordGeneratedArtifact(String jobFullName, int buildNumber, String groupId, String artifactId, String version, String type, String baseVersion, String repositoryUrl, boolean skipDownstreamTriggers, String extension, String classifier) {
- LOGGER.log(Level.FINE, "recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})",
- new Object[]{jobFullName, buildNumber, groupId, artifactId, baseVersion, type, version, repositoryUrl, skipDownstreamTriggers});
+ public void recordGeneratedArtifact(
+ String jobFullName,
+ int buildNumber,
+ String groupId,
+ String artifactId,
+ String version,
+ String type,
+ String baseVersion,
+ String repositoryUrl,
+ boolean skipDownstreamTriggers,
+ String extension,
+ String classifier) {
+ LOGGER.log(
+ Level.FINE,
+ "recordGeneratedArtifact({0}#{1}, {2}:{3}:{4}:{5}, version:{6}, repositoryUrl:{7}, skipDownstreamTriggers:{8})",
+ new Object[] {
+ jobFullName,
+ buildNumber,
+ groupId,
+ artifactId,
+ baseVersion,
+ type,
+ version,
+ repositoryUrl,
+ skipDownstreamTriggers
+ });
long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber);
long artifactPrimaryKey = getOrCreateArtifactPrimaryKey(groupId, artifactId, baseVersion, type, classifier);
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO GENERATED_MAVEN_ARTIFACT(ARTIFACT_ID, BUILD_ID, VERSION, REPOSITORY_URL, EXTENSION, SKIP_DOWNSTREAM_TRIGGERS) VALUES (?, ?, ?, ?, ?, ?)")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO GENERATED_MAVEN_ARTIFACT(ARTIFACT_ID, BUILD_ID, VERSION, REPOSITORY_URL, EXTENSION, SKIP_DOWNSTREAM_TRIGGERS) VALUES (?, ?, ?, ?, ?, ?)")) {
stmt.setLong(1, artifactPrimaryKey);
stmt.setLong(2, buildPrimaryKey);
stmt.setString(3, version);
@@ -542,12 +629,16 @@ public void recordGeneratedArtifact(String jobFullName, int buildNumber, String
}
@Override
- public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) {
- LOGGER.log(Level.FINE, "recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})",
- new Object[]{upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber});
+ public void recordBuildUpstreamCause(
+ String upstreamJobName, int upstreamBuildNumber, String downstreamJobName, int downstreamBuildNumber) {
+ LOGGER.log(
+ Level.FINE,
+ "recordBuildUpstreamCause(upstreamBuild: {0}#{1}, downstreamBuild: {2}#{3})",
+ new Object[] {upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber});
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- String sql = "insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) values (?, ?)";
+ String sql =
+ "insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) values (?, ?)";
long upstreamBuildPrimaryKey = getOrCreateBuildPrimaryKey(upstreamJobName, upstreamBuildNumber);
long downstreamBuildPrimaryKey = getOrCreateBuildPrimaryKey(downstreamJobName, downstreamBuildNumber);
@@ -558,28 +649,32 @@ public void recordBuildUpstreamCause(String upstreamJobName, int upstreamBuildNu
int rowCount = stmt.executeUpdate();
if (rowCount != 1) {
- LOGGER.log(Level.INFO, "More/less ({0}) than 1 record inserted in JENKINS_BUILD_UPSTREAM_CAUSE for upstreamBuild: {1}#{2}, downstreamBuild: {3}#{4}",
- new Object[]{rowCount, upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber});
+ LOGGER.log(
+ Level.INFO,
+ "More/less ({0}) than 1 record inserted in JENKINS_BUILD_UPSTREAM_CAUSE for upstreamBuild: {1}#{2}, downstreamBuild: {3}#{4}",
+ new Object[] {
+ rowCount, upstreamJobName, upstreamBuildNumber, downstreamJobName, downstreamBuildNumber
+ });
}
}
cnn.commit();
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
-
}
@Override
public void renameJob(String oldFullName, String newFullName) {
- LOGGER.log(Level.FINER, "renameJob({0}, {1})", new Object[]{oldFullName, newFullName});
+ LOGGER.log(Level.FINER, "renameJob({0}, {1})", new Object[] {oldFullName, newFullName});
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB SET FULL_NAME = ? WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "UPDATE JENKINS_JOB SET FULL_NAME = ? WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
stmt.setString(1, newFullName);
stmt.setString(2, oldFullName);
stmt.setLong(3, getJenkinsMasterPrimaryKey(cnn));
int count = stmt.executeUpdate();
- LOGGER.log(Level.FINE, "renameJob({0}, {1}): {2}", new Object[]{oldFullName, newFullName, count});
+ LOGGER.log(Level.FINE, "renameJob({0}, {1}): {2}", new Object[] {oldFullName, newFullName, count});
}
cnn.commit();
} catch (SQLException e) {
@@ -589,14 +684,15 @@ public void renameJob(String oldFullName, String newFullName) {
@Override
public void deleteJob(String jobFullName) {
- LOGGER.log(Level.FINER, "deleteJob({0})", new Object[]{jobFullName});
+ LOGGER.log(Level.FINER, "deleteJob({0})", new Object[] {jobFullName});
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- try (PreparedStatement stmt = cnn.prepareStatement("DELETE FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("DELETE FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
stmt.setString(1, jobFullName);
stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn));
int count = stmt.executeUpdate();
- LOGGER.log(Level.FINE, "deleteJob({0}): {1}", new Object[]{jobFullName, count});
+ LOGGER.log(Level.FINE, "deleteJob({0}): {1}", new Object[] {jobFullName, count});
}
cnn.commit();
} catch (SQLException e) {
@@ -606,13 +702,14 @@ public void deleteJob(String jobFullName) {
@Override
public void deleteBuild(String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINER, "deleteBuild({0}#{1})", new Object[]{jobFullName, buildNumber});
+ LOGGER.log(Level.FINER, "deleteBuild({0}#{1})", new Object[] {jobFullName, buildNumber});
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
Long jobPrimaryKey;
Integer lastBuildNumber;
Integer lastSuccessfulBuildNumber;
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID, LAST_BUILD_NUMBER, LAST_SUCCESSFUL_BUILD_NUMBER FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "SELECT ID, LAST_BUILD_NUMBER, LAST_SUCCESSFUL_BUILD_NUMBER FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
stmt.setString(1, jobFullName);
stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn));
try (ResultSet rst = stmt.executeQuery()) {
@@ -628,15 +725,17 @@ public void deleteBuild(String jobFullName, int buildNumber) {
}
}
if (jobPrimaryKey == null) {
- LOGGER.log(Level.FINE, "No record found for job {0}", new Object[]{jobFullName});
+ LOGGER.log(Level.FINE, "No record found for job {0}", new Object[] {jobFullName});
return;
}
if (buildNumber == lastBuildNumber || buildNumber == lastSuccessfulBuildNumber) {
Integer newLastBuildNumber = (lastBuildNumber == buildNumber) ? null : lastBuildNumber;
- Integer newLastSuccessfulBuildNumber = (lastSuccessfulBuildNumber == buildNumber) ? null : lastSuccessfulBuildNumber;
+ Integer newLastSuccessfulBuildNumber =
+ (lastSuccessfulBuildNumber == buildNumber) ? null : lastSuccessfulBuildNumber;
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT JENKINS_BUILD.number, JENKINS_BUILD.result_id FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER != ? ORDER BY NUMBER DESC")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "SELECT JENKINS_BUILD.number, JENKINS_BUILD.result_id FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER != ? ORDER BY NUMBER DESC")) {
stmt.setLong(1, jobPrimaryKey);
stmt.setInt(2, buildNumber);
stmt.setFetchSize(5);
@@ -645,18 +744,20 @@ public void deleteBuild(String jobFullName, int buildNumber) {
int currentBuildNumber = rst.getInt("number");
int currentBuildResultId = rst.getInt("result_id");
- if(newLastBuildNumber == null) {
+ if (newLastBuildNumber == null) {
newLastBuildNumber = currentBuildNumber;
}
- if (newLastSuccessfulBuildNumber == null && Result.SUCCESS.ordinal == currentBuildResultId) {
+ if (newLastSuccessfulBuildNumber == null
+ && Result.SUCCESS.ordinal == currentBuildResultId) {
newLastSuccessfulBuildNumber = currentBuildNumber;
}
}
}
}
- try(PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB SET LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? WHERE ID = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "UPDATE JENKINS_JOB SET LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? WHERE ID = ?")) {
stmt.setInt(1, newLastBuildNumber);
stmt.setInt(2, newLastSuccessfulBuildNumber);
stmt.setLong(3, jobPrimaryKey);
@@ -664,11 +765,12 @@ public void deleteBuild(String jobFullName, int buildNumber) {
}
}
- try (PreparedStatement stmt = cnn.prepareStatement("DELETE FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER = ?")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("DELETE FROM JENKINS_BUILD WHERE JOB_ID = ? AND NUMBER = ?")) {
stmt.setLong(1, jobPrimaryKey);
stmt.setInt(2, buildNumber);
int count = stmt.executeUpdate();
- LOGGER.log(Level.FINE, "deleteJob({0}#{1}): {2}", new Object[]{jobFullName, buildNumber, count});
+ LOGGER.log(Level.FINE, "deleteJob({0}#{1}): {2}", new Object[] {jobFullName, buildNumber, count});
}
cnn.commit();
} catch (SQLException e) {
@@ -680,10 +782,11 @@ public void deleteBuild(String jobFullName, int buildNumber) {
public void cleanup() {
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- String sql = "DELETE FROM MAVEN_ARTIFACT WHERE ID NOT IN (SELECT DISTINCT ARTIFACT_ID FROM MAVEN_DEPENDENCY UNION SELECT DISTINCT ARTIFACT_ID FROM GENERATED_MAVEN_ARTIFACT)";
+ String sql =
+ "DELETE FROM MAVEN_ARTIFACT WHERE ID NOT IN (SELECT DISTINCT ARTIFACT_ID FROM MAVEN_DEPENDENCY UNION SELECT DISTINCT ARTIFACT_ID FROM GENERATED_MAVEN_ARTIFACT)";
try (Statement stmt = cnn.createStatement()) {
int count = stmt.executeUpdate(sql);
- LOGGER.log(Level.FINE, "cleanup(): {0}", new Object[]{count});
+ LOGGER.log(Level.FINE, "cleanup(): {0}", new Object[] {count});
}
cnn.commit();
} catch (SQLException e) {
@@ -696,7 +799,8 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b
cnn.setAutoCommit(false);
Long jobPrimaryKey = null;
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
stmt.setString(1, jobFullName);
stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn));
try (ResultSet rst = stmt.executeQuery()) {
@@ -706,7 +810,9 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b
}
}
if (jobPrimaryKey == null) {
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_JOB(FULL_NAME, JENKINS_MASTER_ID) VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS)) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO JENKINS_JOB(FULL_NAME, JENKINS_MASTER_ID) VALUES (?, ?)",
+ Statement.RETURN_GENERATED_KEYS)) {
stmt.setString(1, jobFullName);
stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn));
stmt.execute();
@@ -714,7 +820,8 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b
}
}
Long buildPrimaryKey = null;
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM JENKINS_BUILD WHERE JOB_ID=? AND NUMBER=?")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("SELECT ID FROM JENKINS_BUILD WHERE JOB_ID=? AND NUMBER=?")) {
stmt.setLong(1, jobPrimaryKey);
stmt.setInt(2, buildNumber);
try (ResultSet rst = stmt.executeQuery()) {
@@ -725,7 +832,8 @@ protected synchronized long getOrCreateBuildPrimaryKey(String jobFullName, int b
}
if (buildPrimaryKey == null) {
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_BUILD(JOB_ID, NUMBER) VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS)) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO JENKINS_BUILD(JOB_ID, NUMBER) VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS)) {
stmt.setLong(1, jobPrimaryKey);
stmt.setInt(2, buildNumber);
stmt.execute();
@@ -751,14 +859,21 @@ protected Long getGeneratedPrimaryKey(PreparedStatement stmt, String column) thr
return jobPrimaryKey;
}
- protected long getOrCreateArtifactPrimaryKey(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @Nullable String classifier) {
+ protected long getOrCreateArtifactPrimaryKey(
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @Nullable String classifier) {
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
// get or create build record
Long artifactPrimaryKey = null;
if (classifier == null) {
- // For an unknown reason, "where classifier = null" does not work as expected when "where classifier is null" does
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER is NULL")) {
+ // For an unknown reason, "where classifier = null" does not work as expected when "where classifier is
+ // null" does
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER is NULL")) {
stmt.setString(1, groupId);
stmt.setString(2, artifactId);
stmt.setString(3, version);
@@ -771,7 +886,8 @@ protected long getOrCreateArtifactPrimaryKey(@NonNull String groupId, @NonNull S
}
}
} else {
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "SELECT ID FROM MAVEN_ARTIFACT WHERE GROUP_ID = ? AND ARTIFACT_ID = ? AND VERSION = ? AND TYPE = ? AND CLASSIFIER = ?")) {
stmt.setString(1, groupId);
stmt.setString(2, artifactId);
stmt.setString(3, version);
@@ -787,7 +903,9 @@ protected long getOrCreateArtifactPrimaryKey(@NonNull String groupId, @NonNull S
}
if (artifactPrimaryKey == null) {
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO MAVEN_ARTIFACT(GROUP_ID, ARTIFACT_ID, VERSION, TYPE, CLASSIFIER) VALUES (?, ?, ?, ?, ?)", Statement.RETURN_GENERATED_KEYS)) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO MAVEN_ARTIFACT(GROUP_ID, ARTIFACT_ID, VERSION, TYPE, CLASSIFIER) VALUES (?, ?, ?, ?, ?)",
+ Statement.RETURN_GENERATED_KEYS)) {
stmt.setString(1, groupId);
stmt.setString(2, artifactId);
stmt.setString(3, version);
@@ -810,7 +928,9 @@ protected synchronized void initializeDatabase() {
cnn.setAutoCommit(false);
int initialSchemaVersion = getSchemaVersion(cnn);
- LOGGER.log(Level.FINE, "Initialise database. Current schema version: {0}", new Object[]{initialSchemaVersion});
+ LOGGER.log(
+ Level.FINE, "Initialise database. Current schema version: {0}", new Object[] {initialSchemaVersion
+ });
NumberFormat numberFormat = new DecimalFormat("00");
int idx = initialSchemaVersion;
@@ -837,20 +957,27 @@ protected synchronized void initializeDatabase() {
LOGGER.log(Level.FINER, "Execute command {0}", sqlCommand);
stmt.execute(sqlCommand);
} catch (SQLException e) {
- LOGGER.log(Level.SEVERE, "Failed to run SQL {0} from script {1}: {2}", new Object[] {sqlCommand, sqlScriptPath, e.getMessage()});
+ LOGGER.log(Level.SEVERE, "Failed to run SQL {0} from script {1}: {2}", new Object[] {
+ sqlCommand, sqlScriptPath, e.getMessage()
+ });
handleDatabaseInitialisationException(e);
}
}
}
- String className = "org.jenkinsci.plugins.pipeline.maven.db.migration." + getJdbcScheme() + ".MigrationStep" + idx;
+ String className = "org.jenkinsci.plugins.pipeline.maven.db.migration." + getJdbcScheme()
+ + ".MigrationStep" + idx;
try {
- MigrationStep migrationStep = (MigrationStep) Class.forName(className).newInstance();
- LOGGER.log(Level.FINE, "Execute database migration step {0}", migrationStep.getClass().getName());
+ MigrationStep migrationStep =
+ (MigrationStep) Class.forName(className).newInstance();
+ LOGGER.log(
+ Level.FINE,
+ "Execute database migration step {0}",
+ migrationStep.getClass().getName());
migrationStep.execute(cnn, getJenkinsDetails());
} catch (ClassNotFoundException e) {
// no migration class found, just a migration script
- LOGGER.log(Level.FINER, "Migration step {0} not found", new Object[]{className});
+ LOGGER.log(Level.FINER, "Migration step {0} not found", new Object[] {className});
} catch (Exception e) {
cnn.rollback();
throw new RuntimeException(e);
@@ -862,15 +989,18 @@ protected synchronized void initializeDatabase() {
if (newSchemaVersion == 0) {
// https://issues.jenkins-ci.org/browse/JENKINS-46577
- throw new IllegalStateException("Failure to load database DDL files. " +
- "Files 'sql/" + getJdbcScheme() + "/xxx_migration.sql' NOT found in the Thread Context Class Loader. " +
- " Pipeline Maven Plugin may be installed in an unsupported manner " +
- "(thread.contextClassLoader: " + Thread.currentThread().getContextClassLoader() + ", "
+ throw new IllegalStateException("Failure to load database DDL files. " + "Files 'sql/"
+ + getJdbcScheme() + "/xxx_migration.sql' NOT found in the Thread Context Class Loader. "
+ + " Pipeline Maven Plugin may be installed in an unsupported manner "
+ + "(thread.contextClassLoader: "
+ + Thread.currentThread().getContextClassLoader() + ", "
+ "classLoader: " + ClassUtils.class.getClassLoader() + ")");
} else if (newSchemaVersion == initialSchemaVersion) {
// no migration was needed
} else {
- LOGGER.log(Level.INFO, "Database successfully migrated from version {0} to version {1}", new Object[]{initialSchemaVersion, newSchemaVersion});
+ LOGGER.log(Level.INFO, "Database successfully migrated from version {0} to version {1}", new Object[] {
+ initialSchemaVersion, newSchemaVersion
+ });
}
} catch (SQLException e) {
throw new RuntimeSqlException(e);
@@ -920,13 +1050,19 @@ protected int getSchemaVersion(Connection cnn) throws SQLException {
*/
protected synchronized void testDatabase() throws RuntimeSqlException {
try (Connection cnn = ds.getConnection()) {
- List tables = Arrays.asList("MAVEN_ARTIFACT", "JENKINS_JOB", "JENKINS_BUILD", "MAVEN_DEPENDENCY", "GENERATED_MAVEN_ARTIFACT", "MAVEN_PARENT_PROJECT");
+ List tables = Arrays.asList(
+ "MAVEN_ARTIFACT",
+ "JENKINS_JOB",
+ "JENKINS_BUILD",
+ "MAVEN_DEPENDENCY",
+ "GENERATED_MAVEN_ARTIFACT",
+ "MAVEN_PARENT_PROJECT");
for (String table : tables) {
try (Statement stmt = cnn.createStatement()) {
try (ResultSet rst = stmt.executeQuery("SELECT count(*) FROM " + table)) {
if (rst.next()) {
int count = rst.getInt(1);
- LOGGER.log(Level.FINE, "Table {0}: {1} rows", new Object[]{table, count});
+ LOGGER.log(Level.FINE, "Table {0}: {1} rows", new Object[] {table, count});
} else {
throw new IllegalStateException("Exception testing table '" + table + "'");
}
@@ -954,15 +1090,25 @@ public List listDownstreamJobs(@NonNull String jobFullName, int buildNum
@NonNull
@Override
- public Map> listDownstreamJobsByArtifact(@NonNull String jobFullName, int buildNumber) {
- Map> downstreamJobsByArtifactBasedOnMavenDependencies = listDownstreamJobsByArtifactBasedOnMavenDependencies(jobFullName, buildNumber);
- LOGGER.log(Level.FINER, "Got downstreamJobsByArtifactBasedOnMavenDependencies for job named {0} and build #{1}: {2}", new Object[]{jobFullName, buildNumber, downstreamJobsByArtifactBasedOnMavenDependencies});
- Map> downstreamJobsByArtifactBasedOnParentProjectDependencies = listDownstreamJobsByArtifactBasedOnParentProjectDependencies(jobFullName, buildNumber);
- LOGGER.log(Level.FINER, "Got downstreamJobsByArtifactBasedOnParentProjectDependencies for job named {0} and build #{1}: {2}", new Object[]{jobFullName, buildNumber, downstreamJobsByArtifactBasedOnParentProjectDependencies});
+ public Map> listDownstreamJobsByArtifact(
+ @NonNull String jobFullName, int buildNumber) {
+ Map> downstreamJobsByArtifactBasedOnMavenDependencies =
+ listDownstreamJobsByArtifactBasedOnMavenDependencies(jobFullName, buildNumber);
+ LOGGER.log(
+ Level.FINER,
+ "Got downstreamJobsByArtifactBasedOnMavenDependencies for job named {0} and build #{1}: {2}",
+ new Object[] {jobFullName, buildNumber, downstreamJobsByArtifactBasedOnMavenDependencies});
+ Map> downstreamJobsByArtifactBasedOnParentProjectDependencies =
+ listDownstreamJobsByArtifactBasedOnParentProjectDependencies(jobFullName, buildNumber);
+ LOGGER.log(
+ Level.FINER,
+ "Got downstreamJobsByArtifactBasedOnParentProjectDependencies for job named {0} and build #{1}: {2}",
+ new Object[] {jobFullName, buildNumber, downstreamJobsByArtifactBasedOnParentProjectDependencies});
Map> results = new HashMap<>(downstreamJobsByArtifactBasedOnMavenDependencies);
- for(Entry> entry: downstreamJobsByArtifactBasedOnParentProjectDependencies.entrySet()) {
+ for (Entry> entry :
+ downstreamJobsByArtifactBasedOnParentProjectDependencies.entrySet()) {
MavenArtifact mavenArtifact = entry.getKey();
if (results.containsKey(mavenArtifact)) {
results.get(mavenArtifact).addAll(entry.getValue());
@@ -970,16 +1116,22 @@ public Map> listDownstreamJobsByArtifact(@NonNu
results.put(mavenArtifact, new TreeSet<>(entry.getValue()));
}
}
- LOGGER.log(Level.FINER, "Got results for job named {0} and build #{1}: {2}", new Object[]{jobFullName, buildNumber, results});
+ LOGGER.log(Level.FINER, "Got results for job named {0} and build #{1}: {2}", new Object[] {
+ jobFullName, buildNumber, results
+ });
// JENKINS-50507 Don't return the passed job in case of pipelines consuming the artifacts they produce
- for (Iterator>> it = results.entrySet().iterator(); it.hasNext();) {
+ for (Iterator>> it =
+ results.entrySet().iterator();
+ it.hasNext(); ) {
Entry> entry = it.next();
MavenArtifact mavenArtifact = entry.getKey();
SortedSet jobs = entry.getValue();
boolean removed = jobs.remove(jobFullName);
if (removed) {
- LOGGER.log(Level.FINER, "Remove {0} from downstreamJobs of artifact {1}", new Object[]{jobFullName, mavenArtifact});
+ LOGGER.log(Level.FINER, "Remove {0} from downstreamJobs of artifact {1}", new Object[] {
+ jobFullName, mavenArtifact
+ });
if (jobs.isEmpty()) {
it.remove();
}
@@ -991,24 +1143,37 @@ public Map> listDownstreamJobsByArtifact(@NonNu
@NonNull
@Override
- public SortedSet listDownstreamJobs(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @Nullable String baseVersion, @NonNull String type, @Nullable String classifier) {
- return listDownstreamPipelinesBasedOnMavenDependencies(groupId, artifactId, (baseVersion == null ? version : baseVersion), type, classifier);
+ public SortedSet listDownstreamJobs(
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @Nullable String baseVersion,
+ @NonNull String type,
+ @Nullable String classifier) {
+ return listDownstreamPipelinesBasedOnMavenDependencies(
+ groupId, artifactId, (baseVersion == null ? version : baseVersion), type, classifier);
}
- protected SortedSet listDownstreamPipelinesBasedOnMavenDependencies(@NonNull String groupId, @NonNull String artifactId, @NonNull String version, @NonNull String type, @Nullable String classifier) {
- LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}:{4})", new Object[]{groupId, artifactId, version, type, classifier});
-
- String sql = "select distinct downstream_job.full_name \n" +
- "from MAVEN_ARTIFACT \n" +
- "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" +
- "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" +
- "where MAVEN_ARTIFACT.group_id = ? " +
- "and MAVEN_ARTIFACT.artifact_id = ? " +
- "and MAVEN_ARTIFACT.version = ? " +
- "and MAVEN_ARTIFACT.type = ? " +
- "and (MAVEN_ARTIFACT.classifier = ? or (MAVEN_ARTIFACT.classifier is null and ? is null)) " +
- "and downstream_job.jenkins_master_id = ?";
+ protected SortedSet listDownstreamPipelinesBasedOnMavenDependencies(
+ @NonNull String groupId,
+ @NonNull String artifactId,
+ @NonNull String version,
+ @NonNull String type,
+ @Nullable String classifier) {
+ LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}:{4})", new Object[] {
+ groupId, artifactId, version, type, classifier
+ });
+
+ String sql = "select distinct downstream_job.full_name \n" + "from MAVEN_ARTIFACT \n"
+ + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n"
+ + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n"
+ + "where MAVEN_ARTIFACT.group_id = ? "
+ + "and MAVEN_ARTIFACT.artifact_id = ? "
+ + "and MAVEN_ARTIFACT.version = ? "
+ + "and MAVEN_ARTIFACT.type = ? "
+ + "and (MAVEN_ARTIFACT.classifier = ? or (MAVEN_ARTIFACT.classifier is null and ? is null)) "
+ + "and downstream_job.jenkins_master_id = ?";
SortedSet downstreamJobsFullNames = new TreeSet<>();
@@ -1030,27 +1195,31 @@ protected SortedSet listDownstreamPipelinesBasedOnMavenDependencies(@Non
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}): {4}", new Object[]{groupId, artifactId, version, type, downstreamJobsFullNames});
+ LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnMavenDependencies({0}:{1}:{2}:{3}): {4}", new Object[] {
+ groupId, artifactId, version, type, downstreamJobsFullNames
+ });
return downstreamJobsFullNames;
}
- @Deprecated
- protected List listDownstreamPipelinesBasedOnMavenDependencies(@NonNull String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINER, "listDownstreamJobs({0}, {1})", new Object[]{jobFullName, buildNumber});
-
- String sql = "select distinct downstream_job.full_name \n" +
- "from JENKINS_JOB as upstream_job \n" +
- "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" +
- "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" +
- "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" +
- "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" +
- "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" +
- "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
+ @Deprecated
+ protected List listDownstreamPipelinesBasedOnMavenDependencies(
+ @NonNull String jobFullName, int buildNumber) {
+ LOGGER.log(Level.FINER, "listDownstreamJobs({0}, {1})", new Object[] {jobFullName, buildNumber});
+
+ String sql = "select distinct downstream_job.full_name \n" + "from JENKINS_JOB as upstream_job \n"
+ + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n"
+ + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n"
+ + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n"
+ + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n"
+ + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n"
+ + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
List downstreamJobsFullNames = new ArrayList<>();
- LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber});
+ LOGGER.log(
+ Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber
+ });
try (Connection cnn = ds.getConnection()) {
try (PreparedStatement stmt = cnn.prepareStatement(sql)) {
@@ -1067,28 +1236,34 @@ protected List listDownstreamPipelinesBasedOnMavenDependencies(@NonNull
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINE, "listDownstreamJobs({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, downstreamJobsFullNames});
+ LOGGER.log(Level.FINE, "listDownstreamJobs({0}, {1}): {2}", new Object[] {
+ jobFullName, buildNumber, downstreamJobsFullNames
+ });
return downstreamJobsFullNames;
}
- protected Map> listDownstreamJobsByArtifactBasedOnMavenDependencies(@NonNull String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINER, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1})", new Object[]{jobFullName, buildNumber});
-
-
- String sql = "select distinct downstream_job.full_name, \n " +
- " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n" +
- " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n" +
- "from JENKINS_JOB as upstream_job \n" +
- "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" +
- "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" +
- "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" +
- "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n" +
- "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" +
- "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
-
- LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber});
+ protected Map> listDownstreamJobsByArtifactBasedOnMavenDependencies(
+ @NonNull String jobFullName, int buildNumber) {
+ LOGGER.log(Level.FINER, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1})", new Object[] {
+ jobFullName, buildNumber
+ });
+
+ String sql = "select distinct downstream_job.full_name, \n "
+ + " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n"
+ + " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n"
+ + "from JENKINS_JOB as upstream_job \n"
+ + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n"
+ + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n"
+ + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n"
+ + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false) \n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id \n"
+ + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n"
+ + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
+
+ LOGGER.log(
+ Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber
+ });
Map> results = new HashMap<>();
try (Connection cnn = ds.getConnection()) {
@@ -1116,27 +1291,32 @@ protected Map> listDownstreamJobsByArtifactBase
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINE, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, results});
+ LOGGER.log(Level.FINE, "listDownstreamJobsByArtifactBasedOnMavenDependencies({0}, {1}): {2}", new Object[] {
+ jobFullName, buildNumber, results
+ });
return results;
}
-
@Deprecated
- protected List listDownstreamPipelinesBasedOnParentProjectDependencies(@NonNull String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{jobFullName, buildNumber});
- String sql = "select distinct downstream_job.full_name \n" +
- "from JENKINS_JOB as upstream_job \n" +
- "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" +
- "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" +
- "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" +
- "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n" +
- "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" +
- "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
+ protected List listDownstreamPipelinesBasedOnParentProjectDependencies(
+ @NonNull String jobFullName, int buildNumber) {
+ LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[] {
+ jobFullName, buildNumber
+ });
+ String sql = "select distinct downstream_job.full_name \n" + "from JENKINS_JOB as upstream_job \n"
+ + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n"
+ + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n"
+ + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n"
+ + "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n"
+ + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n"
+ + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
List downstreamJobsFullNames = new ArrayList<>();
- LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber});
+ LOGGER.log(
+ Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber
+ });
try (Connection cnn = ds.getConnection()) {
try (PreparedStatement stmt = cnn.prepareStatement(sql)) {
@@ -1153,27 +1333,33 @@ protected List listDownstreamPipelinesBasedOnParentProjectDependencies(@
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINE, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, downstreamJobsFullNames});
+ LOGGER.log(Level.FINE, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[] {
+ jobFullName, buildNumber, downstreamJobsFullNames
+ });
return downstreamJobsFullNames;
}
-
- protected Map> listDownstreamJobsByArtifactBasedOnParentProjectDependencies(String jobFullName, int buildNumber) {
- LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{jobFullName, buildNumber});
- String sql = "select distinct downstream_job.full_name, \n" +
- " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n" +
- " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n" +
- "from JENKINS_JOB as upstream_job \n" +
- "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n" +
- "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n" +
- "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n" +
- "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n" +
- "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n" +
- "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
-
- LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, jobFullName, buildNumber});
+ protected Map> listDownstreamJobsByArtifactBasedOnParentProjectDependencies(
+ String jobFullName, int buildNumber) {
+ LOGGER.log(Level.FINER, "listDownstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[] {
+ jobFullName, buildNumber
+ });
+ String sql = "select distinct downstream_job.full_name, \n"
+ + " MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.version as base_version, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, \n"
+ + " GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.extension \n"
+ + "from JENKINS_JOB as upstream_job \n"
+ + "inner join JENKINS_BUILD as upstream_build on upstream_job.id = upstream_build.job_id \n"
+ + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false) \n"
+ + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id \n"
+ + "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false) \n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id \n"
+ + "inner join JENKINS_JOB as downstream_job on (downstream_build.number = downstream_job.last_successful_build_number and downstream_build.job_id = downstream_job.id) \n"
+ + "where upstream_job.full_name = ? and upstream_job.jenkins_master_id = ? and upstream_build.number = ? and downstream_job.jenkins_master_id = ?";
+
+ LOGGER.log(
+ Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {sql, jobFullName, buildNumber
+ });
Map> results = new HashMap<>();
@@ -1202,7 +1388,10 @@ protected Map> listDownstreamJobsByArtifactBase
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINE, "listDownstreamJobsByArtifactBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{jobFullName, buildNumber, results});
+ LOGGER.log(
+ Level.FINE,
+ "listDownstreamJobsByArtifactBasedOnParentProjectDependencies({0}, {1}): {2}",
+ new Object[] {jobFullName, buildNumber, results});
return results;
}
@@ -1225,31 +1414,37 @@ public Map listUpstreamJobs(@NonNull String jobFullName, int bu
* @param downstreamBuildNumber
* @return
*/
- protected Map listUpstreamPipelinesBasedOnMavenDependencies(@NonNull String downstreamJobFullName, int downstreamBuildNumber) {
- LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1})", new Object[]{downstreamJobFullName, downstreamBuildNumber});
+ protected Map listUpstreamPipelinesBasedOnMavenDependencies(
+ @NonNull String downstreamJobFullName, int downstreamBuildNumber) {
+ LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1})", new Object[] {
+ downstreamJobFullName, downstreamBuildNumber
+ });
- // if we join JENKINS_JOB to the listUpstreamPipelinesBasedOnMavenDependencies query we get performance problems
+ // if we join JENKINS_JOB to the listUpstreamPipelinesBasedOnMavenDependencies query we get performance problems
// in large setups with postgres.
- // The analyzer does not use an index for JENKINS_JOB and uses a sequential scan in the query plan and
+ // The analyzer does not use an index for JENKINS_JOB and uses a sequential scan in the query plan and
// the query needs some minutes to execute!
// There is a workaround: you can give the query a hint that only one row is selected on JENKINS_JOB
// I tried this out with Solution 4 of https://learnsql.com/blog/sql-join-only-first-row/ and it worked.
//
- // ...
- // inner join JENKINS_BUILD as downstream_build on (MAVEN_DEPENDENCY.build_id = downstream_build.id and downstream_build.job_id = (
- // SELECT downstream_job.id FROM JENKINS_JOB as downstream_job
+ // ...
+ // inner join JENKINS_BUILD as downstream_build on (MAVEN_DEPENDENCY.build_id = downstream_build.id and
+ // downstream_build.job_id = (
+ // SELECT downstream_job.id FROM JENKINS_JOB as downstream_job
// WHERE downstream_job.full_name = ? and downstream_job.jenkins_master_id = ?
// LIMIT 1))
//
- // The LIMIT 1 gives the optimizer a hint that should not be necessary because it has a unique index on full_name and jenkins_master_id
+ // The LIMIT 1 gives the optimizer a hint that should not be necessary because it has a unique index on
+ // full_name and jenkins_master_id
//
// Problem: is LIMIT or a similar solutions supported by all databases?
// Therefore i made a second query that reads the primaryKey of the matching JENKINS_JOB first.
- // The second query does not need the problematic join on JENKINS_BUILD and performs very well.
-
+ // The second query does not need the problematic join on JENKINS_BUILD and performs very well.
+
Long jobPrimaryKey;
try (Connection cnn = ds.getConnection()) {
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("SELECT ID FROM JENKINS_JOB WHERE FULL_NAME = ? AND JENKINS_MASTER_ID = ?")) {
stmt.setString(1, downstreamJobFullName);
stmt.setLong(2, getJenkinsMasterPrimaryKey(cnn));
try (ResultSet rst = stmt.executeQuery()) {
@@ -1268,17 +1463,19 @@ protected Map listUpstreamPipelinesBasedOnMavenDependencies(@No
return new HashMap<>();
}
- String sql = "select distinct upstream_job.full_name, upstream_build.number\n" +
- "from JENKINS_JOB as upstream_job\n" +
- "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n" +
- "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n" +
- "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n" +
- "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false)\n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id\n" +
- "where downstream_build.job_id = ? and downstream_build.number = ? and upstream_job.jenkins_master_id = ?";
+ String sql =
+ "select distinct upstream_job.full_name, upstream_build.number\n" + "from JENKINS_JOB as upstream_job\n"
+ + "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n"
+ + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n"
+ + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n"
+ + "inner join MAVEN_DEPENDENCY on (MAVEN_DEPENDENCY.artifact_id = MAVEN_ARTIFACT.id and MAVEN_DEPENDENCY.ignore_upstream_triggers = false)\n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_DEPENDENCY.build_id = downstream_build.id\n"
+ + "where downstream_build.job_id = ? and downstream_build.number = ? and upstream_job.jenkins_master_id = ?";
Map upstreamJobsFullNames = new HashMap<>();
- LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, downstreamJobFullName, downstreamBuildNumber});
+ LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {
+ sql, downstreamJobFullName, downstreamBuildNumber
+ });
try (Connection cnn = ds.getConnection()) {
try (PreparedStatement stmt = cnn.prepareStatement(sql)) {
@@ -1294,26 +1491,33 @@ protected Map listUpstreamPipelinesBasedOnMavenDependencies(@No
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1}): {2}", new Object[]{downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames});
+ LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnMavenDependencies({0}, {1}): {2}", new Object[] {
+ downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames
+ });
return upstreamJobsFullNames;
}
- protected Map listUpstreamPipelinesBasedOnParentProjectDependencies(@NonNull String downstreamJobFullName, int downstreamBuildNumber) {
- LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[]{downstreamJobFullName, downstreamBuildNumber});
-
- String sql = "select distinct upstream_job.full_name, upstream_build.number\n" +
- "from JENKINS_JOB as upstream_job\n" +
- "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n" +
- "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n" +
- "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n" +
- "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false)\n" +
- "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id\n" +
- "inner join JENKINS_JOB as downstream_job on downstream_build.job_id = downstream_job.id\n" +
- "where downstream_job.full_name = ? and downstream_job.jenkins_master_id = ? and downstream_build.number = ? and upstream_job.jenkins_master_id = ?";
+ protected Map listUpstreamPipelinesBasedOnParentProjectDependencies(
+ @NonNull String downstreamJobFullName, int downstreamBuildNumber) {
+ LOGGER.log(Level.FINER, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1})", new Object[] {
+ downstreamJobFullName, downstreamBuildNumber
+ });
+
+ String sql =
+ "select distinct upstream_job.full_name, upstream_build.number\n" + "from JENKINS_JOB as upstream_job\n"
+ + "inner join JENKINS_BUILD as upstream_build on (upstream_job.id = upstream_build.job_id and upstream_job.last_successful_build_number = upstream_build.number)\n"
+ + "inner join GENERATED_MAVEN_ARTIFACT on (upstream_build.id = GENERATED_MAVEN_ARTIFACT.build_id and GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers = false)\n"
+ + "inner join MAVEN_ARTIFACT on GENERATED_MAVEN_ARTIFACT.artifact_id = MAVEN_ARTIFACT.id\n"
+ + "inner join MAVEN_PARENT_PROJECT on (MAVEN_PARENT_PROJECT.artifact_id = MAVEN_ARTIFACT.id and MAVEN_PARENT_PROJECT.ignore_upstream_triggers = false)\n"
+ + "inner join JENKINS_BUILD as downstream_build on MAVEN_PARENT_PROJECT.build_id = downstream_build.id\n"
+ + "inner join JENKINS_JOB as downstream_job on downstream_build.job_id = downstream_job.id\n"
+ + "where downstream_job.full_name = ? and downstream_job.jenkins_master_id = ? and downstream_build.number = ? and upstream_job.jenkins_master_id = ?";
Map upstreamJobsFullNames = new HashMap<>();
- LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[]{sql, downstreamJobFullName, downstreamBuildNumber});
+ LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] {
+ sql, downstreamJobFullName, downstreamBuildNumber
+ });
try (Connection cnn = ds.getConnection()) {
try (PreparedStatement stmt = cnn.prepareStatement(sql)) {
@@ -1330,23 +1534,31 @@ protected Map listUpstreamPipelinesBasedOnParentProjectDependen
} catch (SQLException e) {
throw new RuntimeSqlException(e);
}
- LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[]{downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames});
+ LOGGER.log(Level.FINE, "listUpstreamPipelinesBasedOnParentProjectDependencies({0}, {1}): {2}", new Object[] {
+ downstreamJobFullName, downstreamBuildNumber, upstreamJobsFullNames
+ });
return upstreamJobsFullNames;
}
@NonNull
public Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber) {
- UpstreamMemory upstreamMemory = new UpstreamMemory();
+ UpstreamMemory upstreamMemory = new UpstreamMemory();
return listTransitiveUpstreamJobs(jobFullName, buildNumber, new HashMap<>(), 0, upstreamMemory);
}
@NonNull
- public Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) {
+ public Map listTransitiveUpstreamJobs(
+ @NonNull String jobFullName, int buildNumber, UpstreamMemory upstreamMemory) {
return listTransitiveUpstreamJobs(jobFullName, buildNumber, new HashMap<>(), 0, upstreamMemory);
}
- private Map listTransitiveUpstreamJobs(@NonNull String jobFullName, int buildNumber, Map transitiveUpstreamBuilds, int recursionDepth, UpstreamMemory upstreamMemory) {
+ private Map listTransitiveUpstreamJobs(
+ @NonNull String jobFullName,
+ int buildNumber,
+ Map transitiveUpstreamBuilds,
+ int recursionDepth,
+ UpstreamMemory upstreamMemory) {
Map upstreamBuilds = upstreamMemory.listUpstreamJobs(this, jobFullName, buildNumber);
for (Entry upstreamBuild : upstreamBuilds.entrySet()) {
String upstreamJobFullName = upstreamBuild.getKey();
@@ -1356,7 +1568,12 @@ private Map listTransitiveUpstreamJobs(@NonNull String jobFullN
} else {
transitiveUpstreamBuilds.put(upstreamJobFullName, upstreamBuildNumber);
if (recursionDepth < OPTIMIZATION_MAX_RECURSION_DEPTH) {
- listTransitiveUpstreamJobs(upstreamJobFullName, upstreamBuildNumber, transitiveUpstreamBuilds, recursionDepth++, upstreamMemory);
+ listTransitiveUpstreamJobs(
+ upstreamJobFullName,
+ upstreamBuildNumber,
+ transitiveUpstreamBuilds,
+ recursionDepth++,
+ upstreamMemory);
}
}
}
@@ -1372,17 +1589,18 @@ private Map listTransitiveUpstreamJobs(@NonNull String jobFullN
*/
@NonNull
public List getGeneratedArtifacts(@NonNull String jobFullName, @NonNull int buildNumber) {
- LOGGER.log(Level.FINER, "getGeneratedArtifacts({0}, {1})", new Object[]{jobFullName, buildNumber});
- String generatedArtifactsSql = "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_ARTIFACT.version as base_version, " +
- "GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.repository_url, GENERATED_MAVEN_ARTIFACT.extension" +
- " FROM MAVEN_ARTIFACT " +
- " INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID" +
- " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON GENERATED_MAVEN_ARTIFACT.BUILD_ID = UPSTREAM_BUILD.ID " +
- " INNER JOIN JENKINS_JOB AS UPSTREAM_JOB ON UPSTREAM_BUILD.JOB_ID = UPSTREAM_JOB.ID " +
- " WHERE " +
- " UPSTREAM_JOB.FULL_NAME = ? AND" +
- " UPSTREAM_JOB.JENKINS_MASTER_ID = ? AND" +
- " UPSTREAM_BUILD.NUMBER = ? ";
+ LOGGER.log(Level.FINER, "getGeneratedArtifacts({0}, {1})", new Object[] {jobFullName, buildNumber});
+ String generatedArtifactsSql =
+ "SELECT DISTINCT MAVEN_ARTIFACT.group_id, MAVEN_ARTIFACT.artifact_id, MAVEN_ARTIFACT.type, MAVEN_ARTIFACT.classifier, MAVEN_ARTIFACT.version as base_version, "
+ + "GENERATED_MAVEN_ARTIFACT.version as version, GENERATED_MAVEN_ARTIFACT.repository_url, GENERATED_MAVEN_ARTIFACT.extension"
+ + " FROM MAVEN_ARTIFACT "
+ + " INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID"
+ + " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON GENERATED_MAVEN_ARTIFACT.BUILD_ID = UPSTREAM_BUILD.ID "
+ + " INNER JOIN JENKINS_JOB AS UPSTREAM_JOB ON UPSTREAM_BUILD.JOB_ID = UPSTREAM_JOB.ID "
+ + " WHERE "
+ + " UPSTREAM_JOB.FULL_NAME = ? AND"
+ + " UPSTREAM_JOB.JENKINS_MASTER_ID = ? AND"
+ + " UPSTREAM_BUILD.NUMBER = ? ";
List results = new ArrayList<>();
try (Connection cnn = this.ds.getConnection()) {
@@ -1409,7 +1627,8 @@ public List getGeneratedArtifacts(@NonNull String jobFullName, @N
artifact.setExtension(rst.getString("extension"));
artifact.setSnapshot(artifact.getVersion().endsWith("-SNAPSHOT"));
- // artifact.put("skip_downstream_triggers", rst.getString("GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers"));
+ // artifact.put("skip_downstream_triggers",
+ // rst.getString("GENERATED_MAVEN_ARTIFACT.skip_downstream_triggers"));
results.add(artifact);
}
}
@@ -1429,7 +1648,8 @@ public synchronized Long getJenkinsMasterPrimaryKey(Connection cnn) throws SQLEx
String jenkinsMasterUrl = getJenkinsDetails().getMasterRootUrl();
String jenkinsMasterUrlValueInDb = null;
- try (PreparedStatement stmt = cnn.prepareStatement("SELECT ID, URL FROM JENKINS_MASTER WHERE LEGACY_INSTANCE_ID=?")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("SELECT ID, URL FROM JENKINS_MASTER WHERE LEGACY_INSTANCE_ID=?")) {
stmt.setString(1, jenkinsMasterLegacyInstanceId);
try (ResultSet rst = stmt.executeQuery()) {
if (rst.next()) {
@@ -1439,7 +1659,9 @@ public synchronized Long getJenkinsMasterPrimaryKey(Connection cnn) throws SQLEx
}
}
if (this.jenkinsMasterPrimaryKey == null) { // NOT FOUND IN DB
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)", Statement.RETURN_GENERATED_KEYS)) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)",
+ Statement.RETURN_GENERATED_KEYS)) {
stmt.setString(1, jenkinsMasterLegacyInstanceId);
stmt.setString(2, jenkinsMasterUrl);
stmt.execute();
@@ -1449,13 +1671,18 @@ public synchronized Long getJenkinsMasterPrimaryKey(Connection cnn) throws SQLEx
}
} else { // FOUND IN DB, UPDATE IF NEEDED
if (!Objects.equals(jenkinsMasterUrl, jenkinsMasterUrlValueInDb)) {
- LOGGER.log(Level.INFO, "Update url from \"{0}\" to \"{1}\" for master with legacyId {2}", new Object[]{jenkinsMasterUrlValueInDb, jenkinsMasterUrl, jenkinsMasterLegacyInstanceId});
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_MASTER set URL = ? where ID = ?")) {
+ LOGGER.log(
+ Level.INFO,
+ "Update url from \"{0}\" to \"{1}\" for master with legacyId {2}",
+ new Object[] {jenkinsMasterUrlValueInDb, jenkinsMasterUrl, jenkinsMasterLegacyInstanceId});
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("UPDATE JENKINS_MASTER set URL = ? where ID = ?")) {
stmt.setString(1, jenkinsMasterUrl);
stmt.setLong(2, this.jenkinsMasterPrimaryKey);
int count = stmt.executeUpdate();
if (count != 1) {
- LOGGER.warning("Updated more/less than 1 JENKINS_MASTER.URL=" + jenkinsMasterUrl + " for ID=" + this.jenkinsMasterPrimaryKey);
+ LOGGER.warning("Updated more/less than 1 JENKINS_MASTER.URL=" + jenkinsMasterUrl
+ + " for ID=" + this.jenkinsMasterPrimaryKey);
}
} finally {
cnn.commit();
@@ -1478,8 +1705,15 @@ public String toPrettyString() {
List prettyStrings = new ArrayList<>();
try (Connection cnn = ds.getConnection()) {
prettyStrings.add("JDBC URL: " + cnn.getMetaData().getURL());
- List tables = Arrays.asList("JENKINS_MASTER", "MAVEN_ARTIFACT", "JENKINS_JOB", "JENKINS_BUILD",
- "MAVEN_DEPENDENCY", "GENERATED_MAVEN_ARTIFACT", "MAVEN_PARENT_PROJECT", "JENKINS_BUILD_UPSTREAM_CAUSE");
+ List tables = Arrays.asList(
+ "JENKINS_MASTER",
+ "MAVEN_ARTIFACT",
+ "JENKINS_JOB",
+ "JENKINS_BUILD",
+ "MAVEN_DEPENDENCY",
+ "GENERATED_MAVEN_ARTIFACT",
+ "MAVEN_PARENT_PROJECT",
+ "JENKINS_BUILD_UPSTREAM_CAUSE");
for (String table : tables) {
try (Statement stmt = cnn.createStatement()) {
try (ResultSet rst = stmt.executeQuery("SELECT count(*) FROM " + table)) {
@@ -1487,7 +1721,8 @@ public String toPrettyString() {
int count = rst.getInt(1);
prettyStrings.add("Table " + table + ": " + count + " rows");
} else {
- prettyStrings.add("Table " + table + ": #IllegalStateException 'select count(*)' didn't return any row#");
+ prettyStrings.add("Table " + table
+ + ": #IllegalStateException 'select count(*)' didn't return any row#");
}
}
} catch (SQLException e) {
@@ -1500,7 +1735,8 @@ public String toPrettyString() {
LOGGER.log(Level.WARNING, "SQLException getting a connection to " + ds, e);
}
- StringBuilder result = new StringBuilder(StringUtils.substringAfterLast(getClass().getName(), ".") + " - " + getDatabaseDescription());
+ StringBuilder result = new StringBuilder(
+ StringUtils.substringAfterLast(getClass().getName(), ".") + " - " + getDatabaseDescription());
for (String prettyString : prettyStrings) {
result.append("\r\n\t").append(prettyString);
}
@@ -1510,62 +1746,80 @@ public String toPrettyString() {
protected String getDatabaseDescription() {
try (Connection cnn = ds.getConnection()) {
DatabaseMetaData metaData = cnn.getMetaData();
- return metaData. getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
+ return metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
} catch (SQLException e) {
return "#" + e.toString() + "#";
}
}
@Override
- public void updateBuildOnCompletion(@NonNull String jobFullName, int buildNumber, int buildResultOrdinal, long startTimeInMillis, long durationInMillis) {
- LOGGER.log(Level.FINE, "updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}",
- new Object[]{jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis});
+ public void updateBuildOnCompletion(
+ @NonNull String jobFullName,
+ int buildNumber,
+ int buildResultOrdinal,
+ long startTimeInMillis,
+ long durationInMillis) {
+ LOGGER.log(
+ Level.FINE,
+ "updateBuildOnCompletion({0}, {1}, result: {2}, startTime): {3}, duration: {4}",
+ new Object[] {jobFullName, buildNumber, buildResultOrdinal, startTimeInMillis, durationInMillis});
long buildPrimaryKey = getOrCreateBuildPrimaryKey(jobFullName, buildNumber);
try (Connection cnn = ds.getConnection()) {
cnn.setAutoCommit(false);
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_BUILD " +
- "SET RESULT_ID = ?, START_TIME = ?, DURATION_IN_MILLIS = ? " +
- "WHERE ID = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_BUILD "
+ + "SET RESULT_ID = ?, START_TIME = ?, DURATION_IN_MILLIS = ? " + "WHERE ID = ?")) {
stmt.setInt(1, buildResultOrdinal);
stmt.setTimestamp(2, new Timestamp(startTimeInMillis));
stmt.setLong(3, durationInMillis);
stmt.setLong(4, buildPrimaryKey);
int count = stmt.executeUpdate();
if (count != 1) {
- LOGGER.log(Level.WARNING, "updateBuildOnCompletion - more/less than 1 JENKINS_BUILD record updated (" +
- count + ") for " + jobFullName + "#" + buildNumber + ", buildPrimaryKey=" + buildPrimaryKey);
+ LOGGER.log(
+ Level.WARNING,
+ "updateBuildOnCompletion - more/less than 1 JENKINS_BUILD record updated (" + count
+ + ") for " + jobFullName + "#" + buildNumber + ", buildPrimaryKey="
+ + buildPrimaryKey);
}
}
if (Result.SUCCESS.ordinal == buildResultOrdinal) {
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) {
stmt.setInt(1, buildNumber);
stmt.setInt(2, buildNumber);
stmt.setString(3, jobFullName);
stmt.setLong(4, getJenkinsMasterPrimaryKey(cnn));
int count = stmt.executeUpdate();
if (count != 1) {
- LOGGER.log(Level.WARNING, "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" +
- count + ") for " + jobFullName + "#" + buildNumber);
+ LOGGER.log(
+ Level.WARNING,
+ "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" + count
+ + ") for " + jobFullName + "#" + buildNumber);
}
}
} else {
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ? where FULL_NAME = ? and JENKINS_MASTER_ID = ?")) {
stmt.setInt(1, buildNumber);
stmt.setString(2, jobFullName);
stmt.setLong(3, getJenkinsMasterPrimaryKey(cnn));
int count = stmt.executeUpdate();
if (count != 1) {
- LOGGER.log(Level.WARNING, "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" +
- count + ") for " + jobFullName + "#" + buildNumber);
+ LOGGER.log(
+ Level.WARNING,
+ "updateBuildOnCompletion - more/less than 1 JENKINS_JOB record updated (" + count
+ + ") for " + jobFullName + "#" + buildNumber);
}
}
}
cnn.commit();
} catch (SQLException e) {
- throw new RuntimeSqlException("Exception updating build " + jobFullName + "#" + buildNumber + " with result " + buildResultOrdinal, e);
+ throw new RuntimeSqlException(
+ "Exception updating build " + jobFullName + "#" + buildNumber + " with result "
+ + buildResultOrdinal,
+ e);
}
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java
index 5f63879f..db9e65a5 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2Dao.java
@@ -24,13 +24,8 @@
package org.jenkinsci.plugins.pipeline.maven.db;
-import hudson.Extension;
-import jenkins.model.Jenkins;
-import org.h2.jdbcx.JdbcConnectionPool;
-
import edu.umd.cs.findbugs.annotations.NonNull;
-
-import javax.sql.DataSource;
+import hudson.Extension;
import java.io.File;
import java.io.IOException;
import java.sql.Connection;
@@ -38,6 +33,9 @@
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
+import javax.sql.DataSource;
+import jenkins.model.Jenkins;
+import org.h2.jdbcx.JdbcConnectionPool;
/**
* @author Cyrille Le Clerc
@@ -63,18 +61,20 @@ public String getDescription() {
}
public PipelineMavenPluginH2Dao(@NonNull File rootDir) {
- this(JdbcConnectionPool.create("jdbc:h2:file:" + new File(rootDir, "jenkins-jobs").getAbsolutePath() + ";" +
- "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE", "sa", "sa"));
+ this(JdbcConnectionPool.create(
+ "jdbc:h2:file:" + new File(rootDir, "jenkins-jobs").getAbsolutePath() + ";"
+ + "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE",
+ "sa",
+ "sa"));
}
-
-
@Override
protected void registerJdbcDriver() {
try {
Class.forName("org.h2.Driver");
} catch (ClassNotFoundException e) {
- throw new RuntimeException("H2 driver 'org.h2.Driver' not found. Please install the 'H2 Database Plugin' to install the H2 driver");
+ throw new RuntimeException(
+ "H2 driver 'org.h2.Driver' not found. Please install the 'H2 Database Plugin' to install the H2 driver");
}
}
@@ -129,7 +129,7 @@ public void close() throws IOException {
stmt.execute("SHUTDOWN");
}
} catch (SQLException e) {
- if (e.getErrorCode() == 90121) {
+ if (e.getErrorCode() == 90121) {
// DATABASE_CALLED_AT_SHUTDOWN (the JVM shutdown hooks are running already :-o )
LOGGER.log(Level.FINE, "Failed to close the database as it is already closed", e);
} else {
@@ -150,8 +150,7 @@ public String getDefaultJdbcUrl() {
throw new IllegalStateException("Failure to create database root dir " + databaseRootDir);
}
}
- return "jdbc:h2:file:" + new File(databaseRootDir, "jenkins-jobs").getAbsolutePath() + ";" +
- "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE";
+ return "jdbc:h2:file:" + new File(databaseRootDir, "jenkins-jobs").getAbsolutePath() + ";"
+ + "AUTO_SERVER=TRUE;MULTI_THREADED=1;QUERY_CACHE_SIZE=25;JMX=TRUE";
}
-
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java
index 2f3e7db6..6b10c3ec 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDao.java
@@ -25,19 +25,18 @@
package org.jenkinsci.plugins.pipeline.maven.db;
import com.mysql.cj.exceptions.MysqlErrorNumbers;
-import hudson.Extension;
-import org.apache.commons.lang.StringUtils;
-import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
-import javax.sql.DataSource;
+import hudson.Extension;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
+import javax.sql.DataSource;
+import org.apache.commons.lang.StringUtils;
+import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException;
/**
* @author Cyrille Le Clerc
@@ -61,25 +60,24 @@ public String getDescription() {
* @return {@code null} if this is not a MariaDB version, the MariaDB server version (e.g. "10.2.20", "10.3.11") if parsed, the entire {@link DatabaseMetaData#getDatabaseProductVersion()} if the parsing oof the MariaDB server version failed
*/
@Nullable
- public static String extractMariaDbVersion(@Nullable String jdbcDatabaseProductVersion) {
+ public static String extractMariaDbVersion(@Nullable String jdbcDatabaseProductVersion) {
if (jdbcDatabaseProductVersion == null) {
return null;
}
- if(!jdbcDatabaseProductVersion.contains("MariaDB")) {
+ if (!jdbcDatabaseProductVersion.contains("MariaDB")) {
return null;
}
String mariaDbVersion = StringUtils.substringBetween(jdbcDatabaseProductVersion, "-", "-MariaDB");
if (mariaDbVersion == null) { // MariaDB version format has changed.
- return jdbcDatabaseProductVersion;
+ return jdbcDatabaseProductVersion;
} else {
return mariaDbVersion;
}
}
-
public PipelineMavenPluginMySqlDao(@NonNull DataSource ds) {
super(ds);
}
@@ -91,7 +89,7 @@ public String getJdbcScheme() {
@Override
protected void handleDatabaseInitialisationException(SQLException e) {
- if ( MysqlErrorNumbers.SQL_STATE_ILLEGAL_ARGUMENT.equals(e.getSQLState())) {
+ if (MysqlErrorNumbers.SQL_STATE_ILLEGAL_ARGUMENT.equals(e.getSQLState())) {
LOGGER.log(Level.FINE, "Ignore sql exception " + e.getErrorCode() + " - " + e.getSQLState(), e);
} else if (MysqlErrorNumbers.ER_EMPTY_QUERY == e.getErrorCode()) {
LOGGER.log(Level.FINE, "Ignore sql exception " + e.getErrorCode() + " - " + e.getSQLState(), e);
@@ -108,7 +106,8 @@ protected void registerJdbcDriver() {
try {
Class.forName("com.mysql.cj.jdbc.Driver");
} catch (ClassNotFoundException e) {
- throw new RuntimeException("MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver");
+ throw new RuntimeException(
+ "MySql driver 'com.mysql.cj.jdbc.Driver' not found. Please install the 'MySQL Database Plugin' to install the MySql driver");
}
}
@@ -116,7 +115,7 @@ protected void registerJdbcDriver() {
protected String getDatabaseDescription() {
try (Connection cnn = getDataSource().getConnection()) {
DatabaseMetaData metaData = cnn.getMetaData();
- String version = metaData. getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
+ String version = metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
try (Statement stmt = cnn.createStatement()) {
try (ResultSet rst = stmt.executeQuery("select AURORA_VERSION()")) {
rst.next();
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java
index 130d1798..f6300ed0 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDao.java
@@ -24,11 +24,8 @@
package org.jenkinsci.plugins.pipeline.maven.db;
-import hudson.Extension;
-import org.postgresql.util.PSQLState;
-
import edu.umd.cs.findbugs.annotations.NonNull;
-import javax.sql.DataSource;
+import hudson.Extension;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
@@ -36,6 +33,8 @@
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
+import javax.sql.DataSource;
+import org.postgresql.util.PSQLState;
/**
* @author Cyrille Le Clerc
@@ -51,7 +50,6 @@ public PipelineMavenPluginPostgreSqlDao(@NonNull DataSource ds) {
super(ds);
}
-
@Override
public String getDescription() {
return Messages.dao_postgesql_description();
@@ -67,7 +65,8 @@ protected void registerJdbcDriver() {
try {
Class.forName("org.postgresql.Driver");
} catch (ClassNotFoundException e) {
- throw new RuntimeException("PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL API Plugin' to install the PostgreSQL driver");
+ throw new RuntimeException(
+ "PostgreSQL driver 'org.postgresql.Driver' not found. Please install the 'PostgreSQL API Plugin' to install the PostgreSQL driver");
}
}
@@ -75,13 +74,15 @@ protected void registerJdbcDriver() {
protected String getDatabaseDescription() {
try (Connection cnn = getDataSource().getConnection()) {
DatabaseMetaData metaData = cnn.getMetaData();
- String version = metaData. getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
+ String version = metaData.getDatabaseProductName() + " " + metaData.getDatabaseProductVersion();
try (Statement stmt = cnn.createStatement()) {
try (ResultSet rst = stmt.executeQuery("select AURORA_VERSION()")) {
rst.next();
version += " / Amazon Aurora " + rst.getString(1);
} catch (SQLException e) {
- if (PSQLState.UNDEFINED_FUNCTION.getState().equals(e.getSQLState())) { // " 42883 - ERROR: function aurora_version() does not exist"
+ if (PSQLState.UNDEFINED_FUNCTION
+ .getState()
+ .equals(e.getSQLState())) { // " 42883 - ERROR: function aurora_version() does not exist"
// not Amazon aurora, the function aurora_version() does not exist
} else {
LOGGER.log(Level.WARNING, "Exception checking Amazon Aurora version", e);
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java
index 90dcb470..62949c37 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/MigrationStep.java
@@ -1,11 +1,10 @@
package org.jenkinsci.plugins.pipeline.maven.db.migration;
-import jenkins.model.Jenkins;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Objects;
+import jenkins.model.Jenkins;
public interface MigrationStep {
void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException;
@@ -20,7 +19,7 @@ public String getMasterLegacyInstanceId() {
}
@NonNull
- public String getMasterRootUrl(){
+ public String getMasterRootUrl() {
return Objects.toString(Jenkins.get().getRootUrl(), "");
}
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java
index 2458a247..262c095b 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep10.java
@@ -1,20 +1,19 @@
package org.jenkinsci.plugins.pipeline.maven.db.migration.h2;
-import hudson.model.Run;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
+import hudson.model.Run;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
public class MigrationStep10 implements MigrationStep {
- private final static Logger LOGGER = Logger.getLogger(MigrationStep10.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(MigrationStep10.class.getName());
@Override
public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException {
@@ -25,9 +24,9 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta
try (ResultSet rst = stmt.executeQuery()) {
while (rst.next()) {
count++;
- if ((count < 100 && (count % 10) == 0) ||
- (count < 500 && (count % 20) == 0) ||
- ((count % 50) == 0)) {
+ if ((count < 100 && (count % 10) == 0)
+ || (count < 500 && (count % 20) == 0)
+ || ((count % 50) == 0)) {
LOGGER.log(Level.INFO, "#" + count + " - " + rst.getString("FULL_NAME") + "...");
}
@@ -42,11 +41,12 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta
}
}
LOGGER.info("Successfully upgraded table JENKINS_JOB, " + count + " records upgraded");
-
}
- protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber) throws SQLException {
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) {
+ protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber)
+ throws SQLException {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) {
stmt.setInt(1, lastBuildNumber);
// TRICK we assume that the last build is successful
stmt.setInt(2, lastBuildNumber);
@@ -60,7 +60,8 @@ protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPr
*/
@Nullable
protected Integer findLastBuildNumber(@NonNull Connection cnn, long jobPrimaryKey) throws SQLException {
- try (PreparedStatement stmt2 = cnn.prepareStatement("SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) {
+ try (PreparedStatement stmt2 = cnn.prepareStatement(
+ "SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) {
stmt2.setLong(1, jobPrimaryKey);
try (ResultSet rst2 = stmt2.executeQuery()) {
if (rst2.next()) {
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java
index 88d9b111..7a6485c1 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep11.java
@@ -1,23 +1,22 @@
package org.jenkinsci.plugins.pipeline.maven.db.migration.h2;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.model.Cause;
import hudson.model.Job;
import hudson.model.Run;
-import jenkins.model.Jenkins;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
+import jenkins.model.Jenkins;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
public class MigrationStep11 implements MigrationStep {
- private final static Logger LOGGER = Logger.getLogger(MigrationStep11.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(MigrationStep11.class.getName());
@Override
public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException {
@@ -25,25 +24,26 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta
int buildCauseCount = 0;
LOGGER.info("Upgrade table JENKINS_BUILD_UPSTREAM_CAUSE...");
- String select = "select jenkins_job.full_name, jenkins_job.jenkins_master_id, jenkins_build.number, jenkins_build.id " +
- " from jenkins_build inner join jenkins_job on jenkins_build.job_id = jenkins_job.id order by jenkins_job.full_name, jenkins_build.number";
+ String select =
+ "select jenkins_job.full_name, jenkins_job.jenkins_master_id, jenkins_build.number, jenkins_build.id "
+ + " from jenkins_build inner join jenkins_job on jenkins_build.job_id = jenkins_job.id order by jenkins_job.full_name, jenkins_build.number";
- String insert = " insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) " +
- " select upstream_build.id, ? " +
- " from jenkins_build as upstream_build, jenkins_job as upstream_job " +
- " where " +
- " upstream_build.job_id = upstream_job.id and" +
- " upstream_job.full_name = ? and" +
- " upstream_job.jenkins_master_id = ? and" +
- " upstream_build.number = ? ";
+ String insert = " insert into JENKINS_BUILD_UPSTREAM_CAUSE (upstream_build_id, downstream_build_id) "
+ + " select upstream_build.id, ? "
+ + " from jenkins_build as upstream_build, jenkins_job as upstream_job "
+ + " where "
+ + " upstream_build.job_id = upstream_job.id and"
+ + " upstream_job.full_name = ? and"
+ + " upstream_job.jenkins_master_id = ? and"
+ + " upstream_build.number = ? ";
try (PreparedStatement insertStmt = cnn.prepareStatement(insert)) {
try (PreparedStatement selectStmt = cnn.prepareStatement(select)) {
try (ResultSet rst = selectStmt.executeQuery()) {
while (rst.next()) {
jobCount++;
- if ((jobCount < 100 && (jobCount % 10) == 0) ||
- (jobCount < 500 && (jobCount % 20) == 0) ||
- ((jobCount % 50) == 0)) {
+ if ((jobCount < 100 && (jobCount % 10) == 0)
+ || (jobCount < 500 && (jobCount % 20) == 0)
+ || ((jobCount % 50) == 0)) {
LOGGER.log(Level.INFO, "#" + jobCount + " - " + rst.getString("FULL_NAME") + "...");
}
@@ -77,20 +77,24 @@ public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDeta
}
}
} catch (RuntimeException e) {
- LOGGER.log(Level.WARNING, "Silently ignore exception migrating build " + jobFullName + "#" + buildNumber, e);
+ LOGGER.log(
+ Level.WARNING,
+ "Silently ignore exception migrating build " + jobFullName + "#" + buildNumber,
+ e);
}
-
}
insertStmt.executeBatch();
}
}
}
- LOGGER.info("Successfully upgraded table JENKINS_BUILD_UPSTREAM_CAUSE, " + jobCount + " jobs scanned, " + buildCauseCount + " job causes inserted");
-
+ LOGGER.info("Successfully upgraded table JENKINS_BUILD_UPSTREAM_CAUSE, " + jobCount + " jobs scanned, "
+ + buildCauseCount + " job causes inserted");
}
- protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber) throws SQLException {
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) {
+ protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPrimaryKey, int lastBuildNumber)
+ throws SQLException {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "UPDATE JENKINS_JOB set LAST_BUILD_NUMBER = ?, LAST_SUCCESSFUL_BUILD_NUMBER = ? where ID = ?")) {
stmt.setInt(1, lastBuildNumber);
// TRICK we assume that the last build is successful
stmt.setInt(2, lastBuildNumber);
@@ -104,7 +108,8 @@ protected void updateJenkinsJobRecord(@NonNull Connection cnn, long jenkinsJobPr
*/
@Nullable
protected Integer findLastBuildNumber(@NonNull Connection cnn, long jobPrimaryKey) throws SQLException {
- try (PreparedStatement stmt2 = cnn.prepareStatement("SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) {
+ try (PreparedStatement stmt2 = cnn.prepareStatement(
+ "SELECT * FROM JENKINS_BUILD WHERE JOB_ID = ? ORDER BY JENKINS_BUILD.NUMBER DESC LIMIT 1")) {
stmt2.setLong(1, jobPrimaryKey);
try (ResultSet rst2 = stmt2.executeQuery()) {
if (rst2.next()) {
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java
index 0bc3143d..b587ca06 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/h2/MigrationStep8.java
@@ -1,12 +1,11 @@
package org.jenkinsci.plugins.pipeline.maven.db.migration.h2;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
public class MigrationStep8 implements MigrationStep {
@@ -22,7 +21,9 @@ public void execute(Connection cnn, JenkinsDetails jenkinsDetails) throws SQLExc
}
}
if (masterId == null) {
- try (PreparedStatement stmt = cnn.prepareStatement("INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)", Statement.RETURN_GENERATED_KEYS)) {
+ try (PreparedStatement stmt = cnn.prepareStatement(
+ "INSERT INTO JENKINS_MASTER(LEGACY_INSTANCE_ID, URL) values (?, ?)",
+ Statement.RETURN_GENERATED_KEYS)) {
stmt.setString(1, jenkinsDetails.getMasterLegacyInstanceId());
stmt.setString(2, jenkinsDetails.getMasterRootUrl());
stmt.execute();
@@ -35,7 +36,8 @@ public void execute(Connection cnn, JenkinsDetails jenkinsDetails) throws SQLExc
}
}
}
- try (PreparedStatement stmt = cnn.prepareStatement("UPDATE JENKINS_JOB set JENKINS_MASTER_ID=? where JENKINS_MASTER_ID IS NULL")) {
+ try (PreparedStatement stmt =
+ cnn.prepareStatement("UPDATE JENKINS_JOB set JENKINS_MASTER_ID=? where JENKINS_MASTER_ID IS NULL")) {
stmt.setInt(1, masterId);
stmt.execute();
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java
index 3d77498d..26c7223e 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/migration/mysql/MigrationStep12.java
@@ -1,29 +1,30 @@
package org.jenkinsci.plugins.pipeline.maven.db.migration.mysql;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
public class MigrationStep12 implements MigrationStep {
- private final static Logger LOGGER = Logger.getLogger(MigrationStep12.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(MigrationStep12.class.getName());
@Override
public void execute(@NonNull Connection cnn, @NonNull JenkinsDetails jenkinsDetails) throws SQLException {
try (Statement stmt = cnn.createStatement()) {
stmt.execute("ALTER TABLE MAVEN_ARTIFACT MODIFY COLUMN VERSION varchar(100)");
- LOGGER.log(Level.INFO, "Successfully resized column MAVEN_ARTIFACT.VERSION to varchar(100)" );
+ LOGGER.log(Level.INFO, "Successfully resized column MAVEN_ARTIFACT.VERSION to varchar(100)");
} catch (SQLException e) {
// some old mysql version may not accept the resize due to constraints on the index size
- LOGGER.log(Level.WARNING, "Silently ignore failure to resize column MAVEN_ARTIFACT.VERSION to varchar(100). " +
- "It is probably caused by the old version of the MySQL engine, it will not restrict the capabilities, " +
- "it will just continue to restrict the max size of the maven_artifact.version column to 56 chars" );
+ LOGGER.log(
+ Level.WARNING,
+ "Silently ignore failure to resize column MAVEN_ARTIFACT.VERSION to varchar(100). "
+ + "It is probably caused by the old version of the MySQL engine, it will not restrict the capabilities, "
+ + "it will just continue to restrict the max size of the maven_artifact.version column to 56 chars");
}
}
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java
index deb95b51..4af6cf57 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtils.java
@@ -1,9 +1,8 @@
package org.jenkinsci.plugins.pipeline.maven.db.util;
-import java.io.InputStream;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
+import java.io.InputStream;
/**
* @author Cyrille Le Clerc
@@ -18,5 +17,4 @@ public static InputStream getResourceAsStream(@NonNull String resourcePath) {
}
return result;
}
-
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java
index 495440b6..a4fc5b0c 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/RuntimeSqlException.java
@@ -24,7 +24,8 @@ public RuntimeSqlException(Throwable cause) {
super(cause);
}
- protected RuntimeSqlException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+ protected RuntimeSqlException(
+ String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
diff --git a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java
index de4d1f9e..82324d20 100644
--- a/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java
+++ b/pipeline-maven-database/src/main/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlUtils.java
@@ -10,9 +10,8 @@
*/
public class SqlUtils {
- private SqlUtils() {
+ private SqlUtils() {}
- }
public static void dumpResultsetMetadata(ResultSet rst, PrintStream out) {
try {
ResultSetMetaData metaData = rst.getMetaData();
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java
index ef658a1d..d9048ec9 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginDaoAbstractTest.java
@@ -26,15 +26,15 @@
import static org.assertj.core.api.Assertions.assertThat;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import hudson.model.Result;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.stream.Collectors;
-
import javax.sql.DataSource;
-
import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
import org.jenkinsci.plugins.pipeline.maven.db.util.SqlTestsUtils;
@@ -43,9 +43,6 @@
import org.junit.jupiter.api.Test;
import org.jvnet.hudson.test.Issue;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import hudson.model.Result;
-
/**
* @author Cyrille Le Clerc
*/
@@ -58,7 +55,13 @@ public abstract class PipelineMavenPluginDaoAbstractTest {
@BeforeEach
public void before() throws Exception {
ds = before_newDataSource();
- SqlTestsUtils.silentlyDeleteTableRows(ds, "JENKINS_MASTER", "JENKINS_JOB", "JENKINS_BUILD", "MAVEN_ARTIFACT", "MAVEN_DEPENDENCY",
+ SqlTestsUtils.silentlyDeleteTableRows(
+ ds,
+ "JENKINS_MASTER",
+ "JENKINS_JOB",
+ "JENKINS_BUILD",
+ "MAVEN_ARTIFACT",
+ "MAVEN_DEPENDENCY",
"GENERATED_MAVEN_ARTIFACT");
dao = before_newAbstractPipelineMavenPluginDao(ds);
}
@@ -96,11 +99,13 @@ public void getOrCreateArtifactPrimaryKey() throws Exception {
@Test
public void getOrCreateArtifactPrimaryKey_jarWithDependencies() throws Exception {
- long primaryKey = dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies");
+ long primaryKey =
+ dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies");
System.out.println(primaryKey);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
- long primaryKeySecondCall = dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies");
+ long primaryKeySecondCall =
+ dao.getOrCreateArtifactPrimaryKey("com.example", "my-bundle", "1.2.3", "jar", "jar-with-dependencies");
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
assertThat(primaryKeySecondCall).isEqualTo(primaryKey);
@@ -121,7 +126,6 @@ public void getOrCreateJobPrimaryKey() throws Exception {
SqlTestsUtils.dump("select * from JENKINS_BUILD", ds, System.out);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
-
}
@Test
@@ -140,11 +144,12 @@ public void create_job_and_2_builds() throws Exception {
SqlTestsUtils.dump("select * from JENKINS_JOB", ds, System.out);
SqlTestsUtils.dump("select * from JENKINS_BUILD", ds, System.out);
- assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB where FULL_NAME='my-pipeline' AND LAST_BUILD_NUMBER=2 AND LAST_SUCCESSFUL_BUILD_NUMBER=2",
- ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows(
+ "select * from JENKINS_JOB where FULL_NAME='my-pipeline' AND LAST_BUILD_NUMBER=2 AND LAST_SUCCESSFUL_BUILD_NUMBER=2",
+ ds))
+ .isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(2);
-
}
@Test
@@ -177,7 +182,6 @@ public void create_job_and_3_builds_and_delete_builds() throws Exception {
dao.deleteBuild("my-pipeline", 3);
System.out.println("AFTER DELETE LAST BUILD");
SqlTestsUtils.dump("select * from JENKINS_JOB", ds, System.out);
-
}
@Test
@@ -185,14 +189,18 @@ public void record_one_dependency() throws Exception {
dao.recordDependency("my-pipeline", 1, "com.h2.database", "h2", "1.4.196", "jar", "compile", false, null);
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(1);
List mavenDependencies = dao.listDependencies("my-pipeline", 1);
assertThat(mavenDependencies).hasSize(1);
@@ -203,22 +211,26 @@ public void record_one_dependency() throws Exception {
assertThat(dependency.getVersion()).isEqualTo("1.4.196");
assertThat(dependency.getType()).isEqualTo("jar");
assertThat(dependency.getScope()).isEqualTo("compile");
-
}
@Test
public void record_one_parent_project() throws Exception {
- dao.recordParentProject("my-pipeline", 1, "org.springframework.boot", "spring-boot-starter-parent", "1.5.4.RELEASE", false);
+ dao.recordParentProject(
+ "my-pipeline", 1, "org.springframework.boot", "spring-boot-starter-parent", "1.5.4.RELEASE", false);
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_PARENT_PROJECT", ds, System.out);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_PARENT_PROJECT", ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_PARENT_PROJECT", ds))
+ .isEqualTo(1);
}
@Test
@@ -228,18 +240,23 @@ public void rename_job() throws Exception {
dao.renameJob("my-pipeline-name-1", "my-pipeline-name-2");
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB", ds)).isEqualTo(1);
- assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB WHERE FULL_NAME='my-pipeline-name-2'", ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB WHERE FULL_NAME='my-pipeline-name-2'", ds))
+ .isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(1);
}
@Test
@@ -250,7 +267,10 @@ public void delete_job() throws Exception {
dao.deleteJob("my-pipeline");
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
@@ -259,7 +279,8 @@ public void delete_job() throws Exception {
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(0);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(0);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(0);
}
@Test
@@ -272,7 +293,10 @@ public void delete_build() throws Exception {
dao.deleteBuild("my-pipeline", 2);
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
@@ -281,7 +305,8 @@ public void delete_build() throws Exception {
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(2);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(2);
}
@Test
@@ -290,31 +315,59 @@ public void move_build() throws Exception {
dao.recordDependency("my-pipeline", 1, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null);
dao.renameJob("my-pipeline", "my-new-pipeline");
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB", ds)).isEqualTo(1);
- assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB where full_name='my-new-pipeline'", ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB where full_name='my-new-pipeline'", ds))
+ .isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(1);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(1);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(1);
}
@Test
public void record_two_generated_artifacts_on_the_same_build() throws Exception {
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_JOB", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2);
- assertThat(SqlTestsUtils.countRows("select * from GENERATED_MAVEN_ARTIFACT", ds)).isEqualTo(2);
+ assertThat(SqlTestsUtils.countRows("select * from GENERATED_MAVEN_ARTIFACT", ds))
+ .isEqualTo(2);
List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1);
assertThat(generatedArtifacts).hasSize(2);
@@ -326,10 +379,12 @@ public void record_two_generated_artifacts_on_the_same_build() throws Exception
assertThat(generatedArtifact.getExtension()).isIn("war", "jar");
}
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from GENERATED_MAVEN_ARTIFACT", ds, System.out);
-
}
@Test
@@ -343,12 +398,15 @@ public void record_two_dependencies_on_the_same_build() throws Exception {
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(1);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(2);
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(2);
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
-
}
@Test
@@ -359,7 +417,10 @@ public void record_two_dependencies_on_consecutive_builds_of_the_same_job() thro
dao.recordDependency("my-pipeline", 2, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null);
dao.recordDependency("my-pipeline", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
@@ -368,19 +429,24 @@ public void record_two_dependencies_on_consecutive_builds_of_the_same_job() thro
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(2);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(4);
-
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(4);
}
@Test
public void record_two_dependencies_on_two_jobs() throws Exception {
dao.recordDependency("my-pipeline-1", 1, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null);
- dao.recordDependency("my-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordDependency(
+ "my-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
dao.recordDependency("my-pipeline-2", 2, "com.h2database", "h2", "1.4.196", "jar", "compile", false, null);
- dao.recordDependency("my-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordDependency(
+ "my-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
SqlTestsUtils.dump("select * from MAVEN_ARTIFACT", ds, System.out);
SqlTestsUtils.dump("select * from MAVEN_DEPENDENCY", ds, System.out);
@@ -389,8 +455,8 @@ public void record_two_dependencies_on_two_jobs() throws Exception {
assertThat(SqlTestsUtils.countRows("select * from JENKINS_BUILD", ds)).isEqualTo(2);
assertThat(SqlTestsUtils.countRows("select * from MAVEN_ARTIFACT", ds)).isEqualTo(2);
- assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds)).isEqualTo(4);
-
+ assertThat(SqlTestsUtils.countRows("select * from MAVEN_DEPENDENCY", ds))
+ .isEqualTo(4);
}
@Deprecated
@@ -398,31 +464,85 @@ public void record_two_dependencies_on_two_jobs() throws Exception {
public void listDownstreamJobs_upstream_jar_triggers_downstream_pipelines() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-SNAPSHOT",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1);
assertThat(downstreamPipelinesForBuild1).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-SNAPSHOT",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-SNAPSHOT",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2);
assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1");
@@ -432,17 +552,44 @@ public void listDownstreamJobs_upstream_jar_triggers_downstream_pipelines() {
public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipelines() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-SNAPSHOT",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -453,7 +600,8 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
@@ -462,15 +610,42 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli
}
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-SNAPSHOT",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-SNAPSHOT",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -481,7 +656,8 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1");
@@ -494,29 +670,91 @@ public void listDownstreamJobsByArtifact_upstream_jar_triggers_downstream_pipeli
public void listDownstreamJobsByArtifact_upstream_jar_with_classifier_triggers_downstream_pipelines() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "aType", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "aType",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "anotherType", "1.0-SNAPSHOT", null, false,
- "jar", "aClassifier");
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "anotherType",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ "aClassifier");
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "aType", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "aType",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "aType", "compile", false, "whatever");
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "aType",
+ "compile",
+ false,
+ "whatever");
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-3", 1);
- dao.recordDependency("my-downstream-pipeline-3", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "whatever", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-3", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-3",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "whatever",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-3", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-4", 1);
- dao.recordDependency("my-downstream-pipeline-4", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "anotherType", "compile", false, "aClassifier");
- dao.updateBuildOnCompletion("my-downstream-pipeline-4", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-4",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "anotherType",
+ "compile",
+ false,
+ "aClassifier");
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-4", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
System.out.println(downstreamJobsByArtifactForBuild1);
assertThat(downstreamJobsByArtifactForBuild1).hasSize(2);
@@ -543,18 +781,68 @@ public void listDownstreamJobsByArtifact_upstream_jar_with_classifier_triggers_d
public void listDownstreamJobsByArtifact_doesnt_return_artifacts_with_no_pipelines() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-shared", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-2",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-2", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordDependency(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
null);
- dao.recordDependency("my-upstream-pipeline-1", 1, "com.mycompany", "upstream-shared", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "upstream-1", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "upstream-1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -565,7 +853,8 @@ public void listDownstreamJobsByArtifact_doesnt_return_artifacts_with_no_pipelin
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
System.out.println(downstreamJobsByArtifactForBuild1);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
@@ -578,14 +867,35 @@ public void listDownstreamJobsByArtifact_doesnt_return_artifacts_with_no_pipelin
@Test
public void listDownstreamPipelinesBasedOnMavenDependencies_noBaseVersion() {
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "dependency-1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-2",
+ 1,
+ "com.mycompany",
+ "dependency-1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22);
- SortedSet downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-SNAPSHOT", null, "jar");
+ SortedSet downstreamJobs =
+ dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-SNAPSHOT", null, "jar");
System.out.println(downstreamJobs);
assertThat(downstreamJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
}
@@ -593,14 +903,35 @@ public void listDownstreamPipelinesBasedOnMavenDependencies_noBaseVersion() {
@Test
public void listDownstreamPipelinesBasedOnMavenDependencies_withBaseVersion() {
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "dependency-1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-2",
+ 1,
+ "com.mycompany",
+ "dependency-1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22);
- SortedSet downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "jar");
+ SortedSet downstreamJobs =
+ dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "jar");
System.out.println(downstreamJobs);
assertThat(downstreamJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
}
@@ -608,24 +939,48 @@ public void listDownstreamPipelinesBasedOnMavenDependencies_withBaseVersion() {
@Test
public void listDownstreamPipelinesBasedOnMavenDependencies_withClassifier() {
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "aType", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "dependency-1",
+ "1.0-SNAPSHOT",
+ "aType",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "dependency-1", "1.0-SNAPSHOT", "anotherType", "compile", false, "aClassifier");
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-2",
+ 1,
+ "com.mycompany",
+ "dependency-1",
+ "1.0-SNAPSHOT",
+ "anotherType",
+ "compile",
+ false,
+ "aClassifier");
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 2222, 22);
- SortedSet downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType");
+ SortedSet downstreamJobs = dao.listDownstreamJobs(
+ "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType");
System.out.println(downstreamJobs);
assertThat(downstreamJobs).contains("my-downstream-pipeline-1");
- downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType", "whatever");
+ downstreamJobs = dao.listDownstreamJobs(
+ "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "aType", "whatever");
assertThat(downstreamJobs).isEmpty();
- downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever");
+ downstreamJobs = dao.listDownstreamJobs(
+ "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever");
assertThat(downstreamJobs).isEmpty();
- downstreamJobs = dao.listDownstreamJobs("com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever", "aClassifier");
+ downstreamJobs = dao.listDownstreamJobs(
+ "com.mycompany", "dependency-1", "1.0-20180318.225603-3", "1.0-SNAPSHOT", "whatever", "aClassifier");
assertThat(downstreamJobs).isEmpty();
}
@@ -634,18 +989,38 @@ public void listDownstreamPipelinesBasedOnMavenDependencies_withClassifier() {
public void listDownstreamJobs_upstream_pom_triggers_downstream_pipelines() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pom-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pom-pipeline-1", 1, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", "pom", "1.0-SNAPSHOT", null, false,
- "pom", null);
- dao.updateBuildOnCompletion("my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pom-pipeline-1",
+ 1,
+ "com.mycompany.pom",
+ "parent-pom",
+ "1.0-SNAPSHOT",
+ "pom",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "pom",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordParentProject("my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
-
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
- SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID", ds,
+ dao.recordParentProject(
+ "my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
+
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
+ System.out);
+ SqlTestsUtils.dump(
+ "select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID",
+ ds,
System.out);
- SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID", ds,
+ SqlTestsUtils.dump(
+ "select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID",
+ ds,
System.out);
List downstreamJobs = dao.listDownstreamJobs("my-upstream-pom-pipeline-1", 1);
@@ -657,18 +1032,38 @@ public void listDownstreamJobs_upstream_pom_triggers_downstream_pipelines() {
public void listDownstreamJobsbyArtifact_upstream_pom_triggers_downstream_pipelines() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pom-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pom-pipeline-1", 1, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", "pom", "1.0-SNAPSHOT", null, false,
- "pom", null);
- dao.updateBuildOnCompletion("my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pom-pipeline-1",
+ 1,
+ "com.mycompany.pom",
+ "parent-pom",
+ "1.0-SNAPSHOT",
+ "pom",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "pom",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pom-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordParentProject("my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
-
- SqlTestsUtils.dump("select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID", ds, System.out);
- SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID", ds,
+ dao.recordParentProject(
+ "my-downstream-pipeline-1", 2, "com.mycompany.pom", "parent-pom", "1.0-SNAPSHOT", false);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
+
+ SqlTestsUtils.dump(
+ "select * from JENKINS_BUILD LEFT OUTER JOIN JENKINS_JOB ON JENKINS_BUILD.JOB_ID = JENKINS_JOB.ID",
+ ds,
System.out);
- SqlTestsUtils.dump("select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID", ds,
+ SqlTestsUtils.dump(
+ "select * from MAVEN_ARTIFACT INNER JOIN GENERATED_MAVEN_ARTIFACT ON MAVEN_ARTIFACT.ID = GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID",
+ ds,
+ System.out);
+ SqlTestsUtils.dump(
+ "select * from MAVEN_ARTIFACT INNER JOIN MAVEN_PARENT_PROJECT ON MAVEN_ARTIFACT.ID = MAVEN_PARENT_PROJECT.ARTIFACT_ID",
+ ds,
System.out);
{
@@ -680,7 +1075,8 @@ public void listDownstreamJobsbyArtifact_upstream_pom_triggers_downstream_pipeli
expectedMavenArtifact.setType("pom");
expectedMavenArtifact.setExtension("pom");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pom-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pom-pipeline-1", 1);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1");
@@ -695,33 +1091,87 @@ public void listDownstreamJobsbyArtifact_upstream_pom_triggers_downstream_pipeli
public void list_downstream_jobs_with_ignoreUpstreamTriggers_activated() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-SNAPSHOT",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1);
assertThat(downstreamPipelinesForBuild1).contains("my-downstream-pipeline-2");
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-SNAPSHOT",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-SNAPSHOT",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2);
assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1");
@@ -731,17 +1181,44 @@ public void list_downstream_jobs_with_ignoreUpstreamTriggers_activated() {
public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activated() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-SNAPSHOT",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 111);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", true, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 111, 11);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -752,7 +1229,8 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-2");
@@ -762,17 +1240,44 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat
}
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-SNAPSHOT",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-SNAPSHOT",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -783,7 +1288,8 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1");
@@ -798,32 +1304,91 @@ public void list_downstream_jobs_by_artifact_with_ignoreUpstreamTriggers_activat
public void list_downstream_jobs_with_skippedDownstreamTriggersActivated() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, true, "jar", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ true,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2",
+ 1,
+ "com.mycompany",
+ "shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5);
List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1);
assertThat(downstreamPipelinesForBuild1).isEmpty();
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-SNAPSHOT",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-SNAPSHOT",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2);
assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1");
@@ -833,16 +1398,48 @@ public void list_downstream_jobs_with_skippedDownstreamTriggersActivated() {
public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActivated() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, true, "jar", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ true,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 1111, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 555, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "shared", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2",
+ 1,
+ "com.mycompany",
+ "shared",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 123, 5);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -851,22 +1448,50 @@ public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActiv
expectedMavenArtifact.setVersion("1.0-SNAPSHOT");
expectedMavenArtifact.setType("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
assertThat(downstreamJobsByArtifactForBuild1).hasSize(0);
}
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "1.1-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-SNAPSHOT", "war", "1.1-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-SNAPSHOT",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-SNAPSHOT",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 11, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 9, 5);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -877,7 +1502,8 @@ public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActiv
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1");
@@ -892,37 +1518,87 @@ public void list_downstream_jobs_by_artifact_with_skippedDownstreamTriggersActiv
public void list_downstream_jobs_timestamped_snapshot_version() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-20170808.155524-63", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-20170808.155524-63",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-20170808.155524-64",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-20170808.155524-64", "war", "1.0-SNAPSHOT", null, false,
- "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22);
List downstreamPipelinesForBuild1 = dao.listDownstreamJobs("my-upstream-pipeline-1", 1);
assertThat(downstreamPipelinesForBuild1).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-20170808.155524-65", "jar", "1.1-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-20170808.155524-65",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-20170808.155524-66", "war", "1.1-SNAPSHOT", null, false,
- "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-20170808.155524-66",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
List downstreamPipelinesForBuild2 = dao.listDownstreamJobs("my-upstream-pipeline-1", 2);
assertThat(downstreamPipelinesForBuild2).contains("my-downstream-pipeline-1");
@@ -932,19 +1608,44 @@ public void list_downstream_jobs_timestamped_snapshot_version() {
public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-20170808.155524-63", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-20170808.155524-63",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-20170808.155524-64",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "service", "1.0-20170808.155524-64", "war", "1.0-SNAPSHOT", null, false,
- "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
- dao.recordDependency("my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
- dao.recordDependency("my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -955,7 +1656,8 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() {
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
@@ -964,19 +1666,44 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() {
}
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "core", "1.1-20170808.155524-65", "jar", "1.1-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "core",
+ "1.1-20170808.155524-65",
+ "jar",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "service",
+ "1.1-20170808.155524-66",
+ "war",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "war",
null);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "service", "1.1-20170808.155524-66", "war", "1.1-SNAPSHOT", null, false,
- "war", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
- dao.recordDependency("my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.recordDependency(
+ "my-downstream-pipeline-1", 2, "com.mycompany", "core", "1.1-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
- dao.recordDependency("my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.recordDependency(
+ "my-downstream-pipeline-2", 2, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -987,7 +1714,8 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() {
expectedMavenArtifact.setType("jar");
expectedMavenArtifact.setExtension("jar");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1");
@@ -1002,14 +1730,27 @@ public void list_downstream_jobs_by_artifact_timestamped_snapshot_version() {
public void get_generated_artifacts_with_timestamped_snapshot_version() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-20170808.155524-63", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-20170808.155524-63",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1);
- System.out.println("GeneratedArtifacts " + generatedArtifacts.stream()
- .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + ", baseVersion: " + mavenArtifact.getBaseVersion())
- .collect(Collectors.joining(", ")));
+ System.out.println("GeneratedArtifacts "
+ + generatedArtifacts.stream()
+ .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion()
+ + ", baseVersion: " + mavenArtifact.getBaseVersion())
+ .collect(Collectors.joining(", ")));
assertThat(generatedArtifacts).hasSize(1);
MavenArtifact jar = generatedArtifacts.get(0);
@@ -1023,13 +1764,27 @@ public void get_generated_artifacts_with_timestamped_snapshot_version() {
public void get_generated_artifacts_with_non_timestamped_snapshot_version() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1);
- System.out.println("GeneratedArtifacts " + generatedArtifacts.stream()
- .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + ", baseVersion: " + mavenArtifact.getBaseVersion())
- .collect(Collectors.joining(", ")));
+ System.out.println("GeneratedArtifacts "
+ + generatedArtifacts.stream()
+ .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion()
+ + ", baseVersion: " + mavenArtifact.getBaseVersion())
+ .collect(Collectors.joining(", ")));
assertThat(generatedArtifacts).hasSize(1);
MavenArtifact jar = generatedArtifacts.get(0);
@@ -1047,13 +1802,27 @@ public void get_generated_artifacts_with_non_timestamped_snapshot_version() {
public void get_generated_artifacts_with_null_version() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "core", null, "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "core",
+ null,
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
List generatedArtifacts = dao.getGeneratedArtifacts("my-upstream-pipeline-1", 1);
- System.out.println("GeneratedArtifacts " + generatedArtifacts.stream()
- .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion() + ", baseVersion: " + mavenArtifact.getBaseVersion())
- .collect(Collectors.joining(", ")));
+ System.out.println("GeneratedArtifacts "
+ + generatedArtifacts.stream()
+ .map(mavenArtifact -> mavenArtifact.getId() + ", version: " + mavenArtifact.getVersion()
+ + ", baseVersion: " + mavenArtifact.getBaseVersion())
+ .collect(Collectors.joining(", ")));
assertThat(generatedArtifacts).hasSize(1);
MavenArtifact jar = generatedArtifacts.get(0);
@@ -1067,17 +1836,30 @@ public void get_generated_artifacts_with_null_version() {
public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() {
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 1);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 1, "com.mycompany", "parent-pom", "1.0-20170808.155524-63", "pom", "1.0-SNAPSHOT", null, false,
- "pom", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 1,
+ "com.mycompany",
+ "parent-pom",
+ "1.0-20170808.155524-63",
+ "pom",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "pom",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 1);
dao.recordParentProject("my-downstream-pipeline-1", 1, "com.mycompany", "parent-pom", "1.0-SNAPSHOT", false);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 70, 22);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 1);
dao.recordParentProject("my-downstream-pipeline-2", 1, "com.mycompany", "parent-pom", "1.0-SNAPSHOT", false);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 50, 22);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -1088,7 +1870,8 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() {
expectedMavenArtifact.setType("pom");
expectedMavenArtifact.setExtension("pom");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 1);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1", "my-downstream-pipeline-2");
@@ -1097,17 +1880,30 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() {
}
dao.getOrCreateBuildPrimaryKey("my-upstream-pipeline-1", 2);
- dao.recordGeneratedArtifact("my-upstream-pipeline-1", 2, "com.mycompany", "parent-pom", "1.1-20170808.155524-65", "pom", "1.1-SNAPSHOT", null, false,
- "pom", null);
- dao.updateBuildOnCompletion("my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.recordGeneratedArtifact(
+ "my-upstream-pipeline-1",
+ 2,
+ "com.mycompany",
+ "parent-pom",
+ "1.1-20170808.155524-65",
+ "pom",
+ "1.1-SNAPSHOT",
+ null,
+ false,
+ "pom",
+ null);
+ dao.updateBuildOnCompletion(
+ "my-upstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-1", 2);
dao.recordParentProject("my-downstream-pipeline-1", 2, "com.mycompany", "parent-pom", "1.1-SNAPSHOT", false);
- dao.updateBuildOnCompletion("my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-1", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
dao.getOrCreateBuildPrimaryKey("my-downstream-pipeline-2", 2);
dao.recordParentProject("my-downstream-pipeline-2", 2, "com.mycompany", "parent-pom", "1.0-SNAPSHOT", false);
- dao.updateBuildOnCompletion("my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
+ dao.updateBuildOnCompletion(
+ "my-downstream-pipeline-2", 2, Result.SUCCESS.ordinal, System.currentTimeMillis() - 20, 9);
{
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -1118,7 +1914,8 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() {
expectedMavenArtifact.setType("pom");
expectedMavenArtifact.setExtension("pom");
- Map> downstreamJobsByArtifactForBuild1 = dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
+ Map> downstreamJobsByArtifactForBuild1 =
+ dao.listDownstreamJobsByArtifact("my-upstream-pipeline-1", 2);
SortedSet actualJobs = downstreamJobsByArtifactForBuild1.get(expectedMavenArtifact);
assertThat(actualJobs).contains("my-downstream-pipeline-1");
@@ -1132,68 +1929,148 @@ public void list_downstream_jobs_by_parent_pom_timestamped_snapshot_version() {
public void list_upstream_pipelines_based_on_maven_dependencies() {
dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core", 1);
- dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
+ dao.recordDependency(
+ "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordGeneratedArtifact(
+ "pipeline-core",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out);
SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out);
SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out);
- Map upstreamPipelinesForBuild1 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core", 1);
+ Map upstreamPipelinesForBuild1 =
+ dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core", 1);
assertThat(upstreamPipelinesForBuild1.keySet()).contains("pipeline-framework");
-
}
@Test
public void list_upstream_pipelines_based_on_maven_dependencies_with_classifier() {
dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "anotherType", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "aType",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "anotherType",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
"aClassifier");
- dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core1", 1);
- dao.recordDependency("pipeline-core1", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, null);
+ dao.recordDependency(
+ "pipeline-core1", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, null);
dao.updateBuildOnCompletion("pipeline-core1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core2", 1);
- dao.recordDependency("pipeline-core2", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, "whatever");
+ dao.recordDependency(
+ "pipeline-core2",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "aType",
+ "compile",
+ false,
+ "whatever");
dao.updateBuildOnCompletion("pipeline-core2", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core3", 1);
- dao.recordDependency("pipeline-core3", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "whatever", "compile", false, null);
+ dao.recordDependency(
+ "pipeline-core3", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "whatever", "compile", false, null);
dao.updateBuildOnCompletion("pipeline-core3", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core4", 1);
- dao.recordDependency("pipeline-core4", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "whatever", "compile", false, "aClassifier");
+ dao.recordDependency(
+ "pipeline-core4",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "whatever",
+ "compile",
+ false,
+ "aClassifier");
dao.updateBuildOnCompletion("pipeline-core4", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core5", 1);
- dao.recordDependency("pipeline-core5", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "anotherType", "compile", false, "aClassifier");
+ dao.recordDependency(
+ "pipeline-core5",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "anotherType",
+ "compile",
+ false,
+ "aClassifier");
dao.updateBuildOnCompletion("pipeline-core5", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out);
SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out);
SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out);
- Map upstreamPipelinesForBuild1 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core1", 1);
+ Map upstreamPipelinesForBuild1 =
+ dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core1", 1);
assertThat(upstreamPipelinesForBuild1.keySet()).contains("pipeline-framework");
- Map upstreamPipelinesForBuild2 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core2", 1);
+ Map upstreamPipelinesForBuild2 =
+ dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core2", 1);
assertThat(upstreamPipelinesForBuild2.keySet()).isEmpty();
- Map upstreamPipelinesForBuild3 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core3", 1);
+ Map upstreamPipelinesForBuild3 =
+ dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core3", 1);
assertThat(upstreamPipelinesForBuild3.keySet()).isEmpty();
- Map upstreamPipelinesForBuild4 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core4", 1);
+ Map upstreamPipelinesForBuild4 =
+ dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core4", 1);
assertThat(upstreamPipelinesForBuild4.keySet()).isEmpty();
- Map upstreamPipelinesForBuild5 = dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core5", 1);
+ Map upstreamPipelinesForBuild5 =
+ dao.listUpstreamPipelinesBasedOnMavenDependencies("pipeline-core5", 1);
assertThat(upstreamPipelinesForBuild5.keySet()).contains("pipeline-framework");
}
@@ -1201,41 +2078,102 @@ public void list_upstream_pipelines_based_on_maven_dependencies_with_classifier(
public void list_upstream_pipelines_based_on_parent_project() {
dao.getOrCreateBuildPrimaryKey("pipeline-parent-pom", 1);
- dao.recordGeneratedArtifact("pipeline-parent-pom", 1, "com.mycompany", "company-parent-pom", "1.0-SNAPSHOT", "pom", "1.0-SNAPSHOT", null, false, "pom",
+ dao.recordGeneratedArtifact(
+ "pipeline-parent-pom",
+ 1,
+ "com.mycompany",
+ "company-parent-pom",
+ "1.0-SNAPSHOT",
+ "pom",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "pom",
null);
- dao.updateBuildOnCompletion("pipeline-parent-pom", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "pipeline-parent-pom", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core", 1);
dao.recordParentProject("pipeline-core", 1, "com.mycompany", "company-parent-pom", "1.0-SNAPSHOT", false);
- dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
+ dao.recordDependency(
+ "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordGeneratedArtifact(
+ "pipeline-core",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out);
SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out);
SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out);
- Map upstreamPipelinesForBuild1 = dao.listUpstreamPipelinesBasedOnParentProjectDependencies("pipeline-core", 1);
+ Map upstreamPipelinesForBuild1 =
+ dao.listUpstreamPipelinesBasedOnParentProjectDependencies("pipeline-core", 1);
assertThat(upstreamPipelinesForBuild1.keySet()).contains("pipeline-parent-pom");
-
}
@Test
public void list_transitive_upstream_jobs() {
dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core", 1);
- dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
+ dao.recordDependency(
+ "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordGeneratedArtifact(
+ "pipeline-core",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-service", 1);
- dao.recordDependency("pipeline-service", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordDependency("pipeline-service", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordGeneratedArtifact("pipeline-service", 1, "com.mycompany", "service", "1.0-SNAPSHOT", "war", "1.0-SNAPSHOT", null, false, "war", null);
- dao.updateBuildOnCompletion("pipeline-service", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22);
+ dao.recordDependency(
+ "pipeline-service", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordDependency(
+ "pipeline-service", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordGeneratedArtifact(
+ "pipeline-service",
+ 1,
+ "com.mycompany",
+ "service",
+ "1.0-SNAPSHOT",
+ "war",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "war",
+ null);
+ dao.updateBuildOnCompletion(
+ "pipeline-service", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22);
SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out);
SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out);
@@ -1248,24 +2186,66 @@ public void list_transitive_upstream_jobs() {
public void list_transitive_upstream_jobs_with_classifier() {
dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "aType",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
"aClassifier");
- dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core1", 1);
- dao.recordDependency("pipeline-core1", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "aType", "compile", false, "aClassifier");
- dao.recordGeneratedArtifact("pipeline-core1", 1, "com.mycompany", "core1", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", "aClassifier");
+ dao.recordDependency(
+ "pipeline-core1",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "aType",
+ "compile",
+ false,
+ "aClassifier");
+ dao.recordGeneratedArtifact(
+ "pipeline-core1",
+ 1,
+ "com.mycompany",
+ "core1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ "aClassifier");
dao.updateBuildOnCompletion("pipeline-core1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-service1", 1);
- dao.recordDependency("pipeline-service1", 1, "com.mycompany", "core1", "1.0-SNAPSHOT", "jar", "compile", false, "aClassifier");
- dao.updateBuildOnCompletion("pipeline-service1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22);
+ dao.recordDependency(
+ "pipeline-service1",
+ 1,
+ "com.mycompany",
+ "core1",
+ "1.0-SNAPSHOT",
+ "jar",
+ "compile",
+ false,
+ "aClassifier");
+ dao.updateBuildOnCompletion(
+ "pipeline-service1", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 22);
SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out);
SqlTestsUtils.dump("select * from JOB_GENERATED_ARTIFACTS", this.ds, System.out);
SqlTestsUtils.dump("select * from JENKINS_JOB", this.ds, System.out);
- assertThat(dao.listTransitiveUpstreamJobs("pipeline-service1", 1).keySet()).contains("pipeline-framework", "pipeline-core1");
+ assertThat(dao.listTransitiveUpstreamJobs("pipeline-service1", 1).keySet())
+ .contains("pipeline-framework", "pipeline-core1");
}
@Deprecated
@@ -1273,18 +2253,41 @@ public void list_transitive_upstream_jobs_with_classifier() {
public void list_downstream_jobs_with_failed_last_build() {
dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.updateBuildOnCompletion(
+ "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core", 1);
- dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
+ dao.recordDependency(
+ "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordGeneratedArtifact(
+ "pipeline-core",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
{
List downstreamJobs = dao.listDownstreamJobs("pipeline-framework", 1);
assertThat(downstreamJobs).contains("pipeline-core");
-
}
// pipeline-core#2 fails before dependencies have been tracked
@@ -1298,7 +2301,6 @@ public void list_downstream_jobs_with_failed_last_build() {
{
List downstreamJobs = dao.listDownstreamJobs("pipeline-framework", 1);
assertThat(downstreamJobs).contains("pipeline-core");
-
}
}
@@ -1306,14 +2308,48 @@ public void list_downstream_jobs_with_failed_last_build() {
public void list_downstream_jobs_by_artifact_with_failed_last_build() {
dao.getOrCreateBuildPrimaryKey("pipeline-framework", 1);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
- dao.recordGeneratedArtifact("pipeline-framework", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar",
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
+ dao.recordGeneratedArtifact(
+ "pipeline-framework",
+ 1,
+ "com.mycompany",
+ "framework",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
"sources");
- dao.updateBuildOnCompletion("pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
+ dao.updateBuildOnCompletion(
+ "pipeline-framework", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
dao.getOrCreateBuildPrimaryKey("pipeline-core", 1);
- dao.recordDependency("pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
- dao.recordGeneratedArtifact("pipeline-core", 1, "com.mycompany", "core", "1.0-SNAPSHOT", "jar", "1.0-SNAPSHOT", null, false, "jar", null);
+ dao.recordDependency(
+ "pipeline-core", 1, "com.mycompany", "framework", "1.0-SNAPSHOT", "jar", "compile", false, null);
+ dao.recordGeneratedArtifact(
+ "pipeline-core",
+ 1,
+ "com.mycompany",
+ "core",
+ "1.0-SNAPSHOT",
+ "jar",
+ "1.0-SNAPSHOT",
+ null,
+ false,
+ "jar",
+ null);
dao.updateBuildOnCompletion("pipeline-core", 1, Result.SUCCESS.ordinal, System.currentTimeMillis() - 100, 11);
MavenArtifact expectedMavenArtifact = new MavenArtifact();
@@ -1325,12 +2361,12 @@ public void list_downstream_jobs_by_artifact_with_failed_last_build() {
expectedMavenArtifact.setExtension("jar");
{
- Map> downstreamJobsByArtifact = dao.listDownstreamJobsByArtifact("pipeline-framework", 1);
+ Map> downstreamJobsByArtifact =
+ dao.listDownstreamJobsByArtifact("pipeline-framework", 1);
SortedSet actualJobs = downstreamJobsByArtifact.get(expectedMavenArtifact);
assertThat(actualJobs).contains("pipeline-core");
assertThat(downstreamJobsByArtifact).hasSize(1);
-
}
// pipeline-core#2 fails before dependencies have been tracked
@@ -1342,7 +2378,8 @@ public void list_downstream_jobs_by_artifact_with_failed_last_build() {
SqlTestsUtils.dump("select * from JOB_DEPENDENCIES", this.ds, System.out);
{
- Map> downstreamJobsByArtifact = dao.listDownstreamJobsByArtifact("pipeline-framework", 1);
+ Map> downstreamJobsByArtifact =
+ dao.listDownstreamJobsByArtifact("pipeline-framework", 1);
SortedSet actualJobs = downstreamJobsByArtifact.get(expectedMavenArtifact);
assertThat(actualJobs).contains("pipeline-core");
assertThat(downstreamJobsByArtifact).hasSize(1);
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java
index f39701e6..cc673b6a 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoInitializationTest.java
@@ -26,17 +26,15 @@
import static org.assertj.core.api.Assertions.assertThat;
+import edu.umd.cs.findbugs.annotations.NonNull;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-
import org.h2.jdbcx.JdbcConnectionPool;
import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
import org.junit.jupiter.api.Test;
-import edu.umd.cs.findbugs.annotations.NonNull;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java
index 1b02739e..60172381 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginH2DaoTest.java
@@ -24,11 +24,10 @@
package org.jenkinsci.plugins.pipeline.maven.db;
+import javax.sql.DataSource;
import org.h2.jdbcx.JdbcConnectionPool;
import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-import javax.sql.DataSource;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java
index 895bf8a7..2aeee36c 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMariaDbDaoIT.java
@@ -30,28 +30,24 @@
import static org.mockito.Mockito.when;
import static org.testcontainers.images.PullPolicy.alwaysPull;
-import java.util.Collections;
-
-import javax.sql.DataSource;
-
-import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-import org.junit.jupiter.api.Test;
-import org.mockito.MockedStatic;
-import org.testcontainers.containers.MariaDBContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-
import com.cloudbees.plugins.credentials.CredentialsMatchers;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
-
import hudson.security.ACL;
import hudson.util.FormValidation;
import hudson.util.Secret;
+import java.util.Collections;
+import javax.sql.DataSource;
import jenkins.model.Jenkins;
+import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
+import org.junit.jupiter.api.Test;
+import org.mockito.MockedStatic;
+import org.testcontainers.containers.MariaDBContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
/**
* @author Cyrille Le Clerc
@@ -60,7 +56,8 @@
public class PipelineMavenPluginMariaDbDaoIT extends PipelineMavenPluginDaoAbstractTest {
@Container
- public static MariaDBContainer> DB = new MariaDBContainer<>(MariaDBContainer.NAME).withImagePullPolicy(alwaysPull());
+ public static MariaDBContainer> DB =
+ new MariaDBContainer<>(MariaDBContainer.NAME).withImagePullPolicy(alwaysPull());
@Override
public DataSource before_newDataSource() {
@@ -96,13 +93,16 @@ public void ensureValidateConfiguration() throws Exception {
try (MockedStatic j = mockStatic(Jenkins.class);
MockedStatic m = mockStatic(CredentialsMatchers.class);
MockedStatic c = mockStatic(CredentialsProvider.class)) {
- PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config().jdbcUrl(DB.getJdbcUrl()).credentialsId("credsId");
+ PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config()
+ .jdbcUrl(DB.getJdbcUrl())
+ .credentialsId("credsId");
UsernamePasswordCredentials credentials = mock(UsernamePasswordCredentials.class);
Secret password = Secret.fromString(DB.getPassword());
String version = DB.createConnection("").getMetaData().getDatabaseProductVersion();
j.when(Jenkins::get).thenReturn(null);
m.when(() -> CredentialsMatchers.withId("credsId")).thenReturn(null);
- c.when(() -> CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST))
+ c.when(() -> CredentialsProvider.lookupCredentials(
+ UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST))
.thenReturn(null);
c.when(() -> CredentialsMatchers.firstOrNull(null, null)).thenReturn(credentials);
when(credentials.getUsername()).thenReturn(DB.getUsername());
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java
index 4e537230..1c070f9c 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoIT.java
@@ -30,28 +30,24 @@
import static org.mockito.Mockito.when;
import static org.testcontainers.images.PullPolicy.alwaysPull;
-import java.util.Collections;
-
-import javax.sql.DataSource;
-
-import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-import org.junit.jupiter.api.Test;
-import org.mockito.MockedStatic;
-import org.testcontainers.containers.MySQLContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-
import com.cloudbees.plugins.credentials.CredentialsMatchers;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
-
import hudson.security.ACL;
import hudson.util.FormValidation;
import hudson.util.Secret;
+import java.util.Collections;
+import javax.sql.DataSource;
import jenkins.model.Jenkins;
+import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
+import org.junit.jupiter.api.Test;
+import org.mockito.MockedStatic;
+import org.testcontainers.containers.MySQLContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
/**
* @author Cyrille Le Clerc
@@ -96,13 +92,16 @@ public void ensureValidateConfiguration() throws Exception {
try (MockedStatic j = mockStatic(Jenkins.class);
MockedStatic m = mockStatic(CredentialsMatchers.class);
MockedStatic c = mockStatic(CredentialsProvider.class)) {
- PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config().jdbcUrl(DB.getJdbcUrl()).credentialsId("credsId");
+ PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config()
+ .jdbcUrl(DB.getJdbcUrl())
+ .credentialsId("credsId");
UsernamePasswordCredentials credentials = mock(UsernamePasswordCredentials.class);
Secret password = Secret.fromString(DB.getPassword());
String version = DB.createConnection("").getMetaData().getDatabaseProductVersion();
j.when(Jenkins::get).thenReturn(null);
m.when(() -> CredentialsMatchers.withId("credsId")).thenReturn(null);
- c.when(() -> CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST))
+ c.when(() -> CredentialsProvider.lookupCredentials(
+ UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST))
.thenReturn(null);
c.when(() -> CredentialsMatchers.firstOrNull(null, null)).thenReturn(credentials);
when(credentials.getUsername()).thenReturn(DB.getUsername());
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java
index ea487def..24864cdd 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoInitializationTest.java
@@ -26,17 +26,15 @@
import static org.assertj.core.api.Assertions.assertThat;
+import edu.umd.cs.findbugs.annotations.NonNull;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-
import org.h2.jdbcx.JdbcConnectionPool;
import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
import org.junit.jupiter.api.Test;
-import edu.umd.cs.findbugs.annotations.NonNull;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java
index 3c998c00..5ac409da 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginMySqlDaoTest.java
@@ -27,7 +27,6 @@
import static org.assertj.core.api.Assertions.assertThat;
import javax.sql.DataSource;
-
import org.h2.jdbcx.JdbcConnectionPool;
import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
import org.junit.jupiter.api.Test;
@@ -75,7 +74,8 @@ public void test_mariadb_version_parsing_JENKINS_55378() {
*/
@Test
public void test_mariadb_version_parsing_mariadb_as_docker_container() {
- String actual = PipelineMavenPluginMySqlDao.extractMariaDbVersion("5.5.5-10.3.11-MariaDB-1:10.3.11+maria~bionic");
+ String actual =
+ PipelineMavenPluginMySqlDao.extractMariaDbVersion("5.5.5-10.3.11-MariaDB-1:10.3.11+maria~bionic");
assertThat(actual).isEqualTo("10.3.11");
}
}
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java
index 458ffb5a..d32697c6 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoIT.java
@@ -30,28 +30,24 @@
import static org.mockito.Mockito.when;
import static org.testcontainers.images.PullPolicy.alwaysPull;
-import java.util.Collections;
-
-import javax.sql.DataSource;
-
-import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao;
-import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-import org.junit.jupiter.api.Test;
-import org.mockito.MockedStatic;
-import org.testcontainers.containers.PostgreSQLContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-
import com.cloudbees.plugins.credentials.CredentialsMatchers;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
-
import hudson.security.ACL;
import hudson.util.FormValidation;
import hudson.util.Secret;
+import java.util.Collections;
+import javax.sql.DataSource;
import jenkins.model.Jenkins;
+import org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginDao;
+import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
+import org.junit.jupiter.api.Test;
+import org.mockito.MockedStatic;
+import org.testcontainers.containers.PostgreSQLContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
/**
* @author Cyrille Le Clerc
@@ -60,7 +56,8 @@
public class PipelineMavenPluginPostgreSqlDaoIT extends PipelineMavenPluginDaoAbstractTest {
@Container
- public static PostgreSQLContainer> DB = new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE).withImagePullPolicy(alwaysPull());
+ public static PostgreSQLContainer> DB =
+ new PostgreSQLContainer<>(PostgreSQLContainer.IMAGE).withImagePullPolicy(alwaysPull());
@Override
public DataSource before_newDataSource() throws Exception {
@@ -97,13 +94,16 @@ public void ensureValidateConfiguration() throws Exception {
try (MockedStatic j = mockStatic(Jenkins.class);
MockedStatic m = mockStatic(CredentialsMatchers.class);
MockedStatic c = mockStatic(CredentialsProvider.class)) {
- PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config().jdbcUrl(DB.getJdbcUrl()).credentialsId("credsId");
+ PipelineMavenPluginDao.Builder.Config config = new PipelineMavenPluginDao.Builder.Config()
+ .jdbcUrl(DB.getJdbcUrl())
+ .credentialsId("credsId");
UsernamePasswordCredentials credentials = mock(UsernamePasswordCredentials.class);
Secret password = Secret.fromString(DB.getPassword());
String version = DB.createConnection("").getMetaData().getDatabaseProductVersion();
j.when(Jenkins::get).thenReturn(null);
m.when(() -> CredentialsMatchers.withId("credsId")).thenReturn(null);
- c.when(() -> CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST))
+ c.when(() -> CredentialsProvider.lookupCredentials(
+ UsernamePasswordCredentials.class, (Jenkins) null, ACL.SYSTEM, Collections.EMPTY_LIST))
.thenReturn(null);
c.when(() -> CredentialsMatchers.firstOrNull(null, null)).thenReturn(credentials);
when(credentials.getUsername()).thenReturn(DB.getUsername());
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java
index 8a3c6015..f8b66e8d 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/PipelineMavenPluginPostgreSqlDaoTest.java
@@ -24,11 +24,10 @@
package org.jenkinsci.plugins.pipeline.maven.db;
+import javax.sql.DataSource;
import org.h2.jdbcx.JdbcConnectionPool;
import org.jenkinsci.plugins.pipeline.maven.db.migration.MigrationStep;
-import javax.sql.DataSource;
-
/**
* @author Cyrille Le Clerc
*/
@@ -58,5 +57,4 @@ public String getMasterRootUrl() {
}
};
}
-
}
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java
index 481fcb6f..a045914d 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/ClassUtilsTest.java
@@ -12,6 +12,7 @@ public class ClassUtilsTest {
@Test
public void testGetResource() {
- assertThat(getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/util/classutils-test-1.txt")).isNotNull();
+ assertThat(getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/util/classutils-test-1.txt"))
+ .isNotNull();
}
}
diff --git a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java
index 2c5734bd..ae550afb 100644
--- a/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java
+++ b/pipeline-maven-database/src/test/java/org/jenkinsci/plugins/pipeline/maven/db/util/SqlTestsUtils.java
@@ -1,14 +1,8 @@
package org.jenkinsci.plugins.pipeline.maven.db.util;
-import org.h2.api.ErrorCode;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import org.jenkinsci.plugins.pipeline.maven.db.util.RuntimeSqlException;
-
-import javax.sql.DataSource;
-
import static java.util.Optional.ofNullable;
+import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.PrintStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
@@ -16,13 +10,14 @@
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
+import javax.sql.DataSource;
+import org.h2.api.ErrorCode;
/**
* @author Cyrille Le Clerc
*/
public class SqlTestsUtils {
-
public static void dump(String sql, DataSource ds, PrintStream out) throws RuntimeSqlException {
try (Connection connection = ds.getConnection()) {
out.println("# DUMP " + sql);
@@ -66,11 +61,12 @@ public static int countRows(@NonNull String sql, @NonNull DataSource ds, Object.
}
public static int countRows(@NonNull String sql, @NonNull Connection cnn, Object... params) throws SQLException {
- String sqlQuery ;
- if (sql.startsWith("select * from")){
+ String sqlQuery;
+ if (sql.startsWith("select * from")) {
sqlQuery = "select count(*) from " + sql.substring("select * from".length());
} else {
- sqlQuery = "select count(*) from (" + sql + ")"; }
+ sqlQuery = "select count(*) from (" + sql + ")";
+ }
try (PreparedStatement stmt = cnn.prepareStatement(sqlQuery)) {
int idx = 1;
diff --git a/pipeline-maven-spy/pom.xml b/pipeline-maven-spy/pom.xml
index 3d883907..8c9a9ffc 100644
--- a/pipeline-maven-spy/pom.xml
+++ b/pipeline-maven-spy/pom.xml
@@ -43,16 +43,16 @@
javax.inject
1
-
- org.apache.maven
- maven-core
- provided
-
org.sonatype.aether
aether-api
1.13.1
+
+ org.apache.maven
+ maven-core
+ provided
+
org.slf4j
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java
index ce181aae..a808677c 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpy.java
@@ -24,6 +24,16 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import javax.inject.Named;
+import javax.inject.Singleton;
import org.apache.maven.eventspy.AbstractEventSpy;
import org.apache.maven.eventspy.EventSpy;
import org.codehaus.plexus.util.xml.Xpp3Dom;
@@ -52,17 +62,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.inject.Named;
-import javax.inject.Singleton;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
/**
* Maven {@link EventSpy} to capture build details consumed by the Jenkins Pipeline Maven Plugin
* and the {@code withMaven(){...}} pipeline step.
@@ -73,9 +72,10 @@
@Singleton
public class JenkinsMavenEventSpy extends AbstractEventSpy {
- public final static String DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME = JenkinsMavenEventSpy.class.getName() + ".disabled";
+ public static final String DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME =
+ JenkinsMavenEventSpy.class.getName() + ".disabled";
- public final static String DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME = "JENKINS_MAVEN_AGENT_DISABLED";
+ public static final String DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME = "JENKINS_MAVEN_AGENT_DISABLED";
private final Logger logger = LoggerFactory.getLogger(getClass());
@@ -87,10 +87,11 @@ public class JenkinsMavenEventSpy extends AbstractEventSpy {
protected final boolean disabled;
private Set blackList = new HashSet();
- private Set ignoredList = new HashSet(Collections.singletonList(
- /*"org.eclipse.aether.RepositoryEvent",*/
- "org.apache.maven.settings.building.DefaultSettingsBuildingResult"/*,
- "org.apache.maven.execution.DefaultMavenExecutionResult"*/));
+ private Set ignoredList = new HashSet(
+ Collections.singletonList(
+ /*"org.eclipse.aether.RepositoryEvent",*/
+ "org.apache.maven.settings.building.DefaultSettingsBuildingResult" /*,
+ "org.apache.maven.execution.DefaultMavenExecutionResult"*/));
private List handlers = new ArrayList();
@@ -150,8 +151,7 @@ public void init(EventSpy.Context context) throws Exception {
@Override
public void onEvent(Object event) throws Exception {
- if (disabled)
- return;
+ if (disabled) return;
try {
if (blackList.contains(event.getClass())) {
@@ -174,7 +174,6 @@ public void onEvent(Object event) throws Exception {
}
}
-
@Override
public void close() {
if (disabled) {
@@ -187,9 +186,9 @@ public void close() {
/**
* Visible for testing
*/
- protected boolean isEventSpyDisabled(){
- return "true".equalsIgnoreCase(System.getProperty(DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME)) ||
- "true".equalsIgnoreCase(System.getenv(DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME));
+ protected boolean isEventSpyDisabled() {
+ return "true".equalsIgnoreCase(System.getProperty(DISABLE_MAVEN_EVENT_SPY_PROPERTY_NAME))
+ || "true".equalsIgnoreCase(System.getenv(DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME));
}
public MavenEventReporter getReporter() {
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java
index d449da5a..72097a12 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractExecutionHandler.java
@@ -24,16 +24,14 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.plugin.MojoExecution;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
@@ -60,7 +58,9 @@ public boolean handle(@NonNull Object event) {
String[] gag = supportedGoal.split(":");
if (gag.length == 3) {
MojoExecution execution = executionEvent.getMojoExecution();
- if (execution.getGroupId().equals(gag[0]) && execution.getArtifactId().equals(gag[1]) && execution.getGoal().equals(gag[2])) {
+ if (execution.getGroupId().equals(gag[0])
+ && execution.getArtifactId().equals(gag[1])
+ && execution.getGoal().equals(gag[2])) {
_handle(executionEvent);
return true;
} else {
@@ -71,7 +71,6 @@ public boolean handle(@NonNull Object event) {
return false;
}
}
-
}
@Override
@@ -106,7 +105,8 @@ public boolean _handle(@NonNull ExecutionEvent executionEvent) {
}
for (String configurationParameter : configurationParameters) {
- Xpp3Dom element = fullClone(configurationParameter, execution.getConfiguration().getChild(configurationParameter));
+ Xpp3Dom element = fullClone(
+ configurationParameter, execution.getConfiguration().getChild(configurationParameter));
if (element != null) {
plugin.addChild(element);
}
@@ -115,7 +115,7 @@ public boolean _handle(@NonNull ExecutionEvent executionEvent) {
addDetails(executionEvent, root);
- if(executionEvent.getException() != null) {
+ if (executionEvent.getException() != null) {
root.addChild(newElement("exception", executionEvent.getException()));
}
@@ -124,9 +124,7 @@ public boolean _handle(@NonNull ExecutionEvent executionEvent) {
return true;
}
- protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3Dom root) {
-
- }
+ protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3Dom root) {}
@NonNull
protected abstract List getConfigurationParametersToReport(ExecutionEvent executionEvent);
@@ -138,7 +136,6 @@ protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3D
@Nullable
protected abstract ExecutionEvent.Type getSupportedType();
-
/**
*
* @return {@code null} if all goals are supported by this {@link AbstractExecutionHandler}
@@ -156,7 +153,9 @@ public String toString() {
@Nullable
protected String getMojoConfigurationValue(@NonNull MojoExecution execution, @NonNull String elementName) {
Xpp3Dom element = execution.getConfiguration().getChild(elementName);
- return element == null ? null : element.getValue() == null ? element.getAttribute("default-value") : element.getValue();
+ return element == null
+ ? null
+ : element.getValue() == null ? element.getAttribute("default-value") : element.getValue();
}
@Nullable
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java
index cd885f9c..a6de09b7 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandler.java
@@ -24,6 +24,8 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
@@ -31,7 +33,6 @@
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.regex.Pattern;
-
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.ArtifactHandler;
import org.apache.maven.model.Build;
@@ -44,9 +45,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
@@ -61,12 +59,10 @@ public abstract class AbstractMavenEventHandler implements MavenEventHandler<
*/
private static final Pattern ANSI_PATTERN = Pattern.compile("\\x1b\\[[0-9;]*m");
-
protected AbstractMavenEventHandler(MavenEventReporter reporter) {
this.reporter = reporter;
}
-
@Override
public boolean handle(Object event) {
Type type = getSupportedType();
@@ -88,7 +84,6 @@ public String toString() {
return getClass().getName() + "[type=" + getSupportedType() + "]";
}
-
public Xpp3Dom newElement(String name, String value) {
Xpp3Dom element = new Xpp3Dom(name);
element.setValue(value);
@@ -124,27 +119,34 @@ public Xpp3Dom newElement(@NonNull String name, @Nullable final MavenProject pro
throw new RuntimeIOException(e);
}
- if (absolutePath.endsWith(File.separator + "pom.xml") || absolutePath.endsWith(File.separator + ".flattened-pom.xml")) {
+ if (absolutePath.endsWith(File.separator + "pom.xml")
+ || absolutePath.endsWith(File.separator + ".flattened-pom.xml")) {
// JENKINS-43616: flatten-maven-plugin replaces the original pom as artifact with a .flattened-pom.xml
// no tweak
} else if (absolutePath.endsWith(File.separator + "dependency-reduced-pom.xml")) {
// JENKINS-42302: maven-shade-plugin creates a temporary project file dependency-reduced-pom.xml
// TODO see if there is a better way to implement this "workaround"
- absolutePath = absolutePath.replace(File.separator + "dependency-reduced-pom.xml", File.separator + "pom.xml");
+ absolutePath =
+ absolutePath.replace(File.separator + "dependency-reduced-pom.xml", File.separator + "pom.xml");
} else if (absolutePath.endsWith(File.separator + ".git-versioned-pom.xml")) {
- // JENKINS-56666 maven-git-versioning-extension causes warnings due to temporary pom.xml file name '.git-versioned-pom.xml'
+ // JENKINS-56666 maven-git-versioning-extension causes warnings due to temporary pom.xml file name
+ // '.git-versioned-pom.xml'
// https://github.com/qoomon/maven-git-versioning-extension/blob/v4.1.0/src/main/java/me/qoomon/maven/gitversioning/VersioningMojo.java#L39
// TODO see if there is a better way to implement this "workaround"
- absolutePath = absolutePath.replace(File.separator + ".git-versioned-pom.xml", File.separator + "pom.xml");
+ absolutePath =
+ absolutePath.replace(File.separator + ".git-versioned-pom.xml", File.separator + "pom.xml");
} else {
String flattenedPomFilename = getMavenFlattenPluginFlattenedPomFilename(project);
if (flattenedPomFilename == null) {
- logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName() + "', problems may occur");
+ logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName()
+ + "', problems may occur");
} else {
if (absolutePath.endsWith(File.separator + flattenedPomFilename)) {
- absolutePath = absolutePath.replace(File.separator + flattenedPomFilename, File.separator + "pom.xml");
+ absolutePath =
+ absolutePath.replace(File.separator + flattenedPomFilename, File.separator + "pom.xml");
} else {
- logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName() + "', problems may occur");
+ logger.warn("[jenkins-event-spy] Unexpected Maven project file name '" + projectFile.getName()
+ + "', problems may occur");
}
}
}
@@ -177,10 +179,10 @@ public Xpp3Dom newElement(@NonNull String name, @Nullable final MavenProject pro
*/
@Nullable
protected String getMavenFlattenPluginFlattenedPomFilename(@NonNull MavenProject project) {
- for(Plugin buildPlugin : project.getBuildPlugins()) {
+ for (Plugin buildPlugin : project.getBuildPlugins()) {
if ("org.codehaus.mojo:flatten-maven-plugin".equals(buildPlugin.getKey())) {
String mavenConfigurationElement = "flattenedPomFilename";
- for(PluginExecution execution: buildPlugin.getExecutions()) {
+ for (PluginExecution execution : buildPlugin.getExecutions()) {
if (execution.getGoals().contains("flatten")) {
if (execution.getConfiguration() instanceof Xpp3Dom) {
Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration();
@@ -204,12 +206,12 @@ protected String getMavenFlattenPluginFlattenedPomFilename(@NonNull MavenProject
}
private static String removeAnsiColor(String input) {
- if (input!=null) {
- input = ANSI_PATTERN.matcher(input).replaceAll("");
- }
- return input;
+ if (input != null) {
+ input = ANSI_PATTERN.matcher(input).replaceAll("");
+ }
+ return input;
}
-
+
public Xpp3Dom newElement(@NonNull String name, @Nullable Throwable t) {
Xpp3Dom rootElt = new Xpp3Dom(name);
if (t == null) {
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java
index acd133bb..3cf0337e 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ArtifactDeployedEventHandler.java
@@ -1,12 +1,11 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.eclipse.aether.RepositoryEvent;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
public class ArtifactDeployedEventHandler implements MavenEventHandler {
protected final MavenEventReporter reporter;
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java
index 0e2404e8..e9e82303 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/CatchAllExecutionHandler.java
@@ -24,18 +24,16 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.plugin.MojoExecution;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java
index 972154c8..639df509 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DefaultSettingsBuildingRequestHandler.java
@@ -48,5 +48,4 @@ public boolean _handle(DefaultSettingsBuildingRequest request) {
reporter.print(root);
return true;
}
-
}
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java
index 134922c8..437b73d8 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DependencyResolutionResultHandler.java
@@ -24,16 +24,15 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
import org.apache.maven.project.DependencyResolutionResult;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.graph.Dependency;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-
/**
* @author Cyrille Le Clerc
*/
@@ -69,13 +68,13 @@ protected boolean _handle(DependencyResolutionResult result) {
for (Dependency dependency : result.getResolvedDependencies()) {
Artifact artifact = dependency.getArtifact();
- if ( !includedScopes.contains(dependency.getScope())) {
+ if (!includedScopes.contains(dependency.getScope())) {
continue;
}
if (!includeSnapshots && artifact.isSnapshot()) {
continue;
}
- if(!includeReleases && !artifact.isSnapshot()) {
+ if (!includeReleases && !artifact.isSnapshot()) {
continue;
}
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java
index 6d0b08df..b50e8b34 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployExecutionHandler.java
@@ -1,13 +1,12 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.execution.ExecutionEvent;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
@@ -19,8 +18,8 @@ public DeployDeployExecutionHandler(@NonNull MavenEventReporter reporter) {
@Override
protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3Dom root) {
super.addDetails(executionEvent, root);
- ArtifactRepository artifactRepository = executionEvent.getProject()
- .getDistributionManagementArtifactRepository();
+ ArtifactRepository artifactRepository =
+ executionEvent.getProject().getDistributionManagementArtifactRepository();
Xpp3Dom artifactRepositoryElt = new Xpp3Dom("artifactRepository");
root.addChild(artifactRepositoryElt);
if (artifactRepository == null) {
@@ -34,7 +33,6 @@ protected void addDetails(@NonNull ExecutionEvent executionEvent, @NonNull Xpp3D
urlElt.setValue(artifactRepository.getUrl());
artifactRepositoryElt.addChild(urlElt);
}
-
}
@Nullable
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java
index 38983100..f6854c80 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandler.java
@@ -1,17 +1,15 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.execution.ExecutionEvent.Type;
import org.apache.maven.plugin.MojoExecution;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* Handler to alter the
* org.apache.maven.plugins:maven-deploy-plugin:deploy-file
goal : it will
@@ -59,7 +57,7 @@ protected Type getSupportedType() {
return ExecutionEvent.Type.MojoSucceeded;
}
- @Nullable
+ @Nullable
@Override
protected String getSupportedPluginGoal() {
return "org.apache.maven.plugins:maven-deploy-plugin:deploy-file";
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java
index 4c7afa40..7e09c0ef 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/FailsafeTestExecutionHandler.java
@@ -24,15 +24,13 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Arrays;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java
index 3bf29288..56fec3f5 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerRunExecutionHandler.java
@@ -24,15 +24,13 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Arrays;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
public class InvokerRunExecutionHandler extends AbstractExecutionHandler {
public InvokerRunExecutionHandler(MavenEventReporter reporter) {
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java
index 7fb8fe53..8fa0ede6 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/InvokerStartExecutionHandler.java
@@ -26,18 +26,16 @@
import static org.jenkinsci.plugins.pipeline.maven.eventspy.JenkinsMavenEventSpy.DISABLE_MAVEN_EVENT_SPY_ENVIRONMENT_VARIABLE_NAME;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* Handler to alter the
* org.apache.maven.plugins:maven-invoker-plugin:run
goal : it will
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java
index 40c82b6f..a77df05c 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/JarJarExecutionHandler.java
@@ -24,14 +24,12 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Arrays;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java
index 7923ae7d..a870ff40 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectFailedExecutionHandler.java
@@ -24,11 +24,10 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
-import org.apache.maven.execution.ExecutionEvent;
-import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-
import java.util.Collections;
import java.util.List;
+import org.apache.maven.execution.ExecutionEvent;
+import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
/**
* @author Cyrille Le Clerc
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java
index d440a37b..927588fc 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectStartedExecutionHandler.java
@@ -24,16 +24,14 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
import java.util.Collections;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java
index be03856c..750a7604 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/ProjectSucceededExecutionHandler.java
@@ -24,6 +24,10 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.project.MavenProject;
@@ -31,11 +35,6 @@
import org.jenkinsci.plugins.pipeline.maven.eventspy.RuntimeIOException;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-
/**
* @author Cyrille Le Clerc
*/
@@ -81,7 +80,6 @@ protected void addDetails(ExecutionEvent executionEvent, Xpp3Dom element) {
}
attachedArtifactsElt.addChild(artifactElt);
}
-
}
@Override
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java
index c40bb1ec..2aa3c72e 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/RepositoryEventHandler.java
@@ -65,6 +65,7 @@ protected boolean _handle(RepositoryEvent repositoryEvent) {
}
private void print(RepositoryEvent repositoryEvent, long durationInNanos) {
- reporter.print(repositoryEvent.getArtifact().toString() + "-" + repositoryEvent.getType() + "-" + durationInNanos + "nanos");
+ reporter.print(repositoryEvent.getArtifact().toString() + "-" + repositoryEvent.getType() + "-"
+ + durationInNanos + "nanos");
}
}
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java
index 4e57c148..1529f1c6 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SessionEndedHandler.java
@@ -24,15 +24,13 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Collections;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* Don't generate an entry in the report for
* {@link ExecutionEvent.Type#SessionEnded}
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java
index 14d9efe2..91aa6773 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/SurefireTestExecutionHandler.java
@@ -24,15 +24,13 @@
package org.jenkinsci.plugins.pipeline.maven.eventspy.handler;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Arrays;
import java.util.List;
-
import org.apache.maven.execution.ExecutionEvent;
import org.jenkinsci.plugins.pipeline.maven.eventspy.reporter.MavenEventReporter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-
/**
* @author Cyrille Le Clerc
*/
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java
index aa8467c0..e96fdbf2 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/DevNullMavenEventReporter.java
@@ -9,17 +9,11 @@
*/
public class DevNullMavenEventReporter implements MavenEventReporter {
@Override
- public void print(Object message) {
-
- }
+ public void print(Object message) {}
@Override
- public void print(Xpp3Dom element) {
-
- }
+ public void print(Xpp3Dom element) {}
@Override
- public void close() {
-
- }
+ public void close() {}
}
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java
index e5501d60..6c74962b 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/FileMavenEventReporter.java
@@ -32,7 +32,6 @@
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Date;
-
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
import org.codehaus.plexus.util.xml.XMLWriter;
import org.codehaus.plexus.util.xml.XmlWriterUtil;
@@ -54,6 +53,7 @@ public class FileMavenEventReporter implements MavenEventReporter {
* extension and gets renamed "maven-spy-*.log" at the end of the execution
*/
File outFile;
+
PrintWriter out;
XMLWriter xmlWriter;
/**
diff --git a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java
index 0cc54f8c..59185c2a 100644
--- a/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java
+++ b/pipeline-maven-spy/src/main/java/org/jenkinsci/plugins/pipeline/maven/eventspy/reporter/OutputStreamEventReporter.java
@@ -30,7 +30,6 @@
import java.io.Writer;
import java.nio.charset.Charset;
import java.sql.Timestamp;
-
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
import org.codehaus.plexus.util.xml.XMLWriter;
import org.codehaus.plexus.util.xml.XmlWriterUtil;
@@ -57,7 +56,6 @@ public OutputStreamEventReporter(Writer out) {
}
this.xmlWriter = new PrettyPrintXMLWriter(out);
xmlWriter.startElement("mavenExecution");
-
}
@Override
diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java
index 424d224e..b3c8106d 100644
--- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java
+++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyDisablementTest.java
@@ -30,7 +30,6 @@
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
-
import org.apache.maven.eventspy.EventSpy;
import org.apache.maven.execution.DefaultMavenExecutionRequest;
import org.codehaus.plexus.util.xml.Xpp3Dom;
@@ -104,5 +103,4 @@ public Map getData() {
spy.onEvent(request);
spy.close();
}
-
}
diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java
index ac7c1a58..b38c2902 100644
--- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java
+++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyMTTest.java
@@ -35,11 +35,9 @@
import java.util.Vector;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;
-
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
-
import org.apache.maven.eventspy.EventSpy;
import org.apache.maven.execution.DefaultMavenExecutionRequest;
import org.apache.maven.model.Model;
@@ -77,7 +75,9 @@ public Map getData() {
});
MavenXpp3Reader mavenXpp3Reader = new MavenXpp3Reader();
- InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml");
+ InputStream in = Thread.currentThread()
+ .getContextClassLoader()
+ .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml");
assertThat(in).isNotNull();
Model model = mavenXpp3Reader.read(in);
@@ -92,7 +92,8 @@ public Map getData() {
@Test // Issue JENKINS-46579
public void testMavenExecutionMTSpyReporters() throws Exception {
int numThreads = 100;
- final CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current)
+ final CyclicBarrier barrier =
+ new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current)
final AtomicInteger counter = new AtomicInteger(0);
final ExceptionHolder exceptionHolder = new ExceptionHolder();
@@ -103,27 +104,28 @@ public void testMavenExecutionMTSpyReporters() throws Exception {
// persisted state, the rest will read it a couple of times.
for (int i = 0; i < numThreads; i++) {
new Thread(new Runnable() {
- @Override
- public void run() {
- try {
- barrier.await();
- // Thread.sleep(RandomUtils.nextInt(0, 500));
- JenkinsMavenEventSpy spy = createSpy();
- spyList.add(spy);
- DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest();
- request.setPom(new File("path/to/pom.xml"));
- request.setGoals(Arrays.asList("clean", "source:jar", "deploy"));
-
- for (int i = 0; i < 100; i++) {
- spy.onEvent(request);
+ @Override
+ public void run() {
+ try {
+ barrier.await();
+ // Thread.sleep(RandomUtils.nextInt(0, 500));
+ JenkinsMavenEventSpy spy = createSpy();
+ spyList.add(spy);
+ DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest();
+ request.setPom(new File("path/to/pom.xml"));
+ request.setGoals(Arrays.asList("clean", "source:jar", "deploy"));
+
+ for (int i = 0; i < 100; i++) {
+ spy.onEvent(request);
+ }
+
+ } catch (Exception e) {
+ exceptionHolder.e = e;
+ }
+ counter.incrementAndGet();
}
-
- } catch (Exception e) {
- exceptionHolder.e = e;
- }
- counter.incrementAndGet();
- }
- }).start();
+ })
+ .start();
}
barrier.await();
@@ -164,7 +166,8 @@ public void run() {
@Test // Issue JENKINS-46579
public void testMavenExecutionMTRequestsSingleSpyReporter() throws Exception {
int numThreads = 100;
- final CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current)
+ final CyclicBarrier barrier =
+ new CyclicBarrier(numThreads + 1); // we need to also stop the test thread (current)
final AtomicInteger counter = new AtomicInteger(0);
final ExceptionHolder exceptionHolder = new ExceptionHolder();
@@ -175,25 +178,26 @@ public void testMavenExecutionMTRequestsSingleSpyReporter() throws Exception {
// persisted state, the rest will read it a couple of times.
for (int i = 0; i < numThreads; i++) {
new Thread(new Runnable() {
- @Override
- public void run() {
- try {
- barrier.await();
- // Thread.sleep(RandomUtils.nextInt(0, 500));
-
- DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest();
- request.setPom(new File("path/to/pom.xml"));
- request.setGoals(Arrays.asList("clean", "source:jar", "deploy"));
- for (int i = 0; i < 100; i++) {
- spy.onEvent(request);
+ @Override
+ public void run() {
+ try {
+ barrier.await();
+ // Thread.sleep(RandomUtils.nextInt(0, 500));
+
+ DefaultMavenExecutionRequest request = new DefaultMavenExecutionRequest();
+ request.setPom(new File("path/to/pom.xml"));
+ request.setGoals(Arrays.asList("clean", "source:jar", "deploy"));
+ for (int i = 0; i < 100; i++) {
+ spy.onEvent(request);
+ }
+
+ } catch (Exception e) {
+ exceptionHolder.e = e;
+ }
+ counter.incrementAndGet();
}
-
- } catch (Exception e) {
- exceptionHolder.e = e;
- }
- counter.incrementAndGet();
- }
- }).start();
+ })
+ .start();
}
barrier.await();
@@ -246,7 +250,6 @@ public void validateXMLDocument(File document) {
e.printStackTrace();
fail("Failed to parse spylog: " + document + " error:" + e);
}
-
}
public static class ExceptionHolder {
diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java
index 3244152e..246c5909 100644
--- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java
+++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/JenkinsMavenEventSpyTest.java
@@ -32,7 +32,6 @@
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
-
import org.apache.maven.eventspy.EventSpy;
import org.apache.maven.execution.DefaultMavenExecutionRequest;
import org.apache.maven.execution.ExecutionEvent;
@@ -74,7 +73,9 @@ public Map getData() {
});
MavenXpp3Reader mavenXpp3Reader = new MavenXpp3Reader();
- InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml");
+ InputStream in = Thread.currentThread()
+ .getContextClassLoader()
+ .getResourceAsStream("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml");
assertThat(in).isNotNull();
Model model = mavenXpp3Reader.read(in);
diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java
index f518ab96..2c8cce3b 100644
--- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java
+++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/AbstractMavenEventHandlerTest.java
@@ -4,7 +4,6 @@
import java.io.IOException;
import java.io.InputStream;
-
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.project.MavenProject;
@@ -19,13 +18,15 @@ public class AbstractMavenEventHandlerTest {
@Test
public void test_getMavenFlattenPluginFlattenedPomFilename_nameDefinedAtTheExecutionLevel() throws Exception {
- test_getMavenFlattenPluginFlattenedPomFilename("org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename.xml",
+ test_getMavenFlattenPluginFlattenedPomFilename(
+ "org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename.xml",
"${project.artifactId}-${project.version}.pom");
}
@Test
public void test_getMavenFlattenPluginFlattenedPomFilename_nameDefinedAtThePluginLevel() throws Exception {
- test_getMavenFlattenPluginFlattenedPomFilename("org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename2.xml",
+ test_getMavenFlattenPluginFlattenedPomFilename(
+ "org/jenkinsci/plugins/pipeline/maven/eventspy/pom-flatten-plugin-flattenedPomFilename2.xml",
"${project.artifactId}-${project.version}.flatten-pom");
}
@@ -34,17 +35,19 @@ public void test_getMavenFlattenPluginFlattenedPomFilename_nameNotDefined() thro
test_getMavenFlattenPluginFlattenedPomFilename("org/jenkinsci/plugins/pipeline/maven/eventspy/pom.xml", null);
}
- protected void test_getMavenFlattenPluginFlattenedPomFilename(String pomFile, String expected) throws IOException, XmlPullParserException {
+ protected void test_getMavenFlattenPluginFlattenedPomFilename(String pomFile, String expected)
+ throws IOException, XmlPullParserException {
InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(pomFile);
Model mavenProjectModel = new MavenXpp3Reader().read(in);
MavenProject mavenProject = new MavenProject(mavenProjectModel);
- AbstractMavenEventHandler mavenEventHandler = new AbstractMavenEventHandler(new OutputStreamEventReporter(System.err)) {
- @Override
- protected boolean _handle(Object o) {
- return false;
- }
- };
+ AbstractMavenEventHandler mavenEventHandler =
+ new AbstractMavenEventHandler(new OutputStreamEventReporter(System.err)) {
+ @Override
+ protected boolean _handle(Object o) {
+ return false;
+ }
+ };
String actual = mavenEventHandler.getMavenFlattenPluginFlattenedPomFilename(mavenProject);
// this unit test does not expand Maven variables
assertThat(actual).isEqualTo(expected);
diff --git a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java
index 068c6839..ee7449ba 100644
--- a/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java
+++ b/pipeline-maven-spy/src/test/java/org/jenkinsci/plugins/pipeline/maven/eventspy/handler/DeployDeployFileExecutionHandlerTest.java
@@ -5,7 +5,6 @@
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-
import org.apache.maven.execution.ExecutionEvent;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Model;
diff --git a/pipeline-maven/pom.xml b/pipeline-maven/pom.xml
index 4b238e81..86b44a11 100644
--- a/pipeline-maven/pom.xml
+++ b/pipeline-maven/pom.xml
@@ -40,48 +40,11 @@
https://github.com/jenkinsci/pipeline-maven-plugin/
- https://github.com/jenkinsci/pipeline-maven-plugin/releases
1333
+ https://github.com/jenkinsci/pipeline-maven-plugin/releases
-
- org.jenkins-ci.plugins
- pipeline-maven-api
-
-
-
-
- org.jenkins-ci.plugins
- pipeline-maven-database
- ${project.version}
- test
-
-
- io.jenkins.plugins
- h2-api
- ${jenkins-plugin-h2.version}
- test
-
-
- io.jenkins.plugins
- mysql-api
- ${jenkins-plugin-mysql.version}
- test
-
-
- com.google.protobuf
- protobuf-java
-
-
-
-
- io.jenkins.plugins
- postgresql-api
- ${jenkins-plugin-postgresql.version}
- test
-
org.jenkins-ci.main
maven-plugin
@@ -180,7 +143,6 @@
org.jenkins-ci.plugins
jacoco
- ${jenkins-plugin-jacoco.version}
true
@@ -258,6 +220,10 @@
pipeline-build-step
true
+
+ org.jenkins-ci.plugins
+ pipeline-maven-api
+
org.jenkins-ci.plugins
script-security
@@ -292,14 +258,14 @@
org.jvnet.hudson.plugins.findbugs
library
-
- xml-apis
- xml-apis
-
xerces
xercesImpl
+
+ xml-apis
+ xml-apis
+
@@ -313,6 +279,13 @@
slf4j-simple
+
+ ${project.groupId}
+ pipeline-maven-spy
+ ${project.version}
+ provided
+
+
io.jenkins
configuration-as-code
@@ -323,6 +296,30 @@
test-harness
test
+
+ io.jenkins.plugins
+ h2-api
+ ${jenkins-plugin-h2.version}
+ test
+
+
+ io.jenkins.plugins
+ mysql-api
+ ${jenkins-plugin-mysql.version}
+ test
+
+
+ com.google.protobuf
+ protobuf-java
+
+
+
+
+ io.jenkins.plugins
+ postgresql-api
+ ${jenkins-plugin-postgresql.version}
+ test
+
org.jenkins-ci.plugins
git
@@ -347,6 +344,14 @@
tests
test
+
+
+ org.jenkins-ci.plugins
+ pipeline-maven-database
+ ${project.version}
+ test
+
org.jenkins-ci.plugins
pipeline-stage-step
@@ -503,9 +508,12 @@
com.spotify
dockerfile-maven-plugin
+
+ false
+
- build-sshd-image
+ build-first-sshd-image
build
@@ -560,9 +568,6 @@
-
- false
-
org.codehaus.mojo
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java
index a3ebed0c..a6d6aa37 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/GlobalPipelineMavenConfig.java
@@ -28,6 +28,9 @@
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.StandardListBoxModel;
import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials;
+import edu.umd.cs.findbugs.annotations.CheckForNull;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.init.Terminator;
@@ -35,6 +38,15 @@
import hudson.security.ACL;
import hudson.util.FormValidation;
import hudson.util.ListBoxModel;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import jenkins.model.GlobalConfiguration;
import jenkins.model.GlobalConfigurationCategory;
import jenkins.model.Jenkins;
@@ -51,19 +63,6 @@
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.verb.POST;
-import edu.umd.cs.findbugs.annotations.CheckForNull;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
/**
* @author Cyrille Le Clerc
*/
@@ -71,7 +70,7 @@
@Symbol("pipelineMaven")
public class GlobalPipelineMavenConfig extends GlobalConfiguration {
- private final static Logger LOGGER = Logger.getLogger(GlobalPipelineMavenConfig.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(GlobalPipelineMavenConfig.class.getName());
private transient volatile PipelineMavenPluginDao dao;
@@ -110,10 +109,10 @@ public void setDaoClass(String daoClass) {
}
private Optional findDaoFromExtension(String daoClass) {
- return ExtensionList.lookup(PipelineMavenPluginDao.class)
- .stream()
- .filter(pipelineMavenPluginDao -> StringUtils.equals(pipelineMavenPluginDao.getClass().getName(), daoClass))
- .findFirst();
+ return ExtensionList.lookup(PipelineMavenPluginDao.class).stream()
+ .filter(pipelineMavenPluginDao ->
+ StringUtils.equals(pipelineMavenPluginDao.getClass().getName(), daoClass))
+ .findFirst();
}
@Override
@@ -227,7 +226,7 @@ public synchronized void setJdbcCredentialsId(String jdbcCredentialsId) {
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
- if(!StringUtils.equals(json.getString("daoClass"), daoClass)) {
+ if (!StringUtils.equals(json.getString("daoClass"), daoClass)) {
closeDatasource();
this.dao = null;
}
@@ -279,16 +278,11 @@ public synchronized PipelineTriggerService getPipelineTriggerService() {
@NonNull
public Set getTriggerDownstreamBuildsResultsCriteria() {
Set result = new HashSet<>(5);
- if (this.triggerDownstreamUponResultSuccess)
- result.add(Result.SUCCESS);
- if (this.triggerDownstreamUponResultUnstable)
- result.add(Result.UNSTABLE);
- if (this.triggerDownstreamUponResultAborted)
- result.add(Result.ABORTED);
- if (this.triggerDownstreamUponResultNotBuilt)
- result.add(Result.NOT_BUILT);
- if (this.triggerDownstreamUponResultFailure)
- result.add(Result.FAILURE);
+ if (this.triggerDownstreamUponResultSuccess) result.add(Result.SUCCESS);
+ if (this.triggerDownstreamUponResultUnstable) result.add(Result.UNSTABLE);
+ if (this.triggerDownstreamUponResultAborted) result.add(Result.ABORTED);
+ if (this.triggerDownstreamUponResultNotBuilt) result.add(Result.NOT_BUILT);
+ if (this.triggerDownstreamUponResultFailure) result.add(Result.FAILURE);
return result;
}
@@ -306,21 +300,19 @@ public ListBoxModel doFillJdbcCredentialsIdItems() {
.includeEmptyValue()
.withMatching(
CredentialsMatchers.always(),
- CredentialsProvider.lookupCredentials(UsernamePasswordCredentials.class,
- Jenkins.get(),
- ACL.SYSTEM,
- Collections.EMPTY_LIST));
+ CredentialsProvider.lookupCredentials(
+ UsernamePasswordCredentials.class, Jenkins.get(), ACL.SYSTEM, Collections.EMPTY_LIST));
}
@POST
public FormValidation doValidateJdbcConnection(
- @QueryParameter String jdbcUrl,
- @QueryParameter String properties,
- @QueryParameter String jdbcCredentialsId,
- @QueryParameter String daoClass) {
+ @QueryParameter String jdbcUrl,
+ @QueryParameter String properties,
+ @QueryParameter String jdbcCredentialsId,
+ @QueryParameter String daoClass) {
Jenkins.get().checkPermission(Jenkins.ADMINISTER);
Optional optionalPipelineMavenPluginDao = findDaoFromExtension(daoClass);
- if(optionalPipelineMavenPluginDao.isEmpty()) {
+ if (optionalPipelineMavenPluginDao.isEmpty()) {
return FormValidation.ok("OK");
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java
index c793170a..1b51a189 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenConfigFolderOverrideProperty.java
@@ -3,14 +3,13 @@
import com.cloudbees.hudson.plugins.folder.AbstractFolder;
import com.cloudbees.hudson.plugins.folder.AbstractFolderProperty;
import com.cloudbees.hudson.plugins.folder.AbstractFolderPropertyDescriptor;
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import jenkins.mvn.GlobalSettingsProvider;
import jenkins.mvn.SettingsProvider;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
-import edu.umd.cs.findbugs.annotations.NonNull;
-
/**
* Provides a way to override maven configuration at a folder level
*/
@@ -37,8 +36,7 @@ public class MavenConfigFolderOverrideProperty extends AbstractFolderProperty publishersExceptions) {
- super(publishersExceptions.size() + " exceptions occured within the publishers of the withMaven pipeline step:\n"
- + publishersExceptions.stream().map(e -> {
- StringBuilder builder = new StringBuilder("- ");
- builder.append(e.getMessage());
- if (e.getCause() != null) {
- builder.append(": ").append(e.getCause().getMessage());
- }
- return builder.toString();
- }).collect(joining()));
+ super(publishersExceptions.size()
+ + " exceptions occured within the publishers of the withMaven pipeline step:\n"
+ + publishersExceptions.stream()
+ .map(e -> {
+ StringBuilder builder = new StringBuilder("- ");
+ builder.append(e.getMessage());
+ if (e.getCause() != null) {
+ builder.append(": ").append(e.getCause().getMessage());
+ }
+ return builder.toString();
+ })
+ .collect(joining()));
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java
index b0f86207..966046f2 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisher.java
@@ -1,26 +1,26 @@
package org.jenkinsci.plugins.pipeline.maven;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.ExtensionPoint;
import hudson.model.AbstractDescribableImpl;
import hudson.model.Descriptor;
-import org.jenkinsci.plugins.workflow.steps.StepContext;
-import org.kohsuke.stapler.DataBoundSetter;
-import org.w3c.dom.Element;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
import java.io.IOException;
import java.io.Serializable;
import java.util.logging.Logger;
+import org.jenkinsci.plugins.workflow.steps.StepContext;
+import org.kohsuke.stapler.DataBoundSetter;
+import org.w3c.dom.Element;
/**
* Experimental interface, likely to change in the future.
*
* @author Cyrille Le Clerc
*/
-public abstract class MavenPublisher extends AbstractDescribableImpl implements ExtensionPoint, Comparable, Serializable {
+public abstract class MavenPublisher extends AbstractDescribableImpl
+ implements ExtensionPoint, Comparable, Serializable {
- private final static Logger LOGGER = Logger.getLogger(MavenPublisher.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(MavenPublisher.class.getName());
private boolean disabled;
@@ -39,7 +39,8 @@ public void setDisabled(boolean disabled) {
* @throws IOException
* @throws InterruptedException
*/
- public abstract void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException;
+ public abstract void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt)
+ throws IOException, InterruptedException;
@Override
public DescriptorImpl getDescriptor() {
@@ -53,12 +54,11 @@ public int compareTo(MavenPublisher o) {
@Override
public String toString() {
- return getClass().getName() + "[" +
- "disabled=" + disabled +
- ']';
+ return getClass().getName() + "[" + "disabled=" + disabled + ']';
}
- public static abstract class DescriptorImpl extends Descriptor implements Comparable {
+ public abstract static class DescriptorImpl extends Descriptor
+ implements Comparable {
/**
* @return the ordinal of this reporter to execute publishers in predictable order. The smallest ordinal is executed first.
* @see #compareTo(MavenPublisher)
@@ -73,8 +73,7 @@ public int ordinal() {
* @return name of the marker file. {@code null} if no marker file is defined for this reporter
*/
@Nullable
- abstract public String getSkipFileName();
-
+ public abstract String getSkipFileName();
@Override
public int compareTo(DescriptorImpl o) {
@@ -87,6 +86,4 @@ public int compareTo(DescriptorImpl o) {
return compare;
}
}
-
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java
index f7a7f1c0..be98c58d 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenPublisherStrategy.java
@@ -1,12 +1,10 @@
package org.jenkinsci.plugins.pipeline.maven;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.DescriptorExtensionList;
import hudson.model.Descriptor;
import hudson.model.TaskListener;
-import jenkins.model.Jenkins;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
@@ -15,12 +13,12 @@
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
+import jenkins.model.Jenkins;
/**
* @author Cyrille Le Clerc
*/
public enum MavenPublisherStrategy {
-
IMPLICIT(Messages.publisher_strategy_implicit_description()) {
/**
* Build the list of {@link MavenPublisher}s that should be invoked for the build execution of the given {@link TaskListener}
@@ -38,7 +36,8 @@ public enum MavenPublisherStrategy {
* @param listener
*/
@NonNull
- public List buildPublishersList(@NonNull List configuredPublishers, @NonNull TaskListener listener) {
+ public List buildPublishersList(
+ @NonNull List configuredPublishers, @NonNull TaskListener listener) {
// configuration passed as parameter of "withMaven(options=[...]){}"
// mavenPublisher.descriptor.id -> mavenPublisher
@@ -55,23 +54,28 @@ public List buildPublishersList(@NonNull List co
Map globallyConfiguredPublishersById = new HashMap<>();
GlobalPipelineMavenConfig globalPipelineMavenConfig = GlobalPipelineMavenConfig.get();
- List globallyConfiguredPublishers = globalPipelineMavenConfig == null ? Collections.emptyList() : globalPipelineMavenConfig.getPublisherOptions();
+ List globallyConfiguredPublishers = globalPipelineMavenConfig == null
+ ? Collections.emptyList()
+ : globalPipelineMavenConfig.getPublisherOptions();
if (globallyConfiguredPublishers == null) {
globallyConfiguredPublishers = Collections.emptyList();
}
for (MavenPublisher mavenPublisher : globallyConfiguredPublishers) {
- globallyConfiguredPublishersById.put(mavenPublisher.getDescriptor().getId(), mavenPublisher);
+ globallyConfiguredPublishersById.put(
+ mavenPublisher.getDescriptor().getId(), mavenPublisher);
}
-
// mavenPublisher.descriptor.id -> mavenPublisher
Map defaultPublishersById = new HashMap<>();
- DescriptorExtensionList> descriptorList = Jenkins.get().getDescriptorList(MavenPublisher.class);
+ DescriptorExtensionList> descriptorList =
+ Jenkins.get().getDescriptorList(MavenPublisher.class);
for (Descriptor descriptor : descriptorList) {
try {
defaultPublishersById.put(descriptor.getId(), descriptor.clazz.newInstance());
} catch (InstantiationException | IllegalAccessException e) {
- PrintWriter error = listener.error("[withMaven] Exception instantiation default config for Maven Publisher '" + descriptor.getDisplayName() + "' / " + descriptor.getId() + ": " + e);
+ PrintWriter error =
+ listener.error("[withMaven] Exception instantiation default config for Maven Publisher '"
+ + descriptor.getDisplayName() + "' / " + descriptor.getId() + ": " + e);
e.printStackTrace(error);
error.close();
LOGGER.log(Level.WARNING, "Exception instantiating " + descriptor.clazz + ": " + e, e);
@@ -79,11 +83,16 @@ public List buildPublishersList(@NonNull List co
}
}
-
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] Maven Publishers with configuration provided by the pipeline: " + configuredPublishersById.values());
- listener.getLogger().println("[withMaven] Maven Publishers with configuration defined globally: " + globallyConfiguredPublishersById.values());
- listener.getLogger().println("[withMaven] Maven Publishers with default configuration: " + defaultPublishersById.values());
+ listener.getLogger()
+ .println("[withMaven] Maven Publishers with configuration provided by the pipeline: "
+ + configuredPublishersById.values());
+ listener.getLogger()
+ .println("[withMaven] Maven Publishers with configuration defined globally: "
+ + globallyConfiguredPublishersById.values());
+ listener.getLogger()
+ .println("[withMaven] Maven Publishers with default configuration: "
+ + defaultPublishersById.values());
}
// TODO FILTER
@@ -105,12 +114,12 @@ public List buildPublishersList(@NonNull List co
EXPLICIT(Messages.publisher_strategy_explicit_description()) {
@NonNull
@Override
- public List buildPublishersList
- (@NonNull List configuredPublishers, @NonNull TaskListener listener) {
+ public List buildPublishersList(
+ @NonNull List configuredPublishers, @NonNull TaskListener listener) {
// filter null entries caused by missing plugins
List result = new ArrayList<>();
- for(MavenPublisher publisher: configuredPublishers) {
+ for (MavenPublisher publisher : configuredPublishers) {
if (publisher != null) {
result.add(publisher);
}
@@ -128,7 +137,11 @@ public List buildPublishersList(@NonNull List co
this.description = description;
}
- public MavenPublisher buildConfiguredMavenPublisher(@Nullable MavenPublisher pipelinePublisher, @Nullable MavenPublisher globallyConfiguredPublisher, @NonNull MavenPublisher defaultPublisher, @NonNull TaskListener listener) {
+ public MavenPublisher buildConfiguredMavenPublisher(
+ @Nullable MavenPublisher pipelinePublisher,
+ @Nullable MavenPublisher globallyConfiguredPublisher,
+ @NonNull MavenPublisher defaultPublisher,
+ @NonNull TaskListener listener) {
MavenPublisher result;
String logMessage;
@@ -142,32 +155,37 @@ public MavenPublisher buildConfiguredMavenPublisher(@Nullable MavenPublisher pip
} else if (pipelinePublisher != null && globallyConfiguredPublisher == null) {
result = pipelinePublisher;
logMessage = "pipeline";
- } else if (pipelinePublisher != null && globallyConfiguredPublisher != null) {
+ } else if (pipelinePublisher != null && globallyConfiguredPublisher != null) {
// workaround FindBugs "Bug kind and pattern: NP - NP_NULL_ON_SOME_PATH"
// check pipelinePublisher and globallyConfiguredPublisher are non null even if it is useless
result = pipelinePublisher;
logMessage = "pipeline";
- listener.getLogger().println("[withMaven] WARNING merging publisher configuration defined in the 'Global Tool Configuration' and at the pipeline level is not yet supported." +
- " Use pipeline level configuration for '" + result.getDescriptor().getDisplayName() + "'");
-//
-// PropertyDescriptor[] propertyDescriptors = PropertyUtils.getPropertyDescriptors(defaultPublisher);
-// for(PropertyDescriptor propertyDescriptor: propertyDescriptors) {
-// Method readMethod = propertyDescriptor.getReadMethod();
-// Method writeMethod = propertyDescriptor.getWriteMethod();
-//
-// Object defaultValue = readMethod.invoke(defaultPublisher);
-// Object globallyDefinedValue = readMethod.invoke(globallyConfiguredPublisher);
-// Object pipelineValue = readMethod.invoke(pipelinePublisher);
-// }
+ listener.getLogger()
+ .println(
+ "[withMaven] WARNING merging publisher configuration defined in the 'Global Tool Configuration' and at the pipeline level is not yet supported."
+ + " Use pipeline level configuration for '"
+ + result.getDescriptor().getDisplayName() + "'");
+ //
+ // PropertyDescriptor[] propertyDescriptors =
+ // PropertyUtils.getPropertyDescriptors(defaultPublisher);
+ // for(PropertyDescriptor propertyDescriptor: propertyDescriptors) {
+ // Method readMethod = propertyDescriptor.getReadMethod();
+ // Method writeMethod = propertyDescriptor.getWriteMethod();
+ //
+ // Object defaultValue = readMethod.invoke(defaultPublisher);
+ // Object globallyDefinedValue = readMethod.invoke(globallyConfiguredPublisher);
+ // Object pipelineValue = readMethod.invoke(pipelinePublisher);
+ // }
} else {
throw new IllegalStateException("Should not happen, workaround for Findbugs NP_NULL_ON_SOME_PATH above");
}
if (LOGGER.isLoggable(Level.FINE))
- listener.getLogger().println("[withMaven] Use " + logMessage + " defined publisher for '" + result.getDescriptor().getDisplayName() + "'");
+ listener.getLogger()
+ .println("[withMaven] Use " + logMessage + " defined publisher for '"
+ + result.getDescriptor().getDisplayName() + "'");
return result;
-
}
public String getDescription() {
@@ -190,7 +208,8 @@ public String getDescription() {
* @param listener
*/
@NonNull
- public abstract List buildPublishersList(@NonNull List configuredPublishers, @NonNull TaskListener listener);
+ public abstract List buildPublishersList(
+ @NonNull List configuredPublishers, @NonNull TaskListener listener);
- private final static Logger LOGGER = Logger.getLogger(MavenPublisherStrategy.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(MavenPublisherStrategy.class.getName());
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java
index c8fa4c6c..0bc7ac36 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/MavenSpyLogProcessor.java
@@ -24,23 +24,10 @@
package org.jenkinsci.plugins.pipeline.maven;
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.FilePath;
import hudson.model.Run;
import hudson.model.TaskListener;
-import jenkins.model.InterruptedBuildAction;
-import org.apache.commons.lang.StringUtils;
-import org.jenkinsci.plugins.pipeline.maven.publishers.JenkinsMavenEventSpyLogsPublisher;
-import org.jenkinsci.plugins.pipeline.maven.publishers.MavenPipelinePublisherException;
-import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
-import org.jenkinsci.plugins.workflow.steps.StepContext;
-import org.w3c.dom.Element;
-import org.xml.sax.SAXException;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import javax.xml.XMLConstants;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
@@ -53,6 +40,18 @@
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
+import javax.xml.XMLConstants;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import jenkins.model.InterruptedBuildAction;
+import org.apache.commons.lang.StringUtils;
+import org.jenkinsci.plugins.pipeline.maven.publishers.JenkinsMavenEventSpyLogsPublisher;
+import org.jenkinsci.plugins.pipeline.maven.publishers.MavenPipelinePublisherException;
+import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
+import org.jenkinsci.plugins.workflow.steps.StepContext;
+import org.w3c.dom.Element;
+import org.xml.sax.SAXException;
/**
* @author Cyrille Le Clerc
@@ -63,13 +62,19 @@ public class MavenSpyLogProcessor implements Serializable {
private static final Logger LOGGER = Logger.getLogger(MavenSpyLogProcessor.class.getName());
- public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath mavenSpyLogFolder, @NonNull List options,
- @NonNull MavenPublisherStrategy publisherStrategy) throws IOException, InterruptedException {
+ public void processMavenSpyLogs(
+ @NonNull StepContext context,
+ @NonNull FilePath mavenSpyLogFolder,
+ @NonNull List options,
+ @NonNull MavenPublisherStrategy publisherStrategy)
+ throws IOException, InterruptedException {
long nanosBefore = System.nanoTime();
FilePath[] mavenSpyLogsList = mavenSpyLogFolder.list("maven-spy-*.log");
- LOGGER.log(Level.FINE, "Found {0} maven execution reports in {1}", new Object[]{mavenSpyLogsList.length, mavenSpyLogFolder});
+ LOGGER.log(Level.FINE, "Found {0} maven execution reports in {1}", new Object[] {
+ mavenSpyLogsList.length, mavenSpyLogFolder
+ });
TaskListener listener = context.get(TaskListener.class);
FilePath workspace = context.get(FilePath.class);
@@ -106,7 +111,8 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath
documentBuilder = dbf.newDocumentBuilder();
- // See https://github.com/jenkinsci/jenkins/blob/jenkins-2.176/core/src/main/java/jenkins/util/xml/XMLUtils.java#L114
+ // See
+ // https://github.com/jenkinsci/jenkins/blob/jenkins-2.176/core/src/main/java/jenkins/util/xml/XMLUtils.java#L114
documentBuilder.setEntityResolver(XmlUtils.RestrictiveEntityResolver.INSTANCE);
} catch (ParserConfigurationException e) {
throw new IllegalStateException("Failure to create a DocumentBuilder", e);
@@ -129,10 +135,12 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath
new JenkinsMavenEventSpyLogsPublisher().process(context, mavenSpyLogs);
}
- Element mavenSpyLogsElt = documentBuilder.parse(mavenSpyLogsInputStream).getDocumentElement();
+ Element mavenSpyLogsElt =
+ documentBuilder.parse(mavenSpyLogsInputStream).getDocumentElement();
- if (LOGGER.isLoggable(Level.FINE)){
- listener.getLogger().println("[withMaven] Maven Publisher Strategy: " + publisherStrategy.getDescription());
+ if (LOGGER.isLoggable(Level.FINE)) {
+ listener.getLogger()
+ .println("[withMaven] Maven Publisher Strategy: " + publisherStrategy.getDescription());
}
List mavenPublishers = publisherStrategy.buildPublishersList(options, listener);
List exceptions = new ArrayList<>();
@@ -140,34 +148,52 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath
String skipFileName = mavenPublisher.getDescriptor().getSkipFileName();
if (Boolean.TRUE.equals(mavenPublisher.isDisabled())) {
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] Skip '" + mavenPublisher.getDescriptor().getDisplayName() + "' disabled by configuration");
+ listener.getLogger()
+ .println("[withMaven] Skip '"
+ + mavenPublisher.getDescriptor().getDisplayName()
+ + "' disabled by configuration");
}
- } else if (StringUtils.isNotEmpty(skipFileName) && workspace.child(skipFileName).exists()) {
+ } else if (StringUtils.isNotEmpty(skipFileName)
+ && workspace.child(skipFileName).exists()) {
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] Skip '" + mavenPublisher.getDescriptor().getDisplayName() + "' disabled by marker file '" + skipFileName + "'");
+ listener.getLogger()
+ .println("[withMaven] Skip '"
+ + mavenPublisher.getDescriptor().getDisplayName()
+ + "' disabled by marker file '" + skipFileName + "'");
}
} else {
long nanosBeforePublisher = System.nanoTime();
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] Run '" + mavenPublisher.getDescriptor().getDisplayName() + "'...");
+ listener.getLogger()
+ .println("[withMaven] Run '"
+ + mavenPublisher.getDescriptor().getDisplayName() + "'...");
}
try {
mavenPublisher.process(context, mavenSpyLogsElt);
} catch (InterruptedException e) {
- listener.error("[withMaven] Processing of Maven build outputs interrupted in " + mavenPublisher.toString() + " after " +
- TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS) + "ms.");
- Thread.currentThread().interrupt(); // set interrupt flag
+ listener.error("[withMaven] Processing of Maven build outputs interrupted in "
+ + mavenPublisher.toString() + " after "
+ + TimeUnit.MILLISECONDS.convert(
+ System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS)
+ + "ms.");
+ Thread.currentThread().interrupt(); // set interrupt flag
throw e;
} catch (MavenPipelinePublisherException e) {
exceptions.add(e);
} catch (Exception e) {
- PrintWriter error = listener.error("[withMaven] WARNING Exception executing Maven reporter '" + mavenPublisher.getDescriptor().getDisplayName() +
- "' / " + mavenPublisher.getDescriptor().getId() + "." +
- " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org ");
+ PrintWriter error = listener.error(
+ "[withMaven] WARNING Exception executing Maven reporter '"
+ + mavenPublisher.getDescriptor().getDisplayName() + "' / "
+ + mavenPublisher.getDescriptor().getId() + "."
+ + " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org ");
e.printStackTrace(error);
- exceptions.add(new MavenPipelinePublisherException(mavenPublisher.getDescriptor().getDisplayName(), "", e));
+ exceptions.add(new MavenPipelinePublisherException(
+ mavenPublisher.getDescriptor().getDisplayName(), "", e));
} finally {
- durationInMillisPerPublisher.add(new AbstractMap.SimpleImmutableEntry(mavenPublisher.getDescriptor().getDisplayName(), TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBeforePublisher, TimeUnit.NANOSECONDS)));
+ durationInMillisPerPublisher.add(new AbstractMap.SimpleImmutableEntry(
+ mavenPublisher.getDescriptor().getDisplayName(),
+ TimeUnit.MILLISECONDS.convert(
+ System.nanoTime() - nanosBeforePublisher, TimeUnit.NANOSECONDS)));
}
}
}
@@ -180,43 +206,55 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath
Run run = context.get(Run.class);
String msg = "";
if (run.getActions(InterruptedBuildAction.class).isEmpty()) {
- msg = "[withMaven] WARNING Exception parsing the logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + ", ignore file. " +
- " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org ";
+ msg = "[withMaven] WARNING Exception parsing the logs generated by the Jenkins Maven Event Spy "
+ + mavenSpyLogs + ", ignore file. "
+ + " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org ";
} else {
// job has been aborted (see InterruptedBuildAction)
- msg = "[withMaven] WARNING logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + " are invalid, probably due to the interruption of the job, ignore file.";
+ msg = "[withMaven] WARNING logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs
+ + " are invalid, probably due to the interruption of the job, ignore file.";
}
PrintWriter errorWriter = listener.error(msg);
e.printStackTrace(errorWriter);
throw new MavenPipelineException(e);
} catch (InterruptedException e) {
- PrintWriter errorWriter = listener.error("[withMaven] Processing of Maven build outputs interrupted after " +
- TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS) + "ms.");
+ PrintWriter errorWriter =
+ listener.error("[withMaven] Processing of Maven build outputs interrupted after "
+ + TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanosBefore, TimeUnit.NANOSECONDS)
+ + "ms.");
if (LOGGER.isLoggable(Level.FINE)) {
e.printStackTrace(errorWriter);
}
- Thread.currentThread().interrupt(); // set interrupt flag
+ Thread.currentThread().interrupt(); // set interrupt flag
return;
} catch (Exception e) {
- PrintWriter errorWriter = listener.error("[withMaven] WARNING Exception processing the logs generated by the Jenkins Maven Event Spy " + mavenSpyLogs + ", ignore file. " +
- " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org ");
+ PrintWriter errorWriter = listener.error(
+ "[withMaven] WARNING Exception processing the logs generated by the Jenkins Maven Event Spy "
+ + mavenSpyLogs + ", ignore file. "
+ + " Please report a bug associated for the component 'pipeline-maven-plugin' at https://issues.jenkins-ci.org ");
e.printStackTrace(errorWriter);
throw new MavenPipelineException(e);
} finally {
if (LOGGER.isLoggable(Level.INFO)) {
- listener.getLogger().println("[withMaven] Publishers: " +
- durationInMillisPerPublisher.stream().filter(entry -> entry.getValue() > 0).
- map(entry -> entry.getKey() + ": " + entry.getValue() + " ms").
- collect(Collectors.joining(", ")));
+ listener.getLogger()
+ .println("[withMaven] Publishers: "
+ + durationInMillisPerPublisher.stream()
+ .filter(entry -> entry.getValue() > 0)
+ .map(entry -> entry.getKey() + ": " + entry.getValue() + " ms")
+ .collect(Collectors.joining(", ")));
}
}
}
FilePath[] mavenSpyLogsInterruptedList = mavenSpyLogFolder.list("maven-spy-*.log.tmp");
if (mavenSpyLogsInterruptedList.length > 0) {
- listener.getLogger().print("[withMaven] One or multiple Maven executions have been ignored by the " +
- "Jenkins Pipeline Maven Plugin because they have been interrupted before completion " +
- "(" + mavenSpyLogsInterruptedList.length + "). See ");
- listener.hyperlink("https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker", "Pipeline Maven Plugin FAQ");
+ listener.getLogger()
+ .print("[withMaven] One or multiple Maven executions have been ignored by the "
+ + "Jenkins Pipeline Maven Plugin because they have been interrupted before completion "
+ + "("
+ + mavenSpyLogsInterruptedList.length + "). See ");
+ listener.hyperlink(
+ "https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker",
+ "Pipeline Maven Plugin FAQ");
listener.getLogger().println(" for more details.");
if (LOGGER.isLoggable(Level.FINE)) {
for (FilePath mavenSpyLogsInterruptedLogs : mavenSpyLogsInterruptedList) {
@@ -227,26 +265,23 @@ public void processMavenSpyLogs(@NonNull StepContext context, @NonNull FilePath
}
/*
-
- */
+
+ */
public static class PluginInvocation {
public String groupId, artifactId, version, goal, executionId;
public String getId() {
- return artifactId + ":" +
- goal + " " +
- "(" + executionId + ")";
+ return artifactId + ":" + goal + " " + "(" + executionId + ")";
}
@Override
public String toString() {
- return "PluginInvocation{" +
- groupId + ":" +
- artifactId + ":" +
- version + "@" +
- goal + " " +
- " " + executionId +
- '}';
+ return "PluginInvocation{" + groupId
+ + ":" + artifactId
+ + ":" + version
+ + "@" + goal
+ + " " + " "
+ + executionId + '}';
}
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java
index 58f9b728..b5fb9d3f 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/NeededPipelineMavenDatabasePluginAdminMonitor.java
@@ -26,14 +26,9 @@
import hudson.Extension;
import hudson.model.AdministrativeMonitor;
+import java.util.Objects;
import jenkins.model.Jenkins;
import org.apache.commons.lang.StringUtils;
-import org.kohsuke.stapler.StaplerRequest;
-import org.kohsuke.stapler.StaplerResponse;
-import org.kohsuke.stapler.interceptor.RequirePOST;
-
-import java.io.IOException;
-import java.util.Objects;
/**
*
@@ -45,8 +40,8 @@ public class NeededPipelineMavenDatabasePluginAdminMonitor extends Administrativ
public boolean isActivated() {
String jdbcUrl = Objects.requireNonNull(GlobalPipelineMavenConfig.get()).getJdbcUrl();
return (StringUtils.startsWith(jdbcUrl, "jdbc:h2")
- || StringUtils.startsWith(jdbcUrl, "jdbc:mysql")
- || StringUtils.startsWith(jdbcUrl, "jdbc:postgresql"))
+ || StringUtils.startsWith(jdbcUrl, "jdbc:mysql")
+ || StringUtils.startsWith(jdbcUrl, "jdbc:postgresql"))
&& Jenkins.get().getPlugin("pipeline-maven-database") == null;
}
@@ -54,5 +49,4 @@ public boolean isActivated() {
public String getDisplayName() {
return Messages.admin_monitor_needed_pipeline_maven_database_plugin_description();
}
-
-}
\ No newline at end of file
+}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java
index 2e21c0a6..85714aab 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStep.java
@@ -38,6 +38,9 @@
import hudson.tasks.Maven;
import hudson.tasks.Maven.MavenInstallation;
import hudson.util.ListBoxModel;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
import jenkins.model.Jenkins;
import jenkins.mvn.GlobalMavenConfig;
import jenkins.mvn.SettingsProvider;
@@ -55,10 +58,6 @@
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
/**
* Configures maven environment to use within a pipeline job by calling sh mvn
or bat mvn
.
* The selected maven installation will be configured and prepended to the path.
@@ -79,8 +78,7 @@ public class WithMavenStep extends Step {
private Boolean traceability = null;
@DataBoundConstructor
- public WithMavenStep() {
- }
+ public WithMavenStep() {}
public String getTempBinDir() {
return tempBinDir;
@@ -272,9 +270,10 @@ public ListBoxModel doFillJdkItems(@AncestorInPath Item item) {
}
return r;
}
-
+
@Restricted(NoExternalUse.class) // Only for UI calls
- public ListBoxModel doFillMavenSettingsConfigItems(@AncestorInPath Item item, @AncestorInPath ItemGroup context) {
+ public ListBoxModel doFillMavenSettingsConfigItems(
+ @AncestorInPath Item item, @AncestorInPath ItemGroup context) {
ListBoxModel r = new ListBoxModel();
if (item == null) {
return r; // it's empty
@@ -288,7 +287,8 @@ public ListBoxModel doFillMavenSettingsConfigItems(@AncestorInPath Item item, @A
}
@Restricted(NoExternalUse.class) // Only for UI calls
- public ListBoxModel doFillGlobalMavenSettingsConfigItems(@AncestorInPath Item item, @AncestorInPath ItemGroup context) {
+ public ListBoxModel doFillGlobalMavenSettingsConfigItems(
+ @AncestorInPath Item item, @AncestorInPath ItemGroup context) {
ListBoxModel r = new ListBoxModel();
if (item == null) {
return r; // it's empty
@@ -308,7 +308,7 @@ public ListBoxModel doFillPublisherStrategyItems(@AncestorInPath Item item, @Anc
return r; // it's empty
}
item.checkPermission(Item.EXTENDED_READ);
- for(MavenPublisherStrategy publisherStrategy: MavenPublisherStrategy.values()) {
+ for (MavenPublisherStrategy publisherStrategy : MavenPublisherStrategy.values()) {
r.add(publisherStrategy.getDescription(), publisherStrategy.name());
}
return r;
@@ -320,6 +320,5 @@ public ListBoxModel doFillPublisherStrategyItems(@AncestorInPath Item item, @Anc
public DescriptorExtensionList getOptionsDescriptors() {
return Jenkins.get().getDescriptorList(MavenPublisher.class);
}
-
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java
index 2e8f041b..03c4e06b 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution.java
@@ -24,7 +24,6 @@
package org.jenkinsci.plugins.pipeline.maven;
-
import org.jenkinsci.plugins.workflow.steps.AbstractStepExecutionImpl;
import org.jenkinsci.plugins.workflow.steps.BodyExecutionCallback;
import org.jenkinsci.plugins.workflow.steps.StepContext;
@@ -50,8 +49,7 @@ public boolean start() {
private class WithMavenStepExecutionCallBack extends BodyExecutionCallback.TailCall {
private static final long serialVersionUID = 1L;
- private WithMavenStepExecutionCallBack() {
- }
+ private WithMavenStepExecutionCallBack() {}
@Override
protected void finished(StepContext context) {
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java
index 988cee2b..3fadbdd7 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/WithMavenStepExecution2.java
@@ -24,25 +24,26 @@
package org.jenkinsci.plugins.pipeline.maven;
-
import com.cloudbees.hudson.plugins.folder.AbstractFolder;
import com.cloudbees.plugins.credentials.Credentials;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.IdCredentials;
import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials;
-import com.cloudbees.plugins.credentials.common.UsernameCredentials;
import com.cloudbees.plugins.credentials.domains.DomainRequirement;
+import edu.umd.cs.findbugs.annotations.CheckForNull;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.AbortException;
import hudson.EnvVars;
import hudson.ExtensionList;
import hudson.FilePath;
+import hudson.Functions;
import hudson.Launcher;
import hudson.Launcher.ProcStarter;
import hudson.Proc;
import hudson.Util;
import hudson.console.ConsoleLogFilter;
-import hudson.model.BuildListener;
import hudson.model.Computer;
import hudson.model.Item;
import hudson.model.ItemGroup;
@@ -54,6 +55,24 @@
import hudson.slaves.WorkspaceList;
import hudson.tasks.Maven;
import hudson.tasks.Maven.MavenInstallation;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.security.CodeSource;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
import jenkins.model.Jenkins;
import jenkins.mvn.DefaultGlobalSettingsProvider;
import jenkins.mvn.DefaultSettingsProvider;
@@ -84,30 +103,9 @@
import org.jenkinsci.plugins.workflow.steps.StepContext;
import org.springframework.util.ClassUtils;
-import edu.umd.cs.findbugs.annotations.CheckForNull;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
-import hudson.Functions;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URL;
-import java.security.CodeSource;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.UUID;
-import java.util.function.Function;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import java.util.stream.Collectors;
-
-@SuppressFBWarnings(value = "SE_TRANSIENT_FIELD_NOT_RESTORED", justification = "Contextual fields used only in start(); no onResume needed")
+@SuppressFBWarnings(
+ value = "SE_TRANSIENT_FIELD_NOT_RESTORED",
+ justification = "Contextual fields used only in start(); no onResume needed")
class WithMavenStepExecution2 extends GeneralNonBlockingStepExecution {
private static final long serialVersionUID = 1L;
@@ -154,7 +152,8 @@ class WithMavenStepExecution2 extends GeneralNonBlockingStepExecution {
build = context.get(Run.class);
}
- @Override public boolean start() throws Exception {
+ @Override
+ public boolean start() throws Exception {
run(this::doStart);
return false;
}
@@ -179,16 +178,20 @@ protected boolean doStart() throws Exception {
console.trace("[withMaven] Options: " + step.getOptions());
ExtensionList availableMavenPublishers = Jenkins.get().getExtensionList(MavenPublisher.class);
- console.trace("[withMaven] Available options: " + availableMavenPublishers.stream().map(
- MavenPublisher::toString).collect(Collectors.joining(",")));
+ console.trace("[withMaven] Available options: "
+ + availableMavenPublishers.stream()
+ .map(MavenPublisher::toString)
+ .collect(Collectors.joining(",")));
getComputer();
withContainer = detectWithContainer();
if (withContainer) {
- console.trace("[withMaven] IMPORTANT \"withMaven(){...}\" step running within a Docker container. See " );
- console.traceHyperlink("https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker", "Pipeline Maven Plugin FAQ");
+ console.trace("[withMaven] IMPORTANT \"withMaven(){...}\" step running within a Docker container. See ");
+ console.traceHyperlink(
+ "https://github.com/jenkinsci/pipeline-maven-plugin/blob/master/FAQ.adoc#how-to-use-the-pipeline-maven-plugin-with-docker",
+ "Pipeline Maven Plugin FAQ");
console.trace(" in case of problem.");
}
@@ -199,23 +202,35 @@ protected boolean doStart() throws Exception {
setupMaven(credentials);
if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.log(Level.FINE, this.build + " - Track usage and mask password of credentials " + credentials.stream().map(new CredentialsToPrettyString()).collect(Collectors.joining(",")));
+ LOGGER.log(
+ Level.FINE,
+ this.build + " - Track usage and mask password of credentials "
+ + credentials.stream()
+ .map(new CredentialsToPrettyString())
+ .collect(Collectors.joining(",")));
}
CredentialsProvider.trackAll(build, new ArrayList<>(credentials));
ConsoleLogFilter originalFilter = getContext().get(ConsoleLogFilter.class);
- ConsoleLogFilter maskSecretsFilter = MaskPasswordsConsoleLogFilter.newMaskPasswordsConsoleLogFilter(credentials, getComputer().getDefaultCharset());
- MavenColorizerConsoleLogFilter mavenColorizerFilter = new MavenColorizerConsoleLogFilter(getComputer().getDefaultCharset().name());
+ ConsoleLogFilter maskSecretsFilter = MaskPasswordsConsoleLogFilter.newMaskPasswordsConsoleLogFilter(
+ credentials, getComputer().getDefaultCharset());
+ MavenColorizerConsoleLogFilter mavenColorizerFilter = new MavenColorizerConsoleLogFilter(
+ getComputer().getDefaultCharset().name());
ConsoleLogFilter newFilter = BodyInvoker.mergeConsoleLogFilters(
- BodyInvoker.mergeConsoleLogFilters(originalFilter, maskSecretsFilter),
- mavenColorizerFilter);
+ BodyInvoker.mergeConsoleLogFilters(originalFilter, maskSecretsFilter), mavenColorizerFilter);
- EnvironmentExpander envEx = EnvironmentExpander.merge(getContext().get(EnvironmentExpander.class), new ExpanderImpl(envOverride));
+ EnvironmentExpander envEx =
+ EnvironmentExpander.merge(getContext().get(EnvironmentExpander.class), new ExpanderImpl(envOverride));
LOGGER.log(Level.FINEST, "envOverride: {0}", envOverride); // JENKINS-40484
- getContext().newBodyInvoker().withContexts(envEx, newFilter).withCallback(new WithMavenStepExecutionCallBack(tempBinDir, step.getOptions(), step.getPublisherStrategy())).start();
+ getContext()
+ .newBodyInvoker()
+ .withContexts(envEx, newFilter)
+ .withCallback(
+ new WithMavenStepExecutionCallBack(tempBinDir, step.getOptions(), step.getPublisherStrategy()))
+ .start();
return false;
}
@@ -247,7 +262,8 @@ private boolean detectWithContainer() {
if (launcherClassName.contains("org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator")) {
LOGGER.log(Level.FINE, "Step running within Kubernetes withContainer(): {1}", launcherClassName);
return false;
- } if (launcherClassName.contains("WithContainerStep")) {
+ }
+ if (launcherClassName.contains("WithContainerStep")) {
LOGGER.log(Level.FINE, "Step running within docker.image(): {1}", launcherClassName);
return true;
} else if (launcherClassName.contains("ContainerExecDecorator")) {
@@ -272,9 +288,10 @@ private void setupJDK() throws AbortException, IOException, InterruptedException
if (withContainer) {
// see #detectWithContainer()
LOGGER.log(Level.FINE, "Ignoring JDK installation parameter: {0}", jdkInstallationName);
- console.println("WARNING: \"withMaven(){...}\" step running within a container," +
- " tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. " +
- "You have specified a JDK installation \"" + jdkInstallationName + "\", which will be ignored.");
+ console.println("WARNING: \"withMaven(){...}\" step running within a container,"
+ + " tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. "
+ + "You have specified a JDK installation \""
+ + jdkInstallationName + "\", which will be ignored.");
return;
}
@@ -282,15 +299,16 @@ private void setupJDK() throws AbortException, IOException, InterruptedException
JDK jdk = Jenkins.get().getJDK(jdkInstallationName);
if (jdk == null) {
- throw new AbortException("Could not find the JDK installation: " + jdkInstallationName + ". Make sure it is configured on the Global Tool Configuration page");
+ throw new AbortException("Could not find the JDK installation: " + jdkInstallationName
+ + ". Make sure it is configured on the Global Tool Configuration page");
}
Node node = getComputer().getNode();
if (node == null) {
- throw new AbortException("Could not obtain the Node for the computer: " + getComputer().getName());
+ throw new AbortException("Could not obtain the Node for the computer: "
+ + getComputer().getName());
}
jdk = jdk.forNode(node, listener).forEnvironment(env);
jdk.buildEnvVars(envOverride);
-
}
/**
@@ -299,18 +317,23 @@ private void setupJDK() throws AbortException, IOException, InterruptedException
* @throws InterruptedException
*/
private void setupMaven(@NonNull Collection credentials) throws IOException, InterruptedException {
- // Temp dir with the wrapper that will be prepended to the path and the temporary files used by withMaven (settings files...)
+ // Temp dir with the wrapper that will be prepended to the path and the temporary files used by withMaven
+ // (settings files...)
if (step.getTempBinDir() != null && !step.getTempBinDir().isEmpty()) {
String expandedTargetLocation = step.getTempBinDir();
try {
expandedTargetLocation = TokenMacro.expandAll(build, ws, listener, expandedTargetLocation);
} catch (MacroEvaluationException e) {
- listener.getLogger().println("[ERROR] failed to expand variables in target location '" + expandedTargetLocation + "' : " + e.getMessage());
+ listener.getLogger()
+ .println("[ERROR] failed to expand variables in target location '" + expandedTargetLocation
+ + "' : " + e.getMessage());
}
tempBinDir = new FilePath(ws, expandedTargetLocation);
}
if (tempBinDir == null) {
- tempBinDir = tempDir(ws).child("withMaven" + Util.getDigestOf(UUID.randomUUID().toString()).substring(0, 8));
+ tempBinDir = tempDir(ws)
+ .child("withMaven"
+ + Util.getDigestOf(UUID.randomUUID().toString()).substring(0, 8));
}
tempBinDir.mkdirs();
envOverride.put("MVN_CMD_DIR", tempBinDir.getRemote());
@@ -332,8 +355,8 @@ private void setupMaven(@NonNull Collection credentials) throws IOE
if (StringUtils.isNotEmpty(javaToolsOptions)) {
javaToolsOptions += " ";
}
- javaToolsOptions += "-Dmaven.ext.class.path=\"" + mavenSpyJarPath.getRemote() + "\" " +
- "-Dorg.jenkinsci.plugins.pipeline.maven.reportsFolder=\"" + this.tempBinDir.getRemote() + "\" ";
+ javaToolsOptions += "-Dmaven.ext.class.path=\"" + mavenSpyJarPath.getRemote() + "\" "
+ + "-Dorg.jenkinsci.plugins.pipeline.maven.reportsFolder=\"" + this.tempBinDir.getRemote() + "\" ";
envOverride.put("JAVA_TOOL_OPTIONS", javaToolsOptions);
//
@@ -343,18 +366,24 @@ private void setupMaven(@NonNull Collection credentials) throws IOE
mavenConfig.append("--batch-mode ");
ifTraceabilityEnabled(() -> mavenConfig.append("--show-version "));
if (StringUtils.isNotEmpty(settingsFilePath)) {
- // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
- if (!isUnix) settingsFilePath=settingsFilePath.replace("%", "%%");
+ // JENKINS-57324 escape '%' as '%%'. See
+ // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
+ if (!isUnix) settingsFilePath = settingsFilePath.replace("%", "%%");
mavenConfig.append("--settings \"").append(settingsFilePath).append("\" ");
}
if (StringUtils.isNotEmpty(globalSettingsFilePath)) {
- // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
- if (!isUnix) globalSettingsFilePath=globalSettingsFilePath.replace("%", "%%");
- mavenConfig.append("--global-settings \"").append(globalSettingsFilePath).append("\" ");
+ // JENKINS-57324 escape '%' as '%%'. See
+ // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
+ if (!isUnix) globalSettingsFilePath = globalSettingsFilePath.replace("%", "%%");
+ mavenConfig
+ .append("--global-settings \"")
+ .append(globalSettingsFilePath)
+ .append("\" ");
}
if (StringUtils.isNotEmpty(mavenLocalRepo)) {
- // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
- if (!isUnix) mavenLocalRepo=mavenLocalRepo.replace("%", "%%");
+ // JENKINS-57324 escape '%' as '%%'. See
+ // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
+ if (!isUnix) mavenLocalRepo = mavenLocalRepo.replace("%", "%%");
mavenConfig.append("\"-Dmaven.repo.local=").append(mavenLocalRepo).append("\" ");
}
@@ -384,12 +413,12 @@ private void setupMaven(@NonNull Collection credentials) throws IOE
String content = generateMavenWrapperScriptContent(mvnExec, mavenConfig.toString());
// ADD MAVEN WRAPPER SCRIPT PARENT DIRECTORY TO PATH
- // WARNING MUST BE INVOKED AFTER obtainMavenExec(), THERE SEEM TO BE A BUG IN ENVIRONMENT VARIABLE HANDLING IN obtainMavenExec()
+ // WARNING MUST BE INVOKED AFTER obtainMavenExec(), THERE SEEM TO BE A BUG IN ENVIRONMENT VARIABLE HANDLING
+ // IN obtainMavenExec()
envOverride.put("PATH+MAVEN", tempBinDir.getRemote());
createWrapperScript(tempBinDir, mvnExec.getName(), content);
}
-
}
private FilePath setupMavenSpy() throws IOException, InterruptedException {
@@ -407,18 +436,26 @@ private FilePath setupMavenSpy() throws IOException, InterruptedException {
// Don't use Thread.currentThread().getContextClassLoader() as it doesn't show the resources of the plugin
Class clazz = WithMavenStepExecution2.class;
ClassLoader classLoader = clazz.getClassLoader();
- LOGGER.log(Level.FINE, "Load " + embeddedMavenSpyJarPath + " using classloader " + classLoader.getClass() + ": " + classLoader);
+ LOGGER.log(
+ Level.FINE,
+ "Load " + embeddedMavenSpyJarPath + " using classloader " + classLoader.getClass() + ": "
+ + classLoader);
in = classLoader.getResourceAsStream(embeddedMavenSpyJarPath);
if (in == null) {
CodeSource codeSource = clazz.getProtectionDomain().getCodeSource();
- String msg = "Embedded maven spy jar not found at " + embeddedMavenSpyJarPath + " in the pipeline-maven-plugin classpath. " +
- "Maven Spy Jar URL can be defined with the system property: '" + MAVEN_SPY_JAR_URL + "'" +
- "Classloader " + classLoader.getClass() + ": " + classLoader + ". " +
- "Class " + clazz.getName() + " loaded from " + (codeSource == null ? "#unknown#" : codeSource.getLocation());
+ String msg = "Embedded maven spy jar not found at " + embeddedMavenSpyJarPath
+ + " in the pipeline-maven-plugin classpath. "
+ + "Maven Spy Jar URL can be defined with the system property: '"
+ + MAVEN_SPY_JAR_URL + "'" + "Classloader "
+ + classLoader.getClass() + ": " + classLoader + ". " + "Class "
+ + clazz.getName() + " loaded from "
+ + (codeSource == null ? "#unknown#" : codeSource.getLocation());
throw new IllegalStateException(msg);
}
} else {
- LOGGER.log(Level.FINE, "Load maven spy jar provided by system property '" + MAVEN_SPY_JAR_URL + "': " + mavenSpyJarUrl);
+ LOGGER.log(
+ Level.FINE,
+ "Load maven spy jar provided by system property '" + MAVEN_SPY_JAR_URL + "': " + mavenSpyJarUrl);
in = new URL(mavenSpyJarUrl).openStream();
}
@@ -447,14 +484,17 @@ private String obtainMavenExec() throws IOException, InterruptedException {
consoleMessage.append(" using Maven installation provided by the build agent");
} else if (withContainer) {
console.println(
- "[withMaven] WARNING: Specified Maven '" + mavenInstallationName + "' cannot be installed, will be ignored. " +
- "Step running within a container, tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. ");
- LOGGER.log(Level.FINE, "Running in docker-pipeline, ignore Maven Installation parameter: {0}", mavenInstallationName);
+ "[withMaven] WARNING: Specified Maven '" + mavenInstallationName
+ + "' cannot be installed, will be ignored. "
+ + "Step running within a container, tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. ");
+ LOGGER.log(
+ Level.FINE,
+ "Running in docker-pipeline, ignore Maven Installation parameter: {0}",
+ mavenInstallationName);
} else {
return obtainMvnExecutableFromMavenInstallation(mavenInstallationName);
}
-
// in case there are no installations available we fallback to the OS maven installation
// first we try MAVEN_HOME and M2_HOME
LOGGER.fine("Searching for Maven through MAVEN_HOME and M2_HOME environment variables...");
@@ -466,14 +506,20 @@ private String obtainMavenExec() throws IOException, InterruptedException {
if (mavenHome == null) {
mavenHome = readFromProcess("printenv", M2_HOME);
if (StringUtils.isNotEmpty(mavenHome)) {
- consoleMessage.append(" with the environment variable M2_HOME=").append(mavenHome);
+ consoleMessage
+ .append(" with the environment variable M2_HOME=")
+ .append(mavenHome);
}
} else {
- consoleMessage.append(" with the environment variable MAVEN_HOME=").append(mavenHome);
+ consoleMessage
+ .append(" with the environment variable MAVEN_HOME=")
+ .append(mavenHome);
}
if (mavenHome == null) {
- LOGGER.log(Level.FINE, "NO maven installation discovered on docker container through MAVEN_HOME and M2_HOME environment variables");
+ LOGGER.log(
+ Level.FINE,
+ "NO maven installation discovered on docker container through MAVEN_HOME and M2_HOME environment variables");
mvnExecPath = null;
} else {
LOGGER.log(Level.FINE, "Found maven installation on {0}", mavenHome);
@@ -487,13 +533,19 @@ private String obtainMavenExec() throws IOException, InterruptedException {
if (mavenHome == null) {
mavenHome = env.get(M2_HOME);
if (StringUtils.isNotEmpty(mavenHome)) {
- consoleMessage.append(" with the environment variable M2_HOME=").append(mavenHome);
+ consoleMessage
+ .append(" with the environment variable M2_HOME=")
+ .append(mavenHome);
}
} else {
- consoleMessage.append(" with the environment variable MAVEN_HOME=").append(mavenHome);
+ consoleMessage
+ .append(" with the environment variable MAVEN_HOME=")
+ .append(mavenHome);
}
if (mavenHome == null) {
- LOGGER.log(Level.FINE, "NO maven installation discovered on build agent through MAVEN_HOME and M2_HOME environment variables");
+ LOGGER.log(
+ Level.FINE,
+ "NO maven installation discovered on build agent through MAVEN_HOME and M2_HOME environment variables");
mvnExecPath = null;
} else {
LOGGER.log(Level.FINE, "Found maven installation on {0}", mavenHome);
@@ -507,7 +559,8 @@ private String obtainMavenExec() throws IOException, InterruptedException {
// if at this point mvnExecPath is still null try to use which/where command to find a maven executable
if (mvnExecPath == null) {
if (LOGGER.isLoggable(Level.FINE)) {
- console.trace("[withMaven] No Maven Installation or MAVEN_HOME found, looking for mvn executable by using which/where command");
+ console.trace(
+ "[withMaven] No Maven Installation or MAVEN_HOME found, looking for mvn executable by using which/where command");
}
if (Boolean.TRUE.equals(getComputer().isUnix())) {
mvnExecPath = readFromProcess("/bin/sh", "-c", "which mvn");
@@ -522,11 +575,14 @@ private String obtainMavenExec() throws IOException, InterruptedException {
String mvnwScript = isUnix ? "mvnw" : "mvnw.cmd";
boolean mvnwScriptExists = ws.child(mvnwScript).exists();
if (mvnwScriptExists) {
- consoleMessage = new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step " +
- "and not found on the build agent but '" + mvnwScript + "' script found in the workspace.");
+ consoleMessage =
+ new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step "
+ + "and not found on the build agent but '" + mvnwScript
+ + "' script found in the workspace.");
} else {
- consoleMessage = new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step " +
- "and not found on the build agent");
+ consoleMessage =
+ new StringBuilder("[withMaven] Maven installation not specified in the 'withMaven()' step "
+ + "and not found on the build agent");
}
} else {
consoleMessage.append(" with executable ").append(mvnExecPath);
@@ -539,13 +595,16 @@ private String obtainMavenExec() throws IOException, InterruptedException {
return mvnExecPath;
}
- private String obtainMvnExecutableFromMavenInstallation(String mavenInstallationName) throws IOException, InterruptedException {
+ private String obtainMvnExecutableFromMavenInstallation(String mavenInstallationName)
+ throws IOException, InterruptedException {
MavenInstallation mavenInstallation = null;
for (MavenInstallation i : getMavenInstallations()) {
if (mavenInstallationName.equals(i.getName())) {
mavenInstallation = i;
- LOGGER.log(Level.FINE, "Found maven installation {0} with installation home {1}", new Object[]{mavenInstallation.getName(), mavenInstallation.getHome()});
+ LOGGER.log(Level.FINE, "Found maven installation {0} with installation home {1}", new Object[] {
+ mavenInstallation.getName(), mavenInstallation.getHome()
+ });
break;
}
}
@@ -554,7 +613,8 @@ private String obtainMvnExecutableFromMavenInstallation(String mavenInstallation
}
Node node = getComputer().getNode();
if (node == null) {
- throw new AbortException("Could not obtain the Node for the computer: " + getComputer().getName());
+ throw new AbortException("Could not obtain the Node for the computer: "
+ + getComputer().getName());
}
mavenInstallation = mavenInstallation.forNode(node, listener).forEnvironment(env);
mavenInstallation.buildEnvVars(envOverride);
@@ -578,12 +638,15 @@ private String readFromProcess(String... args) throws InterruptedException {
Proc p = launcher.launch(ps.cmds(args).stdout(baos));
int exitCode = p.join();
if (exitCode == 0) {
- return baos.toString(getComputer().getDefaultCharset().name()).replaceAll("[\t\r\n]+", " ").trim();
+ return baos.toString(getComputer().getDefaultCharset().name())
+ .replaceAll("[\t\r\n]+", " ")
+ .trim();
} else {
return null;
}
} catch (IOException e) {
- e.printStackTrace(console.format("Error executing command '%s' : %s%n", Arrays.toString(args), e.getMessage()));
+ e.printStackTrace(
+ console.format("Error executing command '%s' : %s%n", Arrays.toString(args), e.getMessage()));
}
return null;
}
@@ -596,7 +659,8 @@ private String readFromProcess(String... args) throws InterruptedException {
* @return wrapper script content
* @throws AbortException when problems creating content
*/
- private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @NonNull String mavenConfig) throws AbortException {
+ private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @NonNull String mavenConfig)
+ throws AbortException {
boolean isUnix = Boolean.TRUE.equals(getComputer().isUnix());
@@ -605,16 +669,29 @@ private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @Non
if (isUnix) { // Linux, Unix, MacOSX
String lineSep = "\n";
script.append("#!/bin/sh -e").append(lineSep);
- ifTraceabilityEnabled(() -> script.append("echo ----- withMaven Wrapper script -----").append(lineSep));
- script.append("\"").append(mvnExec.getRemote()).append("\" ").append(mavenConfig).append(" \"$@\"").append(lineSep);
+ ifTraceabilityEnabled(() ->
+ script.append("echo ----- withMaven Wrapper script -----").append(lineSep));
+ script.append("\"")
+ .append(mvnExec.getRemote())
+ .append("\" ")
+ .append(mavenConfig)
+ .append(" \"$@\"")
+ .append(lineSep);
} else { // Windows
String lineSep = "\r\n";
script.append("@echo off").append(lineSep);
- ifTraceabilityEnabled(() -> script.append("echo ----- withMaven Wrapper script -----").append(lineSep));
- // JENKINS-57324 escape '%' as '%%'. See https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
+ ifTraceabilityEnabled(() ->
+ script.append("echo ----- withMaven Wrapper script -----").append(lineSep));
+ // JENKINS-57324 escape '%' as '%%'. See
+ // https://en.wikibooks.org/wiki/Windows_Batch_Scripting#Quoting_and_escaping
mavenConfig = mavenConfig.replace("%", "%%");
- script.append("\"").append(mvnExec.getRemote()).append("\" ").append(mavenConfig).append(" %*").append(lineSep);
+ script.append("\"")
+ .append(mvnExec.getRemote())
+ .append("\" ")
+ .append(mavenConfig)
+ .append(" %*")
+ .append(lineSep);
}
LOGGER.log(Level.FINER, "Generated Maven wrapper script: \n{0}", script);
@@ -631,7 +708,8 @@ private String generateMavenWrapperScriptContent(@NonNull FilePath mvnExec, @Non
* @throws InterruptedException when processing remote calls
* @throws IOException when reading files
*/
- private FilePath createWrapperScript(FilePath tempBinDir, String name, String content) throws IOException, InterruptedException {
+ private FilePath createWrapperScript(FilePath tempBinDir, String name, String content)
+ throws IOException, InterruptedException {
FilePath scriptFile = tempBinDir.child(name);
envOverride.put(MVN_CMD, scriptFile.getRemote());
@@ -664,7 +742,9 @@ private String setupMavenLocalRepo() throws IOException, InterruptedException {
expandedMavenLocalRepo = repoPath.getRemote();
}
}
- LOGGER.log(Level.FINEST, "setupMavenLocalRepo({0}): {1}", new Object[]{step.getMavenLocalRepo(), expandedMavenLocalRepo});
+ LOGGER.log(Level.FINEST, "setupMavenLocalRepo({0}): {1}", new Object[] {
+ step.getMavenLocalRepo(), expandedMavenLocalRepo
+ });
return expandedMavenLocalRepo;
}
@@ -679,13 +759,16 @@ private String setupMavenLocalRepo() throws IOException, InterruptedException {
* @throws IOException when reading files
*/
@Nullable
- private String setupSettingFile(@NonNull Collection credentials) throws IOException, InterruptedException {
+ private String setupSettingFile(@NonNull Collection credentials)
+ throws IOException, InterruptedException {
final FilePath settingsDest = tempBinDir.child("settings.xml");
// Settings from Config File Provider
if (StringUtils.isNotEmpty(step.getMavenSettingsConfig())) {
if (LOGGER.isLoggable(Level.FINE)) {
- console.formatTrace("[withMaven] using Maven settings provided by the Jenkins Managed Configuration File '%s' %n", step.getMavenSettingsConfig());
+ console.formatTrace(
+ "[withMaven] using Maven settings provided by the Jenkins Managed Configuration File '%s' %n",
+ step.getMavenSettingsConfig());
}
settingsFromConfig(step.getMavenSettingsConfig(), settingsDest, credentials);
envOverride.put("MVN_SETTINGS", settingsDest.getRemote());
@@ -700,8 +783,11 @@ private String setupSettingFile(@NonNull Collection credentials) th
if ((settings = ws.child(settingsPath)).exists()) {
// settings file residing on the agent
if (LOGGER.isLoggable(Level.FINE)) {
- console.formatTrace("[withMaven] using Maven settings provided on the build agent '%s' %n", settingsPath);
- LOGGER.log(Level.FINE, "Copying maven settings file from build agent {0} to {1}", new Object[] { settings, settingsDest });
+ console.formatTrace(
+ "[withMaven] using Maven settings provided on the build agent '%s' %n", settingsPath);
+ LOGGER.log(Level.FINE, "Copying maven settings file from build agent {0} to {1}", new Object[] {
+ settings, settingsDest
+ });
}
settings.copyTo(settingsDest);
envOverride.put("MVN_SETTINGS", settingsDest.getRemote());
@@ -714,17 +800,21 @@ private String setupSettingFile(@NonNull Collection credentials) th
SettingsProvider settingsProvider;
MavenConfigFolderOverrideProperty overrideProperty = getMavenConfigOverrideProperty();
- StringBuilder mavenSettingsLog=new StringBuilder();
+ StringBuilder mavenSettingsLog = new StringBuilder();
if (overrideProperty != null && overrideProperty.getSettings() != null) {
// Settings overridden by a folder property
- if(LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("[withMaven] using overridden Maven settings by folder '").append(overrideProperty.getOwner().getDisplayName()).append("'. ");
+ if (LOGGER.isLoggable(Level.FINE)) {
+ mavenSettingsLog
+ .append("[withMaven] using overridden Maven settings by folder '")
+ .append(overrideProperty.getOwner().getDisplayName())
+ .append("'. ");
}
settingsProvider = overrideProperty.getSettings();
} else {
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("[withMaven] using Maven settings provided by the Jenkins global configuration. ");
+ mavenSettingsLog.append(
+ "[withMaven] using Maven settings provided by the Jenkins global configuration. ");
}
// Settings provided by the global maven configuration
settingsProvider = GlobalMavenConfig.get().getSettingsProvider();
@@ -733,7 +823,10 @@ private String setupSettingFile(@NonNull Collection credentials) th
if (settingsProvider instanceof MvnSettingsProvider) {
MvnSettingsProvider mvnSettingsProvider = (MvnSettingsProvider) settingsProvider;
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("Config File Provider maven settings file '").append(mvnSettingsProvider.getSettingsConfigId()).append("'");
+ mavenSettingsLog
+ .append("Config File Provider maven settings file '")
+ .append(mvnSettingsProvider.getSettingsConfigId())
+ .append("'");
console.trace(mavenSettingsLog);
}
settingsFromConfig(mvnSettingsProvider.getSettingsConfigId(), settingsDest, credentials);
@@ -748,12 +841,16 @@ private String setupSettingFile(@NonNull Collection credentials) th
settings.copyTo(settingsDest);
envOverride.put("MVN_SETTINGS", settingsDest.getRemote());
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("Maven settings on the build agent'").append(settingsPath).append("'");
+ mavenSettingsLog
+ .append("Maven settings on the build agent'")
+ .append(settingsPath)
+ .append("'");
console.trace(mavenSettingsLog);
}
return settingsDest.getRemote();
} else {
- throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings + "' on the build agent");
+ throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings
+ + "' on the build agent");
}
} else if (settingsProvider instanceof DefaultSettingsProvider) {
@@ -763,7 +860,8 @@ private String setupSettingFile(@NonNull Collection credentials) th
console.trace(mavenSettingsLog);
}
} else if (settingsProvider == null) {
- // should not happen according to the source code of jenkins.mvn.MavenConfig.getSettingsProvider() in jenkins-core 2.7
+ // should not happen according to the source code of jenkins.mvn.MavenConfig.getSettingsProvider() in
+ // jenkins-core 2.7
// do nothing
if (LOGGER.isLoggable(Level.FINE)) {
mavenSettingsLog.append("Maven settings are null. NO settings will be defined.");
@@ -782,9 +880,12 @@ private MavenConfigFolderOverrideProperty getMavenConfigOverrideProperty() {
// Iterate until we find an override or until we reach the top. We need it to be an item to be able to do
// getParent, AbstractFolder which has the properties is also an Item
- for (ItemGroup> group = job.getParent(); group instanceof Item && !(group instanceof Jenkins); group = ((Item) group).getParent()) {
+ for (ItemGroup> group = job.getParent();
+ group instanceof Item && !(group instanceof Jenkins);
+ group = ((Item) group).getParent()) {
if (group instanceof AbstractFolder) {
- MavenConfigFolderOverrideProperty mavenConfigProperty = ((AbstractFolder>) group).getProperties().get(MavenConfigFolderOverrideProperty.class);
+ MavenConfigFolderOverrideProperty mavenConfigProperty =
+ ((AbstractFolder>) group).getProperties().get(MavenConfigFolderOverrideProperty.class);
if (mavenConfigProperty != null && mavenConfigProperty.isOverride()) {
return mavenConfigProperty;
}
@@ -804,13 +905,16 @@ private MavenConfigFolderOverrideProperty getMavenConfigOverrideProperty() {
* @throws IOException when reading files
*/
@Nullable
- private String setupGlobalSettingFile(@NonNull Collection credentials) throws IOException, InterruptedException {
+ private String setupGlobalSettingFile(@NonNull Collection credentials)
+ throws IOException, InterruptedException {
final FilePath settingsDest = tempBinDir.child("globalSettings.xml");
// Global settings from Config File Provider
if (StringUtils.isNotEmpty(step.getGlobalMavenSettingsConfig())) {
if (LOGGER.isLoggable(Level.FINE)) {
- console.formatTrace("[withMaven] using Maven global settings provided by the Jenkins Managed Configuration File '%s' %n", step.getGlobalMavenSettingsConfig());
+ console.formatTrace(
+ "[withMaven] using Maven global settings provided by the Jenkins Managed Configuration File '%s' %n",
+ step.getGlobalMavenSettingsConfig());
}
globalSettingsFromConfig(step.getGlobalMavenSettingsConfig(), settingsDest, credentials);
envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote());
@@ -824,8 +928,13 @@ private String setupGlobalSettingFile(@NonNull Collection credentia
if ((settings = ws.child(settingsPath)).exists()) {
// Global settings file residing on the agent
if (LOGGER.isLoggable(Level.FINE)) {
- console.formatTrace("[withMaven] using Maven global settings provided on the build agent '%s' %n", settingsPath);
- LOGGER.log(Level.FINE, "Copying maven global settings file from build agent {0} to {1}", new Object[] { settings, settingsDest });
+ console.formatTrace(
+ "[withMaven] using Maven global settings provided on the build agent '%s' %n",
+ settingsPath);
+ LOGGER.log(
+ Level.FINE,
+ "Copying maven global settings file from build agent {0} to {1}",
+ new Object[] {settings, settingsDest});
}
settings.copyTo(settingsDest);
envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote());
@@ -842,14 +951,18 @@ private String setupGlobalSettingFile(@NonNull Collection credentia
StringBuilder mavenSettingsLog = new StringBuilder();
if (overrideProperty == null || overrideProperty.getGlobalSettings() == null) {
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("[withMaven] using Maven global settings provided by the Jenkins global configuration. ");
+ mavenSettingsLog.append(
+ "[withMaven] using Maven global settings provided by the Jenkins global configuration. ");
}
// Settings provided by the global maven configuration
globalSettingsProvider = GlobalMavenConfig.get().getGlobalSettingsProvider();
} else {
// Settings overridden by a folder property
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("[withMaven] using overridden Maven global settings by folder '").append(overrideProperty.getOwner().getDisplayName()).append("'. ");
+ mavenSettingsLog
+ .append("[withMaven] using overridden Maven global settings by folder '")
+ .append(overrideProperty.getOwner().getDisplayName())
+ .append("'. ");
}
globalSettingsProvider = overrideProperty.getGlobalSettings();
}
@@ -857,7 +970,10 @@ private String setupGlobalSettingFile(@NonNull Collection credentia
if (globalSettingsProvider instanceof MvnGlobalSettingsProvider) {
MvnGlobalSettingsProvider mvnGlobalSettingsProvider = (MvnGlobalSettingsProvider) globalSettingsProvider;
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("Config File Provider maven global settings file '").append(mvnGlobalSettingsProvider.getSettingsConfigId()).append("'");
+ mavenSettingsLog
+ .append("Config File Provider maven global settings file '")
+ .append(mvnGlobalSettingsProvider.getSettingsConfigId())
+ .append("'");
}
globalSettingsFromConfig(mvnGlobalSettingsProvider.getSettingsConfigId(), settingsDest, credentials);
envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote());
@@ -866,13 +982,17 @@ private String setupGlobalSettingFile(@NonNull Collection credentia
}
return settingsDest.getRemote();
} else if (globalSettingsProvider instanceof FilePathGlobalSettingsProvider) {
- FilePathGlobalSettingsProvider filePathGlobalSettingsProvider = (FilePathGlobalSettingsProvider) globalSettingsProvider;
+ FilePathGlobalSettingsProvider filePathGlobalSettingsProvider =
+ (FilePathGlobalSettingsProvider) globalSettingsProvider;
String settingsPath = filePathGlobalSettingsProvider.getPath();
FilePath settings;
if ((settings = ws.child(settingsPath)).exists()) {
// Global settings file residing on the agent
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("Maven global settings on the build agent '").append(settingsPath).append("'");
+ mavenSettingsLog
+ .append("Maven global settings on the build agent '")
+ .append(settingsPath)
+ .append("'");
}
settings.copyTo(settingsDest);
envOverride.put("GLOBAL_MVN_SETTINGS", settingsDest.getRemote());
@@ -881,16 +1001,19 @@ private String setupGlobalSettingFile(@NonNull Collection credentia
}
return settingsDest.getRemote();
} else {
- throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings + "' on the build agent");
+ throw new AbortException("Could not find file provided by the Jenkins global configuration '" + settings
+ + "' on the build agent");
}
} else if (globalSettingsProvider instanceof DefaultGlobalSettingsProvider) {
// do nothing
if (LOGGER.isLoggable(Level.FINE)) {
- mavenSettingsLog.append("Maven global settings defined by 'DefaultSettingsProvider', NOT overriding it.");
+ mavenSettingsLog.append(
+ "Maven global settings defined by 'DefaultSettingsProvider', NOT overriding it.");
console.trace(mavenSettingsLog);
}
} else if (globalSettingsProvider == null) {
- // should not happen according to the source code of jenkins.mvn.GlobalMavenConfig.getGlobalSettingsProvider() in jenkins-core 2.7
+ // should not happen according to the source code of
+ // jenkins.mvn.GlobalMavenConfig.getGlobalSettingsProvider() in jenkins-core 2.7
// do nothing
if (LOGGER.isLoggable(Level.FINE)) {
mavenSettingsLog.append("Maven global settings are null. NO settings will be defined.");
@@ -913,47 +1036,66 @@ private String setupGlobalSettingFile(@NonNull Collection credentia
* @return the {@link FilePath} to the settings file
* @throws AbortException in case of error
*/
- private void settingsFromConfig(String mavenSettingsConfigId, FilePath mavenSettingsFile, @NonNull Collection credentials) throws AbortException {
+ private void settingsFromConfig(
+ String mavenSettingsConfigId, FilePath mavenSettingsFile, @NonNull Collection credentials)
+ throws AbortException {
Config c = ConfigFiles.getByIdOrNull(build, mavenSettingsConfigId);
if (c == null) {
- throw new AbortException("Could not find the Maven settings.xml config file id:" + mavenSettingsConfigId + ". Make sure it exists on Managed Files");
+ throw new AbortException("Could not find the Maven settings.xml config file id:" + mavenSettingsConfigId
+ + ". Make sure it exists on Managed Files");
}
if (StringUtils.isBlank(c.content)) {
- throw new AbortException("Could not create Maven settings.xml config file id:" + mavenSettingsConfigId + ". Content of the file is empty");
+ throw new AbortException("Could not create Maven settings.xml config file id:" + mavenSettingsConfigId
+ + ". Content of the file is empty");
}
MavenSettingsConfig mavenSettingsConfig;
if (c instanceof MavenSettingsConfig) {
mavenSettingsConfig = (MavenSettingsConfig) c;
} else {
- mavenSettingsConfig = new MavenSettingsConfig(c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null);
+ mavenSettingsConfig = new MavenSettingsConfig(
+ c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null);
}
try {
- final Map resolvedCredentialsByMavenServerId = resolveCredentials(mavenSettingsConfig.getServerCredentialMappings(), "Maven settings");
+ final Map resolvedCredentialsByMavenServerId =
+ resolveCredentials(mavenSettingsConfig.getServerCredentialMappings(), "Maven settings");
String mavenSettingsFileContent;
if (resolvedCredentialsByMavenServerId.isEmpty()) {
mavenSettingsFileContent = mavenSettingsConfig.content;
if (LOGGER.isLoggable(Level.FINE)) {
- console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id + "' with NO Maven servers credentials provided by Jenkins");
+ console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id
+ + "' with NO Maven servers credentials provided by Jenkins");
}
} else {
credentials.addAll(resolvedCredentialsByMavenServerId.values());
List tempFiles = new ArrayList<>();
- mavenSettingsFileContent = CredentialsHelper.fillAuthentication(mavenSettingsConfig.content, mavenSettingsConfig.isReplaceAll, resolvedCredentialsByMavenServerId, tempBinDir, tempFiles);
+ mavenSettingsFileContent = CredentialsHelper.fillAuthentication(
+ mavenSettingsConfig.content,
+ mavenSettingsConfig.isReplaceAll,
+ resolvedCredentialsByMavenServerId,
+ tempBinDir,
+ tempFiles);
if (LOGGER.isLoggable(Level.FINE)) {
- console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id + "' with Maven servers credentials provided by Jenkins " +
- "(replaceAll: " + mavenSettingsConfig.isReplaceAll + "): " +
- resolvedCredentialsByMavenServerId.entrySet().stream().map(new MavenServerToCredentialsMappingToStringFunction()).sorted().collect(Collectors.joining(", ")));
+ console.trace("[withMaven] using Maven settings.xml '" + mavenSettingsConfig.id
+ + "' with Maven servers credentials provided by Jenkins " + "(replaceAll: "
+ + mavenSettingsConfig.isReplaceAll + "): "
+ + resolvedCredentialsByMavenServerId.entrySet().stream()
+ .map(new MavenServerToCredentialsMappingToStringFunction())
+ .sorted()
+ .collect(Collectors.joining(", ")));
}
}
- mavenSettingsFile.write(mavenSettingsFileContent, getComputer().getDefaultCharset().name());
+ mavenSettingsFile.write(
+ mavenSettingsFileContent, getComputer().getDefaultCharset().name());
} catch (Exception e) {
- throw new IllegalStateException("Exception injecting Maven settings.xml " + mavenSettingsConfig.id +
- " during the build: " + build + ": " + e.getMessage(), e);
+ throw new IllegalStateException(
+ "Exception injecting Maven settings.xml " + mavenSettingsConfig.id + " during the build: " + build
+ + ": " + e.getMessage(),
+ e);
}
}
@@ -967,48 +1109,66 @@ private void settingsFromConfig(String mavenSettingsConfigId, FilePath mavenSett
* @return the {@link FilePath} to the settings file
* @throws AbortException in case of error
*/
- private void globalSettingsFromConfig(String mavenGlobalSettingsConfigId, FilePath mavenGlobalSettingsFile, Collection credentials) throws AbortException {
+ private void globalSettingsFromConfig(
+ String mavenGlobalSettingsConfigId, FilePath mavenGlobalSettingsFile, Collection credentials)
+ throws AbortException {
Config c = ConfigFiles.getByIdOrNull(build, mavenGlobalSettingsConfigId);
if (c == null) {
- throw new AbortException("Could not find the Maven global settings.xml config file id:" + mavenGlobalSettingsFile + ". Make sure it exists on Managed Files");
+ throw new AbortException("Could not find the Maven global settings.xml config file id:"
+ + mavenGlobalSettingsFile + ". Make sure it exists on Managed Files");
}
if (StringUtils.isBlank(c.content)) {
- throw new AbortException("Could not create Maven global settings.xml config file id:" + mavenGlobalSettingsFile + ". Content of the file is empty");
+ throw new AbortException("Could not create Maven global settings.xml config file id:"
+ + mavenGlobalSettingsFile + ". Content of the file is empty");
}
GlobalMavenSettingsConfig mavenGlobalSettingsConfig;
if (c instanceof GlobalMavenSettingsConfig) {
mavenGlobalSettingsConfig = (GlobalMavenSettingsConfig) c;
} else {
- mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig(c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null);
+ mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig(
+ c.id, c.name, c.comment, c.content, MavenSettingsConfig.isReplaceAllDefault, null);
}
try {
- final Map resolvedCredentialsByMavenServerId = resolveCredentials(mavenGlobalSettingsConfig.getServerCredentialMappings(), " Global Maven settings");
+ final Map resolvedCredentialsByMavenServerId = resolveCredentials(
+ mavenGlobalSettingsConfig.getServerCredentialMappings(), " Global Maven settings");
String mavenGlobalSettingsFileContent;
if (resolvedCredentialsByMavenServerId.isEmpty()) {
mavenGlobalSettingsFileContent = mavenGlobalSettingsConfig.content;
- console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id + "' with NO Maven servers credentials provided by Jenkins");
+ console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id
+ + "' with NO Maven servers credentials provided by Jenkins");
} else {
credentials.addAll(resolvedCredentialsByMavenServerId.values());
List tempFiles = new ArrayList<>();
- mavenGlobalSettingsFileContent = CredentialsHelper.fillAuthentication(mavenGlobalSettingsConfig.content, mavenGlobalSettingsConfig.isReplaceAll, resolvedCredentialsByMavenServerId, tempBinDir, tempFiles);
- console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id + "' with Maven servers credentials provided by Jenkins " +
- "(replaceAll: " + mavenGlobalSettingsConfig.isReplaceAll + "): " +
- resolvedCredentialsByMavenServerId.entrySet().stream().map(new MavenServerToCredentialsMappingToStringFunction()).sorted().collect(Collectors.joining(", ")));
-
+ mavenGlobalSettingsFileContent = CredentialsHelper.fillAuthentication(
+ mavenGlobalSettingsConfig.content,
+ mavenGlobalSettingsConfig.isReplaceAll,
+ resolvedCredentialsByMavenServerId,
+ tempBinDir,
+ tempFiles);
+ console.trace("[withMaven] using Maven global settings.xml '" + mavenGlobalSettingsConfig.id
+ + "' with Maven servers credentials provided by Jenkins " + "(replaceAll: "
+ + mavenGlobalSettingsConfig.isReplaceAll + "): "
+ + resolvedCredentialsByMavenServerId.entrySet().stream()
+ .map(new MavenServerToCredentialsMappingToStringFunction())
+ .sorted()
+ .collect(Collectors.joining(", ")));
}
-
- mavenGlobalSettingsFile.write(mavenGlobalSettingsFileContent, getComputer().getDefaultCharset().name());
- LOGGER.log(Level.FINE, "Created global config file {0}", new Object[]{mavenGlobalSettingsFile});
+ mavenGlobalSettingsFile.write(
+ mavenGlobalSettingsFileContent,
+ getComputer().getDefaultCharset().name());
+ LOGGER.log(Level.FINE, "Created global config file {0}", new Object[] {mavenGlobalSettingsFile});
} catch (Exception e) {
- throw new IllegalStateException("Exception injecting Maven settings.xml " + mavenGlobalSettingsConfig.id +
- " during the build: " + build + ": " + e.getMessage(), e);
+ throw new IllegalStateException(
+ "Exception injecting Maven settings.xml " + mavenGlobalSettingsConfig.id + " during the build: "
+ + build + ": " + e.getMessage(),
+ e);
}
}
@@ -1019,8 +1179,10 @@ private void globalSettingsFromConfig(String mavenGlobalSettingsConfigId, FilePa
* @return credentials by Maven server Id
*/
@NonNull
- public Map resolveCredentials(@Nullable final List serverCredentialMappings, String logMessagePrefix) {
- // CredentialsHelper.removeMavenServerDefinitions() requires a Map implementation that supports `null` values. `HashMap` supports `null` values, `TreeMap` doesn't
+ public Map resolveCredentials(
+ @Nullable final List serverCredentialMappings, String logMessagePrefix) {
+ // CredentialsHelper.removeMavenServerDefinitions() requires a Map implementation that supports `null` values.
+ // `HashMap` supports `null` values, `TreeMap` doesn't
// https://github.com/jenkinsci/config-file-provider-plugin/blob/config-file-provider-2.16.4/src/main/java/org/jenkinsci/plugins/configfiles/maven/security/CredentialsHelper.java#L252
Map mavenServerIdToCredentials = new HashMap<>();
if (serverCredentialMappings == null) {
@@ -1029,9 +1191,15 @@ public Map resolveCredentials(@Nullable fin
List unresolvedServerCredentialsMappings = new ArrayList<>();
for (ServerCredentialMapping serverCredentialMapping : serverCredentialMappings) {
- List domainRequirements = StringUtils.isBlank(serverCredentialMapping.getServerId()) ? Collections.emptyList(): Collections.singletonList(new MavenServerIdRequirement(serverCredentialMapping.getServerId()));
+ List domainRequirements = StringUtils.isBlank(serverCredentialMapping.getServerId())
+ ? Collections.emptyList()
+ : Collections.singletonList(new MavenServerIdRequirement(serverCredentialMapping.getServerId()));
@Nullable
- final StandardUsernameCredentials credentials = CredentialsProvider.findCredentialById(serverCredentialMapping.getCredentialsId(), StandardUsernameCredentials.class, build, domainRequirements);
+ final StandardUsernameCredentials credentials = CredentialsProvider.findCredentialById(
+ serverCredentialMapping.getCredentialsId(),
+ StandardUsernameCredentials.class,
+ build,
+ domainRequirements);
if (credentials == null) {
unresolvedServerCredentialsMappings.add(serverCredentialMapping);
@@ -1044,8 +1212,11 @@ public Map resolveCredentials(@Nullable fin
* we prefer to print a warning message rather than failing the build with an AbortException if some credentials are NOT found for backward compatibility reasons.
* The behaviour of o.j.p.configfiles.m.s.CredentialsHelper.resolveCredentials(model.Run, List, TaskListener)` is to just print a warning message
*/
- console.println("[withMaven] WARNING " + logMessagePrefix + " - Silently skip Maven server Ids with missing associated Jenkins credentials: " +
- unresolvedServerCredentialsMappings.stream().map(new ServerCredentialMappingToStringFunction()).collect(Collectors.joining(", ")));
+ console.println("[withMaven] WARNING " + logMessagePrefix
+ + " - Silently skip Maven server Ids with missing associated Jenkins credentials: "
+ + unresolvedServerCredentialsMappings.stream()
+ .map(new ServerCredentialMappingToStringFunction())
+ .collect(Collectors.joining(", ")));
}
return mavenServerIdToCredentials;
}
@@ -1069,7 +1240,7 @@ private static final class ExpanderImpl extends EnvironmentExpander {
private final Map overrides;
private ExpanderImpl(EnvVars overrides) {
- LOGGER.log(Level.FINEST, "ExpanderImpl(overrides: {0})", new Object[]{overrides});
+ LOGGER.log(Level.FINEST, "ExpanderImpl(overrides: {0})", new Object[] {overrides});
this.overrides = new HashMap<>();
for (Entry entry : overrides.entrySet()) {
this.overrides.put(entry.getKey(), entry.getValue());
@@ -1078,9 +1249,11 @@ private ExpanderImpl(EnvVars overrides) {
@Override
public void expand(EnvVars env) throws IOException, InterruptedException {
- LOGGER.log(Level.FINEST, "ExpanderImpl.expand - env before expand: {0}", new Object[]{env}); // JENKINS-40484
+ LOGGER.log(
+ Level.FINEST, "ExpanderImpl.expand - env before expand: {0}", new Object[] {env}); // JENKINS-40484
env.overrideAll(overrides);
- LOGGER.log(Level.FINEST, "ExpanderImpl.expand - env after expand: {0}", new Object[]{env}); // JENKINS-40484
+ LOGGER.log(
+ Level.FINEST, "ExpanderImpl.expand - env after expand: {0}", new Object[] {env}); // JENKINS-40484
}
}
@@ -1099,8 +1272,10 @@ private class WithMavenStepExecutionCallBack extends TailCall {
private final MavenSpyLogProcessor mavenSpyLogProcessor = new MavenSpyLogProcessor();
- private WithMavenStepExecutionCallBack(@NonNull FilePath tempBinDir, @NonNull List options,
- @NonNull MavenPublisherStrategy mavenPublisherStrategy) {
+ private WithMavenStepExecutionCallBack(
+ @NonNull FilePath tempBinDir,
+ @NonNull List options,
+ @NonNull MavenPublisherStrategy mavenPublisherStrategy) {
this.tempBinDirPath = tempBinDir.getRemote();
this.options = options;
this.mavenPublisherStrategy = mavenPublisherStrategy;
@@ -1179,7 +1354,7 @@ private Computer getComputer() throws AbortException {
LOGGER.log(Level.FINE, "Computer: {0}", computer.getName());
try {
LOGGER.log(Level.FINE, "Env: {0}", computer.getEnvironment());
- } catch (IOException | InterruptedException e) {// ignored
+ } catch (IOException | InterruptedException e) { // ignored
}
}
return computer;
@@ -1198,32 +1373,31 @@ private static FilePath tempDir(FilePath ws) {
private static class ServerCredentialMappingToStringFunction implements Function {
@Override
public String apply(ServerCredentialMapping mapping) {
- return "[mavenServerId: " + mapping.getServerId() + ", jenkinsCredentials: " + mapping.getCredentialsId() + "]";
+ return "[mavenServerId: " + mapping.getServerId() + ", jenkinsCredentials: " + mapping.getCredentialsId()
+ + "]";
}
}
/**
* ToString of the mapping mavenServerId -> Credentials
*/
- private static class MavenServerToCredentialsMappingToStringFunction implements Function, String> {
+ private static class MavenServerToCredentialsMappingToStringFunction
+ implements Function, String> {
@Override
public String apply(@Nullable Entry entry) {
- if (entry == null)
- return null;
+ if (entry == null) return null;
String mavenServerId = entry.getKey();
StandardUsernameCredentials credentials = entry.getValue();
- return "[" +
- "mavenServerId: '" + mavenServerId + "', " +
- "jenkinsCredentials: '" + credentials.getId() + "'" +
- "]";
+ return "[" + "mavenServerId: '"
+ + mavenServerId + "', " + "jenkinsCredentials: '"
+ + credentials.getId() + "'" + "]";
}
}
private static class CredentialsToPrettyString implements Function {
@Override
public String apply(@javax.annotation.Nullable Credentials credentials) {
- if (credentials == null)
- return "null";
+ if (credentials == null) return "null";
String result = ClassUtils.getShortName(credentials.getClass()) + "[";
if (credentials instanceof IdCredentials) {
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java
index 62b19f77..cb58021e 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyAbstractCause.java
@@ -1,13 +1,12 @@
package org.jenkinsci.plugins.pipeline.maven.cause;
-import hudson.model.Cause;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
+import hudson.model.Cause;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
/**
* @author Cyrille Le Clerc
@@ -18,8 +17,7 @@ public abstract class MavenDependencyAbstractCause extends Cause implements Mave
private List omittedPipelineFullNames;
- public MavenDependencyAbstractCause() {
- }
+ public MavenDependencyAbstractCause() {}
public MavenDependencyAbstractCause(@Nullable List mavenArtifacts) {
this.mavenArtifacts = mavenArtifacts;
@@ -39,7 +37,6 @@ public void setMavenArtifacts(@NonNull List mavenArtifacts) {
this.mavenArtifacts = mavenArtifacts;
}
-
@NonNull
@Override
public List getOmittedPipelineFullNames() {
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java
index e127e082..3e2bb52c 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCause.java
@@ -1,10 +1,9 @@
package org.jenkinsci.plugins.pipeline.maven.cause;
-import hudson.model.Job;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-
import edu.umd.cs.findbugs.annotations.NonNull;
+import hudson.model.Job;
import java.util.List;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
/**
* @author Cyrille Le Clerc
@@ -27,5 +26,4 @@ public interface MavenDependencyCause {
void setOmittedPipelineFullNames(List omittedPipelineFullNames);
String getMavenArtifactsDescription();
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java
index 6e2744be..51abf1f6 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCauseHelper.java
@@ -1,14 +1,13 @@
package org.jenkinsci.plugins.pipeline.maven.cause;
import com.google.common.base.Preconditions;
-import hudson.model.Cause;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-
import edu.umd.cs.findbugs.annotations.NonNull;
+import hudson.model.Cause;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
/**
* @author Cyrille Le Clerc
@@ -24,8 +23,11 @@ public static List isSameCause(MavenDependencyCause newMavenCause
return Collections.emptyList();
}
- List newCauseArtifacts = Preconditions.checkNotNull(newMavenCause.getMavenArtifacts(), "newMavenCause.mavenArtifacts should not be null");
- List oldCauseArtifacts = Preconditions.checkNotNull(((MavenDependencyCause) oldMavenCause).getMavenArtifacts(), "oldMavenCause.mavenArtifacts should not be null");
+ List newCauseArtifacts = Preconditions.checkNotNull(
+ newMavenCause.getMavenArtifacts(), "newMavenCause.mavenArtifacts should not be null");
+ List oldCauseArtifacts = Preconditions.checkNotNull(
+ ((MavenDependencyCause) oldMavenCause).getMavenArtifacts(),
+ "oldMavenCause.mavenArtifacts should not be null");
List matchingArtifacts = new ArrayList<>();
for (MavenArtifact newCauseArtifact : newCauseArtifacts) {
@@ -33,12 +35,12 @@ public static List isSameCause(MavenDependencyCause newMavenCause
// snapshot without exact version (aka base version), cannot search for same cause
} else {
for (MavenArtifact oldCauseArtifact : oldCauseArtifacts) {
- if (Objects.equals(newCauseArtifact.getGroupId(), oldCauseArtifact.getGroupId()) &&
- Objects.equals(newCauseArtifact.getArtifactId(), oldCauseArtifact.getArtifactId()) &&
- Objects.equals(newCauseArtifact.getVersion(), oldCauseArtifact.getVersion()) &&
- Objects.equals(newCauseArtifact.getBaseVersion(), oldCauseArtifact.getBaseVersion()) &&
- Objects.equals(newCauseArtifact.getClassifier(), oldCauseArtifact.getClassifier()) &&
- Objects.equals(newCauseArtifact.getType(), oldCauseArtifact.getType())) {
+ if (Objects.equals(newCauseArtifact.getGroupId(), oldCauseArtifact.getGroupId())
+ && Objects.equals(newCauseArtifact.getArtifactId(), oldCauseArtifact.getArtifactId())
+ && Objects.equals(newCauseArtifact.getVersion(), oldCauseArtifact.getVersion())
+ && Objects.equals(newCauseArtifact.getBaseVersion(), oldCauseArtifact.getBaseVersion())
+ && Objects.equals(newCauseArtifact.getClassifier(), oldCauseArtifact.getClassifier())
+ && Objects.equals(newCauseArtifact.getType(), oldCauseArtifact.getType())) {
matchingArtifacts.add(newCauseArtifact);
}
}
@@ -51,7 +53,7 @@ public static List isSameCause(MavenDependencyCause newMavenCause
public static List isSameCause(MavenDependencyCause newMavenCause, List oldMavenCauses) {
List matchingArtifacts = new ArrayList<>();
- for (Cause oldMavenCause:oldMavenCauses) {
+ for (Cause oldMavenCause : oldMavenCauses) {
matchingArtifacts.addAll(isSameCause(newMavenCause, oldMavenCause));
}
return matchingArtifacts;
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java
index 9e7cc72a..6f0d0c59 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyCliCause.java
@@ -3,11 +3,10 @@
import hudson.console.ModelHyperlinkNote;
import hudson.model.TaskListener;
import hudson.model.User;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
/**
* @author Cyrille Le Clerc
@@ -38,8 +37,8 @@ public String getShortDescription() {
@Override
public void print(TaskListener listener) {
- listener.getLogger().println(
- "Started from command line by " + ModelHyperlinkNote.encodeTo("/user/" + startedBy, startedBy) + " for maven artifacts " + getMavenArtifactsDescription());
+ listener.getLogger()
+ .println("Started from command line by " + ModelHyperlinkNote.encodeTo("/user/" + startedBy, startedBy)
+ + " for maven artifacts " + getMavenArtifactsDescription());
}
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java
index 7692935c..42170a06 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/MavenDependencyUpstreamCause.java
@@ -1,13 +1,11 @@
package org.jenkinsci.plugins.pipeline.maven.cause;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
import hudson.console.ModelHyperlinkNote;
import hudson.model.Cause;
import hudson.model.Run;
import hudson.model.TaskListener;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -15,6 +13,7 @@
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
public class MavenDependencyUpstreamCause extends Cause.UpstreamCause implements MavenDependencyCause {
private List mavenArtifacts;
@@ -26,15 +25,20 @@ public MavenDependencyUpstreamCause(Run, ?> up, @NonNull MavenArtifact... mave
this.mavenArtifacts = Arrays.asList(mavenArtifact);
}
- public MavenDependencyUpstreamCause(Run, ?> up, @Nullable Collection mavenArtifacts, @Nullable Collection omittedPipelineFullNames) {
+ public MavenDependencyUpstreamCause(
+ Run, ?> up,
+ @Nullable Collection mavenArtifacts,
+ @Nullable Collection omittedPipelineFullNames) {
super(up);
this.mavenArtifacts = mavenArtifacts == null ? Collections.emptyList() : new ArrayList<>(mavenArtifacts);
- this.omittedPipelineFullNames = omittedPipelineFullNames == null ? Collections.emptyList() : new ArrayList<>(omittedPipelineFullNames);
+ this.omittedPipelineFullNames =
+ omittedPipelineFullNames == null ? Collections.emptyList() : new ArrayList<>(omittedPipelineFullNames);
}
@Override
public String getShortDescription() {
- return "Started by upstream build \"" + getUpstreamProject() + "\" #" + getUpstreamBuild() + " generating Maven artifacts: " + getMavenArtifactsDescription();
+ return "Started by upstream build \"" + getUpstreamProject() + "\" #" + getUpstreamBuild()
+ + " generating Maven artifacts: " + getMavenArtifactsDescription();
}
/**
@@ -72,12 +76,18 @@ private void print(TaskListener listener, int depth) {
Run, ?> upstreamRun = getUpstreamRun();
if (upstreamRun == null) {
- listener.getLogger().println("Started by upstream build " + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl(), getUpstreamProject()) +
- "\" #" + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl() + getUpstreamBuild(), Integer.toString(getUpstreamBuild())) +
- " generating Maven artifact: " + getMavenArtifactsDescription());
+ listener.getLogger()
+ .println("Started by upstream build "
+ + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl(), getUpstreamProject()) + "\" #"
+ + ModelHyperlinkNote.encodeTo(
+ '/' + getUpstreamUrl() + getUpstreamBuild(), Integer.toString(getUpstreamBuild()))
+ + " generating Maven artifact: "
+ + getMavenArtifactsDescription());
} else {
- listener.getLogger().println("Started by upstream build " +
- ModelHyperlinkNote.encodeTo('/' + upstreamRun.getUrl(), upstreamRun.getFullDisplayName()) + " generating Maven artifacts: " + getMavenArtifactsDescription());
+ listener.getLogger()
+ .println("Started by upstream build "
+ + ModelHyperlinkNote.encodeTo('/' + upstreamRun.getUrl(), upstreamRun.getFullDisplayName())
+ + " generating Maven artifacts: " + getMavenArtifactsDescription());
}
if (getUpstreamCauses() != null && !getUpstreamCauses().isEmpty()) {
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java
index 66ce7ca5..f00e6ebb 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cause/OtherMavenDependencyCause.java
@@ -1,17 +1,17 @@
package org.jenkinsci.plugins.pipeline.maven.cause;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.List;
import java.util.Objects;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
/**
* @author Cyrille Le Clerc
*/
public class OtherMavenDependencyCause extends MavenDependencyAbstractCause {
final String shortDescription;
+
public OtherMavenDependencyCause(@NonNull String shortDescription) {
super();
this.shortDescription = Objects.requireNonNull(shortDescription);
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java
index 13ef9608..9c8f0af1 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/cli/TriggerDownstreamPipelinesCommand.java
@@ -2,6 +2,7 @@
import hudson.Extension;
import hudson.cli.CLICommand;
+import java.util.Collection;
import jenkins.model.Jenkins;
import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCliCause;
@@ -9,8 +10,6 @@
import org.jenkinsci.plugins.pipeline.maven.service.ServiceLoggerImpl;
import org.kohsuke.args4j.Option;
-import java.util.Collection;
-
/**
* @author Cyrille Le Clerc
*/
@@ -18,12 +17,25 @@
public class TriggerDownstreamPipelinesCommand extends CLICommand {
@Option(name = "--groupId", aliases = "-g", usage = "Group ID", required = true)
public String groupId;
+
@Option(name = "--artifactId", aliases = "-a", usage = "Artifact ID", required = true)
public String artifactId;
- @Option(name = "--version", aliases = "-v", usage = "Artifact version (e.g. '1.0-SNAPSHOT' is just built locally or '1.0-20100529-1213' when a SNAPSHOT artifact is deployed to a Maven repository or '1.0' for a released version", required = true)
+
+ @Option(
+ name = "--version",
+ aliases = "-v",
+ usage =
+ "Artifact version (e.g. '1.0-SNAPSHOT' is just built locally or '1.0-20100529-1213' when a SNAPSHOT artifact is deployed to a Maven repository or '1.0' for a released version",
+ required = true)
public String version;
- @Option(name = "--base-version", aliases = "-bv", usage = "Artifact base version (e.g. '1.0-SNAPSHOT'). The base version is different from the '--version' that provides the timestamped version number when uploading snapshots to Maven repository")
+
+ @Option(
+ name = "--base-version",
+ aliases = "-bv",
+ usage =
+ "Artifact base version (e.g. '1.0-SNAPSHOT'). The base version is different from the '--version' that provides the timestamped version number when uploading snapshots to Maven repository")
public String baseVersion;
+
@Option(name = "--type", aliases = "-t", usage = "Artifact type", required = true)
public String type;
@@ -32,16 +44,24 @@ public String getShortDescription() {
return "Triggers the downstream pipelines of the given Maven artifact based on their Maven dependencies";
}
-
@Override
protected int run() throws Exception {
/*
* @Inject does NOT work to inject GlobalPipelineMavenConfig in the TriggerDownstreamPipelinesCommand instance, use static code :-(
*/
- PipelineTriggerService pipelineTriggerService = GlobalPipelineMavenConfig.get().getPipelineTriggerService();
+ PipelineTriggerService pipelineTriggerService =
+ GlobalPipelineMavenConfig.get().getPipelineTriggerService();
- MavenDependencyCliCause cause = new MavenDependencyCliCause(Jenkins.getAuthentication().getName());
- Collection triggeredPipelines = pipelineTriggerService.triggerDownstreamPipelines(groupId, artifactId, baseVersion, version, type, cause, new ServiceLoggerImpl(this.stdout, this.stderr, null));
+ MavenDependencyCliCause cause =
+ new MavenDependencyCliCause(Jenkins.getAuthentication().getName());
+ Collection triggeredPipelines = pipelineTriggerService.triggerDownstreamPipelines(
+ groupId,
+ artifactId,
+ baseVersion,
+ version,
+ type,
+ cause,
+ new ServiceLoggerImpl(this.stdout, this.stderr, null));
stdout.println(triggeredPipelines);
return 0;
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java
index d69a7228..cff4a94e 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MaskPasswordsConsoleLogFilter.java
@@ -4,12 +4,10 @@
import com.cloudbees.plugins.credentials.Credentials;
import com.cloudbees.plugins.credentials.common.PasswordCredentials;
import com.cloudbees.plugins.credentials.common.UsernamePasswordCredentials;
-
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.console.ConsoleLogFilter;
import hudson.model.Run;
import hudson.util.Secret;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
@@ -27,23 +25,26 @@
*/
public class MaskPasswordsConsoleLogFilter extends ConsoleLogFilter implements Serializable {
private static final long serialVersionUID = 1;
- private final static Logger LOGGER = Logger.getLogger(MaskPasswordsConsoleLogFilter.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(MaskPasswordsConsoleLogFilter.class.getName());
private final Secret secretsAsRegexp;
private final String charsetName;
public MaskPasswordsConsoleLogFilter(@NonNull Collection secrets, @NonNull String charsetName) {
- this.secretsAsRegexp = Secret.fromString(SecretPatterns.getAggregateSecretPattern(secrets).toString());
+ this.secretsAsRegexp = Secret.fromString(
+ SecretPatterns.getAggregateSecretPattern(secrets).toString());
this.charsetName = charsetName;
}
@Override
public OutputStream decorateLogger(Run build, final OutputStream logger) throws IOException, InterruptedException {
- return new SecretPatterns.MaskingOutputStream(logger, () -> Pattern.compile(secretsAsRegexp.getPlainText()), charsetName);
+ return new SecretPatterns.MaskingOutputStream(
+ logger, () -> Pattern.compile(secretsAsRegexp.getPlainText()), charsetName);
}
@NonNull
- public static MaskPasswordsConsoleLogFilter newMaskPasswordsConsoleLogFilter(@NonNull Iterable credentials, @NonNull Charset charset){
+ public static MaskPasswordsConsoleLogFilter newMaskPasswordsConsoleLogFilter(
+ @NonNull Iterable credentials, @NonNull Charset charset) {
Collection secrets = toString(credentials);
return new MaskPasswordsConsoleLogFilter(secrets, charset.name());
}
@@ -69,7 +70,9 @@ protected static Collection toString(@NonNull Iterable cred
}
// omit the private key, there
} else {
- LOGGER.log(Level.FINE, "Skip masking of unsupported credentials type {0}: {1}", new Object[]{creds.getClass(), creds.getDescriptor().getDisplayName()});
+ LOGGER.log(Level.FINE, "Skip masking of unsupported credentials type {0}: {1}", new Object[] {
+ creds.getClass(), creds.getDescriptor().getDisplayName()
+ });
}
}
return result;
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java
index 4c8840a5..d1c5fe7f 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenColorizerConsoleLogFilter.java
@@ -2,7 +2,6 @@
import hudson.console.ConsoleLogFilter;
import hudson.model.Run;
-
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
@@ -28,8 +27,7 @@ private Object readResolve() {
}
@Override
- public OutputStream decorateLogger(Run run, final OutputStream logger)
- throws IOException, InterruptedException {
+ public OutputStream decorateLogger(Run run, final OutputStream logger) throws IOException, InterruptedException {
return new MavenConsoleAnnotator(logger, Charset.forName(charset), notes);
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java
index ddf5d80f..98b3accf 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/console/MavenConsoleAnnotator.java
@@ -28,8 +28,6 @@
import hudson.tasks._maven.MavenErrorNote;
import hudson.tasks._maven.MavenMojoNote;
import hudson.tasks._maven.MavenWarningNote;
-import jenkins.util.JenkinsJVM;
-
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
@@ -37,6 +35,7 @@
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.stream.Stream;
+import jenkins.util.JenkinsJVM;
// adapted from version in hudson.tasks._maven
@@ -47,15 +46,17 @@ class MavenConsoleAnnotator extends LineTransformationOutputStream {
static byte[][] createNotes() {
JenkinsJVM.checkJenkinsJVM();
- return Stream.of(new MavenMojoNote(), new Maven3MojoNote(), new MavenWarningNote(), new MavenErrorNote()).map(note -> {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- try {
- note.encodeTo(baos);
- } catch (IOException x) { // should be impossible
- throw new RuntimeException(x);
- }
- return baos.toByteArray();
- }).toArray(byte[][]::new);
+ return Stream.of(new MavenMojoNote(), new Maven3MojoNote(), new MavenWarningNote(), new MavenErrorNote())
+ .map(note -> {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ try {
+ note.encodeTo(baos);
+ } catch (IOException x) { // should be impossible
+ throw new RuntimeException(x);
+ }
+ return baos.toByteArray();
+ })
+ .toArray(byte[][]::new);
}
private final OutputStream out;
@@ -96,7 +97,7 @@ protected void eol(byte[] b, int len) throws IOException {
out.write(notes[3]);
}
- out.write(b,0,len);
+ out.write(b, 0, len);
}
@Override
@@ -109,5 +110,4 @@ public void close() throws IOException {
super.close();
out.close();
}
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java
index b6ea8d22..1697bf19 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/AbstractWorkflowRunListener.java
@@ -4,12 +4,11 @@
import static java.util.stream.StreamSupport.stream;
import static org.jenkinsci.plugins.pipeline.maven.WithMavenStep.DescriptorImpl.FUNCTION_NAME;
-import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
-import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;
-
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.model.listeners.RunListener;
+import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
+import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;
public abstract class AbstractWorkflowRunListener extends RunListener> {
@@ -17,13 +16,16 @@ protected boolean shouldRun(Run, ?> run, TaskListener listener) {
if (!(run instanceof FlowExecutionOwner.Executable)) {
return false;
}
-
+
return ofNullable(((FlowExecutionOwner.Executable) run).asFlowExecutionOwner())
.map(owner -> {
try {
return owner.get();
} catch (Exception ex) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to introspect build steps: " + ex.toString());
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Failure to introspect build steps: "
+ + ex.toString());
return null;
}
})
@@ -33,7 +35,8 @@ protected boolean shouldRun(Run, ?> run, TaskListener listener) {
})
.map(scanner -> scanner.spliterator())
.map(iterator -> stream(iterator, false))
- .flatMap(stream -> stream.filter(n -> FUNCTION_NAME.equals(n.getDisplayFunctionName())).findAny())
+ .flatMap(stream -> stream.filter(n -> FUNCTION_NAME.equals(n.getDisplayFunctionName()))
+ .findAny())
.isPresent();
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java
index 9f9a93bf..d9c4d5ae 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DaoHelper.java
@@ -2,6 +2,9 @@
import static org.jenkinsci.plugins.pipeline.maven.dao.MonitoringPipelineMavenPluginDaoDecorator.registerCacheStatsSupplier;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import hudson.model.Item;
+import hudson.model.Run;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -9,19 +12,13 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-
import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
import org.jenkinsci.plugins.pipeline.maven.dao.CacheStats;
-import hudson.model.Item;
-import hudson.model.Run;
-
public class DaoHelper {
- private final static Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName());
private static final AtomicInteger GET_GENERATED_ARTIFACTS_HITS = new AtomicInteger();
private static final AtomicInteger GET_GENERATED_ARTIFACTS_MISSES = new AtomicInteger();
@@ -30,8 +27,10 @@ public class DaoHelper {
private static final AtomicInteger LIST_DOWNSTREAM_JOBS_MISSES = new AtomicInteger();
static {
- registerCacheStatsSupplier(() -> new CacheStats("getGeneratedArtifacts", GET_GENERATED_ARTIFACTS_HITS.get(), GET_GENERATED_ARTIFACTS_MISSES.get()));
- registerCacheStatsSupplier(() -> new CacheStats("listDownstreamJobsByArtifact", LIST_DOWNSTREAM_JOBS_HITS.get(), LIST_DOWNSTREAM_JOBS_MISSES.get()));
+ registerCacheStatsSupplier(() -> new CacheStats(
+ "getGeneratedArtifacts", GET_GENERATED_ARTIFACTS_HITS.get(), GET_GENERATED_ARTIFACTS_MISSES.get()));
+ registerCacheStatsSupplier(() -> new CacheStats(
+ "listDownstreamJobsByArtifact", LIST_DOWNSTREAM_JOBS_HITS.get(), LIST_DOWNSTREAM_JOBS_MISSES.get()));
}
private GlobalPipelineMavenConfig globalPipelineMavenConfig;
@@ -57,17 +56,21 @@ public DaoHelper(GlobalPipelineMavenConfig globalPipelineMavenConfig) {
List getGeneratedArtifacts(@NonNull String jobFullName, int buildNumber) {
String key = jobFullName + '#' + buildNumber;
- LOGGER.log(Level.FINER, "calling getGeneratedArtifacts {0} {1}, cache size: {2}",
- new Object[] { jobFullName, buildNumber, generatedArtifactsCache.size() });
+ LOGGER.log(Level.FINER, "calling getGeneratedArtifacts {0} {1}, cache size: {2}", new Object[] {
+ jobFullName, buildNumber, generatedArtifactsCache.size()
+ });
if (generatedArtifactsCache.containsKey(key)) {
- LOGGER.log(Level.FINER, "cache hit for getGeneratedArtifacts {0} {1}", new Object[] { jobFullName, buildNumber });
+ LOGGER.log(
+ Level.FINER, "cache hit for getGeneratedArtifacts {0} {1}", new Object[] {jobFullName, buildNumber
+ });
GET_GENERATED_ARTIFACTS_HITS.incrementAndGet();
} else {
GET_GENERATED_ARTIFACTS_MISSES.incrementAndGet();
}
- return generatedArtifactsCache.computeIfAbsent(key, k -> globalPipelineMavenConfig.getDao().getGeneratedArtifacts(jobFullName, buildNumber));
+ return generatedArtifactsCache.computeIfAbsent(
+ key, k -> globalPipelineMavenConfig.getDao().getGeneratedArtifacts(jobFullName, buildNumber));
}
/**
@@ -87,11 +90,14 @@ List getGeneratedArtifacts(@NonNull String jobFullName, int build
Map> listDownstreamJobsByArtifact(String jobFullName, int buildNumber) {
String key = jobFullName + '#' + buildNumber;
if (downstreamJobsByArtifact.containsKey(key)) {
- LOGGER.log(Level.FINER, "cache hit for listDownstreamJobsByArtifact {0} {1}", new Object[] { jobFullName, buildNumber });
+ LOGGER.log(Level.FINER, "cache hit for listDownstreamJobsByArtifact {0} {1}", new Object[] {
+ jobFullName, buildNumber
+ });
LIST_DOWNSTREAM_JOBS_HITS.incrementAndGet();
} else {
LIST_DOWNSTREAM_JOBS_MISSES.incrementAndGet();
}
- return downstreamJobsByArtifact.computeIfAbsent(key, k -> globalPipelineMavenConfig.getDao().listDownstreamJobsByArtifact(jobFullName, buildNumber));
+ return downstreamJobsByArtifact.computeIfAbsent(
+ key, k -> globalPipelineMavenConfig.getDao().listDownstreamJobsByArtifact(jobFullName, buildNumber));
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java
index 7af24b2e..110ae6f9 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncItemListener.java
@@ -4,14 +4,13 @@
import hudson.model.Item;
import hudson.model.ItemGroup;
import hudson.model.listeners.ItemListener;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.inject.Inject;
import jenkins.model.Jenkins;
import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
import org.jenkinsci.plugins.workflow.flow.BlockableResume;
-import javax.inject.Inject;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
/**
* Maintains the database in sync with the jobs and builds.
*
@@ -19,7 +18,7 @@
*/
@Extension
public class DatabaseSyncItemListener extends ItemListener {
- private final static Logger LOGGER = Logger.getLogger(DatabaseSyncItemListener.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(DatabaseSyncItemListener.class.getName());
@Inject
public GlobalPipelineMavenConfig globalPipelineMavenConfig;
@@ -30,14 +29,14 @@ public void onDeleted(Item item) {
LOGGER.log(Level.FINE, "onDeleted({0})", item);
globalPipelineMavenConfig.getDao().deleteJob(item.getFullName());
} else {
- LOGGER.log(Level.FINE, "Ignore onDeleted({0})", new Object[]{item});
+ LOGGER.log(Level.FINE, "Ignore onDeleted({0})", new Object[] {item});
}
}
@Override
public void onRenamed(Item item, String oldName, String newName) {
if (item instanceof BlockableResume) {
- LOGGER.log(Level.FINE, "onRenamed({0}, {1}, {2})", new Object[]{item, oldName, newName});
+ LOGGER.log(Level.FINE, "onRenamed({0}, {1}, {2})", new Object[] {item, oldName, newName});
String oldFullName;
ItemGroup parent = item.getParent();
@@ -49,18 +48,19 @@ public void onRenamed(Item item, String oldName, String newName) {
String newFullName = item.getFullName();
globalPipelineMavenConfig.getDao().renameJob(oldFullName, newFullName);
} else {
- LOGGER.log(Level.FINE, "Ignore onRenamed({0}, {1}, {2})", new Object[]{item, oldName, newName});
+ LOGGER.log(Level.FINE, "Ignore onRenamed({0}, {1}, {2})", new Object[] {item, oldName, newName});
}
}
@Override
public void onLocationChanged(Item item, String oldFullName, String newFullName) {
if (item instanceof BlockableResume) {
- LOGGER.log(Level.FINE, "onLocationChanged({0}, {1}, {2})", new Object[]{item, oldFullName, newFullName});
+ LOGGER.log(Level.FINE, "onLocationChanged({0}, {1}, {2})", new Object[] {item, oldFullName, newFullName});
globalPipelineMavenConfig.getDao().renameJob(oldFullName, newFullName);
} else {
- LOGGER.log(Level.FINE, "Ignore onLocationChanged({0}, {1}, {2})", new Object[]{item, oldFullName, newFullName});
+ LOGGER.log(
+ Level.FINE, "Ignore onLocationChanged({0}, {1}, {2})", new Object[] {item, oldFullName, newFullName
+ });
}
}
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java
index a45846f2..6a07276c 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DatabaseSyncRunListener.java
@@ -1,14 +1,13 @@
package org.jenkinsci.plugins.pipeline.maven.listeners;
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import hudson.model.Cause;
import hudson.model.Result;
import hudson.model.Run;
import hudson.model.TaskListener;
-import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
import javax.inject.Inject;
+import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
/**
* @author Cyrille Le Clerc
@@ -28,16 +27,21 @@ public void onDeleted(Run, ?> run) {
public void onInitialize(Run, ?> run) {
super.onInitialize(run);
- for (Cause cause: run.getCauses()) {
+ for (Cause cause : run.getCauses()) {
if (cause instanceof Cause.UpstreamCause) {
Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) cause;
String upstreamJobName = upstreamCause.getUpstreamProject();
int upstreamBuildNumber = upstreamCause.getUpstreamBuild();
- globalPipelineMavenConfig.getDao().recordBuildUpstreamCause(upstreamJobName, upstreamBuildNumber, run.getParent().getFullName(), run.getNumber());
+ globalPipelineMavenConfig
+ .getDao()
+ .recordBuildUpstreamCause(
+ upstreamJobName,
+ upstreamBuildNumber,
+ run.getParent().getFullName(),
+ run.getNumber());
}
}
-
}
/*
@@ -56,11 +60,15 @@ public void onCompleted(Run, ?> workflowRun, @NonNull TaskListener listener) {
if (result == null) {
result = Result.SUCCESS; // FIXME more elegant handling
}
- globalPipelineMavenConfig.getDao().updateBuildOnCompletion(
- workflowRun.getParent().getFullName(),
- workflowRun.getNumber(),
- result.ordinal,
- workflowRun.getStartTimeInMillis(),
- Math.max(System.currentTimeMillis() - workflowRun.getStartTimeInMillis(), 0)); // @see HUDSON-5844
+ globalPipelineMavenConfig
+ .getDao()
+ .updateBuildOnCompletion(
+ workflowRun.getParent().getFullName(),
+ workflowRun.getNumber(),
+ result.ordinal,
+ workflowRun.getStartTimeInMillis(),
+ Math.max(
+ System.currentTimeMillis() - workflowRun.getStartTimeInMillis(),
+ 0)); // @see HUDSON-5844
}
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java
index 4292b723..9e2ec145 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/listeners/DownstreamPipelineTriggerRunListener.java
@@ -1,29 +1,15 @@
package org.jenkinsci.plugins.pipeline.maven.listeners;
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import hudson.console.ModelHyperlinkNote;
import hudson.model.Cause;
import hudson.model.CauseAction;
import hudson.model.Job;
import hudson.model.Queue;
+import hudson.model.Queue.Task;
import hudson.model.Run;
import hudson.model.TaskListener;
-import hudson.model.Queue.Task;
-import jenkins.model.Jenkins;
-import jenkins.model.ParameterizedJobMixIn;
-import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyAbstractCause;
-import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCause;
-import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCauseHelper;
-import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyUpstreamCause;
-import org.jenkinsci.plugins.pipeline.maven.cause.OtherMavenDependencyCause;
-import org.jenkinsci.plugins.pipeline.maven.dao.UpstreamMemory;
-import org.jenkinsci.plugins.pipeline.maven.trigger.WorkflowJobDependencyTrigger;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import javax.inject.Inject;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@@ -39,6 +25,18 @@
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+import jenkins.model.Jenkins;
+import jenkins.model.ParameterizedJobMixIn;
+import org.jenkinsci.plugins.pipeline.maven.GlobalPipelineMavenConfig;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
+import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyAbstractCause;
+import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCause;
+import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyCauseHelper;
+import org.jenkinsci.plugins.pipeline.maven.cause.MavenDependencyUpstreamCause;
+import org.jenkinsci.plugins.pipeline.maven.cause.OtherMavenDependencyCause;
+import org.jenkinsci.plugins.pipeline.maven.dao.UpstreamMemory;
+import org.jenkinsci.plugins.pipeline.maven.trigger.WorkflowJobDependencyTrigger;
/**
* Trigger downstream pipelines.
@@ -48,81 +46,115 @@
@Extension
public class DownstreamPipelineTriggerRunListener extends AbstractWorkflowRunListener {
- private final static Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(DownstreamPipelineTriggerRunListener.class.getName());
@Inject
public GlobalPipelineMavenConfig globalPipelineMavenConfig;
@Override
public void onCompleted(Run, ?> upstreamBuild, @NonNull TaskListener listener) {
- LOGGER.log(Level.FINER, "onCompleted({0})", new Object[]{upstreamBuild});
+ LOGGER.log(Level.FINER, "onCompleted({0})", new Object[] {upstreamBuild});
UpstreamMemory upstreamMemory = new UpstreamMemory();
DaoHelper daoHelper = new DaoHelper(globalPipelineMavenConfig);
if (!shouldRun(upstreamBuild, listener)) {
- LOGGER.log(Level.FINE, "Skipping downstream pipeline triggering for {0} as withMaven step not found.",
- new Object[]{upstreamBuild});
+ LOGGER.log(
+ Level.FINE,
+ "Skipping downstream pipeline triggering for {0} as withMaven step not found.",
+ new Object[] {upstreamBuild});
return;
}
long startTimeInNanos = System.nanoTime();
- if(LOGGER.isLoggable(Level.FINER)) {
+ if (LOGGER.isLoggable(Level.FINER)) {
listener.getLogger().println("[withMaven] pipelineGraphPublisher - triggerDownstreamPipelines");
}
- if (!globalPipelineMavenConfig.getTriggerDownstreamBuildsResultsCriteria().contains(upstreamBuild.getResult())) {
- Map> omittedPipelineFullNamesAndCauses = new HashMap<>();
- for (Cause cause: upstreamBuild.getCauses()) {
+ if (!globalPipelineMavenConfig
+ .getTriggerDownstreamBuildsResultsCriteria()
+ .contains(upstreamBuild.getResult())) {
+ Map> omittedPipelineFullNamesAndCauses = new HashMap<>();
+ for (Cause cause : upstreamBuild.getCauses()) {
if (cause instanceof MavenDependencyCause) {
MavenDependencyCause mavenDependencyCause = (MavenDependencyCause) cause;
- for (String omittedPipelineFullName: mavenDependencyCause.getOmittedPipelineFullNames()) {
- omittedPipelineFullNamesAndCauses.computeIfAbsent(omittedPipelineFullName, p-> new ArrayList<>()).add(mavenDependencyCause);
+ for (String omittedPipelineFullName : mavenDependencyCause.getOmittedPipelineFullNames()) {
+ omittedPipelineFullNamesAndCauses
+ .computeIfAbsent(omittedPipelineFullName, p -> new ArrayList<>())
+ .add(mavenDependencyCause);
}
}
}
if (omittedPipelineFullNamesAndCauses.isEmpty()) {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] Skip triggering downstream jobs for upstream build with ignored result status " + upstreamBuild + ": " + upstreamBuild.getResult());
+ listener.getLogger()
+ .println(
+ "[withMaven] Skip triggering downstream jobs for upstream build with ignored result status "
+ + upstreamBuild + ": " + upstreamBuild.getResult());
}
} else {
- for (Map.Entry> entry: omittedPipelineFullNamesAndCauses.entrySet()) {
+ for (Map.Entry> entry :
+ omittedPipelineFullNamesAndCauses.entrySet()) {
Job omittedPipeline = Jenkins.get().getItemByFullName(entry.getKey(), Job.class);
if (omittedPipeline == null) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: " + entry.getKey() + " not resolved");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: "
+ + entry.getKey() + " not resolved");
continue;
}
List omittedPipelineTriggerCauses = new ArrayList<>();
- for (MavenDependencyCause cause: entry.getValue()) {
+ for (MavenDependencyCause cause : entry.getValue()) {
if (cause instanceof MavenDependencyUpstreamCause) {
- MavenDependencyUpstreamCause mavenDependencyUpstreamCause = (MavenDependencyUpstreamCause) cause;
- Run, ?> upstreamRun = mavenDependencyUpstreamCause.getUpstreamRun() == null ? upstreamBuild: mavenDependencyUpstreamCause.getUpstreamRun();
- omittedPipelineTriggerCauses.add(new MavenDependencyUpstreamCause(upstreamRun, mavenDependencyUpstreamCause.getMavenArtifacts(), Collections.emptyList()));
+ MavenDependencyUpstreamCause mavenDependencyUpstreamCause =
+ (MavenDependencyUpstreamCause) cause;
+ Run, ?> upstreamRun = mavenDependencyUpstreamCause.getUpstreamRun() == null
+ ? upstreamBuild
+ : mavenDependencyUpstreamCause.getUpstreamRun();
+ omittedPipelineTriggerCauses.add(new MavenDependencyUpstreamCause(
+ upstreamRun,
+ mavenDependencyUpstreamCause.getMavenArtifacts(),
+ Collections.emptyList()));
} else if (cause instanceof MavenDependencyAbstractCause) {
try {
- MavenDependencyCause mavenDependencyCause = ((MavenDependencyAbstractCause)cause).clone();
+ MavenDependencyCause mavenDependencyCause =
+ ((MavenDependencyAbstractCause) cause).clone();
mavenDependencyCause.setOmittedPipelineFullNames(Collections.emptyList());
omittedPipelineTriggerCauses.add((Cause) mavenDependencyCause);
} catch (CloneNotSupportedException e) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to clone pipeline cause " + cause + " : " + e);
- omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause(((MavenDependencyAbstractCause) cause).getShortDescription()));
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Failure to clone pipeline cause "
+ + cause + " : " + e);
+ omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause(
+ ((MavenDependencyAbstractCause) cause).getShortDescription()));
}
} else {
- omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause(((MavenDependencyAbstractCause) cause).getShortDescription()));
+ omittedPipelineTriggerCauses.add(new OtherMavenDependencyCause(
+ ((MavenDependencyAbstractCause) cause).getShortDescription()));
}
}
// TODO deduplicate pipeline triggers
// See jenkins.triggers.ReverseBuildTrigger.RunListenerImpl.onCompleted(Run, TaskListener)
- Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2(omittedPipeline, -1, new CauseAction(omittedPipelineTriggerCauses));
+ Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2(
+ omittedPipeline, -1, new CauseAction(omittedPipelineTriggerCauses));
if (queuedItem == null) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to trigger omitted pipeline " + ModelHyperlinkNote.encodeTo(omittedPipeline) + " due to causes " +
- omittedPipelineTriggerCauses + ", invocation rejected.");
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Failure to trigger omitted pipeline "
+ + ModelHyperlinkNote.encodeTo(omittedPipeline) + " due to causes "
+ + omittedPipelineTriggerCauses + ", invocation rejected.");
} else {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(omittedPipeline) + " despite build result " +
- upstreamBuild.getResult() + " for the upstream causes: " + omittedPipelineTriggerCauses.stream().map(
- Cause::getShortDescription).collect(Collectors.joining(", ")));
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline "
+ + ModelHyperlinkNote.encodeTo(omittedPipeline)
+ + " despite build result " + upstreamBuild.getResult()
+ + " for the upstream causes: "
+ + omittedPipelineTriggerCauses.stream()
+ .map(Cause::getShortDescription)
+ .collect(Collectors.joining(", ")));
}
}
}
@@ -130,9 +162,14 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
}
try {
- this.globalPipelineMavenConfig.getPipelineTriggerService().checkNoInfiniteLoopOfUpstreamCause(upstreamBuild);
+ this.globalPipelineMavenConfig
+ .getPipelineTriggerService()
+ .checkNoInfiniteLoopOfUpstreamCause(upstreamBuild);
} catch (IllegalStateException e) {
- listener.getLogger().println("[withMaven] WARNING abort infinite build trigger loop. Please consider opening a Jira issue: " + e.getMessage());
+ listener.getLogger()
+ .println(
+ "[withMaven] WARNING abort infinite build trigger loop. Please consider opening a Jira issue: "
+ + e.getMessage());
return;
}
@@ -140,15 +177,19 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
String upstreamPipelineFullName = upstreamPipeline.getFullName();
int upstreamBuildNumber = upstreamBuild.getNumber();
- Map> downstreamPipelinesByArtifact = globalPipelineMavenConfig.getDao().listDownstreamJobsByArtifact(upstreamPipelineFullName, upstreamBuildNumber);
- LOGGER.log(Level.FINER, "got downstreamPipelinesByArtifact for project {0} and build #{1}: {2}", new Object[]{upstreamPipelineFullName, upstreamBuildNumber, downstreamPipelinesByArtifact});
+ Map> downstreamPipelinesByArtifact = globalPipelineMavenConfig
+ .getDao()
+ .listDownstreamJobsByArtifact(upstreamPipelineFullName, upstreamBuildNumber);
+ LOGGER.log(Level.FINER, "got downstreamPipelinesByArtifact for project {0} and build #{1}: {2}", new Object[] {
+ upstreamPipelineFullName, upstreamBuildNumber, downstreamPipelinesByArtifact
+ });
Map> jobsToTrigger = new TreeMap<>();
- Map> omittedPipelineTriggersByPipelineFullname = new HashMap<>();
+ Map> omittedPipelineTriggersByPipelineFullname = new HashMap<>();
List rejectedPipelines = new ArrayList<>();
// build the list of pipelines to trigger
- for (Map.Entry> entry: downstreamPipelinesByArtifact.entrySet()) {
+ for (Map.Entry> entry : downstreamPipelinesByArtifact.entrySet()) {
MavenArtifact mavenArtifact = entry.getKey();
SortedSet downstreamPipelines = entry.getValue();
@@ -158,13 +199,23 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
if (jobsToTrigger.containsKey(downstreamPipelineFullName)) {
// downstream pipeline has already been added to the list of pipelines to trigger,
- // we have already verified that it's meeting requirements (not an infinite loop, authorized by security, not excessive triggering, buildable...)
+ // we have already verified that it's meeting requirements (not an infinite loop, authorized by
+ // security, not excessive triggering, buildable...)
if (LOGGER.isLoggable(Level.FINEST)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip eligibility check of pipeline " + downstreamPipelineFullName + " for artifact " + mavenArtifact.getShortDescription() + ", eligibility already confirmed");
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Skip eligibility check of pipeline "
+ + downstreamPipelineFullName + " for artifact "
+ + mavenArtifact.getShortDescription()
+ + ", eligibility already confirmed");
}
Set mavenArtifacts = jobsToTrigger.get(downstreamPipelineFullName);
if (mavenArtifacts == null) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Invalid state, no artifacts found for pipeline '" + downstreamPipelineFullName + "' while evaluating " + mavenArtifact.getShortDescription());
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Invalid state, no artifacts found for pipeline '"
+ + downstreamPipelineFullName + "' while evaluating "
+ + mavenArtifact.getShortDescription());
} else {
mavenArtifacts.add(mavenArtifact);
}
@@ -178,43 +229,71 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
}
if (rejectedPipelines.contains(downstreamPipelineFullName)) {
- LOGGER.log(Level.FINE, "Downstream pipeline {0} already checked",
- new Object[]{downstreamPipelineFullName});
+ LOGGER.log(Level.FINE, "Downstream pipeline {0} already checked", new Object[] {
+ downstreamPipelineFullName
+ });
continue;
}
- final Job<?, ?> downstreamPipeline = Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class);
+ final Job<?, ?> downstreamPipeline =
+ Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class);
if (downstreamPipeline == null || downstreamPipeline.getLastBuild() == null) {
- LOGGER.log(Level.FINE, "Downstream pipeline {0} or downstream pipeline last build not found from upstream build {1}. Database synchronization issue or security restriction?",
- new Object[]{downstreamPipelineFullName, upstreamBuild.getFullDisplayName(), Jenkins.getAuthentication()});
+ LOGGER.log(
+ Level.FINE,
+ "Downstream pipeline {0} or downstream pipeline last build not found from upstream build {1}. Database synchronization issue or security restriction?",
+ new Object[] {
+ downstreamPipelineFullName,
+ upstreamBuild.getFullDisplayName(),
+ Jenkins.getAuthentication()
+ });
rejectedPipelines.add(downstreamPipelineFullName);
continue;
}
int downstreamBuildNumber = downstreamPipeline.getLastBuild().getNumber();
- List downstreamPipelineGeneratedArtifacts = daoHelper.getGeneratedArtifacts(downstreamPipelineFullName, downstreamBuildNumber);
+ List downstreamPipelineGeneratedArtifacts =
+ daoHelper.getGeneratedArtifacts(downstreamPipelineFullName, downstreamBuildNumber);
if (LOGGER.isLoggable(Level.FINEST)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Pipeline " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " evaluated for because it has a dependency on " + mavenArtifact + " generates " + downstreamPipelineGeneratedArtifacts);
- }
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Pipeline "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline)
+ + " evaluated for because it has a dependency on " + mavenArtifact + " generates "
+ + downstreamPipelineGeneratedArtifacts);
+ }
for (MavenArtifact downstreamPipelineGeneratedArtifact : downstreamPipelineGeneratedArtifacts) {
- if (Objects.equals(mavenArtifact.getGroupId(), downstreamPipelineGeneratedArtifact.getGroupId()) &&
- Objects.equals(mavenArtifact.getArtifactId(), downstreamPipelineGeneratedArtifact.getArtifactId())) {
+ if (Objects.equals(mavenArtifact.getGroupId(), downstreamPipelineGeneratedArtifact.getGroupId())
+ && Objects.equals(
+ mavenArtifact.getArtifactId(),
+ downstreamPipelineGeneratedArtifact.getArtifactId())) {
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " for " + mavenArtifact + " because it generates artifact with same groupId:artifactId " + downstreamPipelineGeneratedArtifact);
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " for " + mavenArtifact
+ + " because it generates artifact with same groupId:artifactId "
+ + downstreamPipelineGeneratedArtifact);
}
continue downstreamPipelinesLoop;
}
}
- Map> downstreamDownstreamPipelinesByArtifact = daoHelper.listDownstreamJobsByArtifact(downstreamPipelineFullName, downstreamBuildNumber);
- for (Map.Entry> entry2 : downstreamDownstreamPipelinesByArtifact.entrySet()) {
+ Map> downstreamDownstreamPipelinesByArtifact =
+ daoHelper.listDownstreamJobsByArtifact(downstreamPipelineFullName, downstreamBuildNumber);
+ for (Map.Entry> entry2 :
+ downstreamDownstreamPipelinesByArtifact.entrySet()) {
SortedSet downstreamDownstreamPipelines = entry2.getValue();
if (downstreamDownstreamPipelines.contains(upstreamPipelineFullName)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Infinite loop detected: skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " " +
- " (dependency: " + mavenArtifact.getShortDescription() + ") because it is itself triggering this pipeline " +
- ModelHyperlinkNote.encodeTo(upstreamPipeline) + " (dependency: " + entry2.getKey().getShortDescription() + ")");
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Infinite loop detected: skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline) + " "
+ + " (dependency: "
+ + mavenArtifact.getShortDescription()
+ + ") because it is itself triggering this pipeline "
+ + ModelHyperlinkNote.encodeTo(upstreamPipeline)
+ + " (dependency: "
+ + entry2.getKey().getShortDescription() + ")");
// prevent infinite loop
continue downstreamPipelinesLoop;
}
@@ -222,22 +301,36 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
// Avoid excessive triggering
// See #46313
- Map transitiveUpstreamPipelines = globalPipelineMavenConfig.getDao().listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber, upstreamMemory);
+ Map transitiveUpstreamPipelines = globalPipelineMavenConfig
+ .getDao()
+ .listTransitiveUpstreamJobs(downstreamPipelineFullName, downstreamBuildNumber, upstreamMemory);
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Found transitive upstream pipelines for " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- ": " + transitiveUpstreamPipelines.keySet().stream().collect(Collectors.joining(",")));
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Found transitive upstream pipelines for "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline) + ": "
+ + transitiveUpstreamPipelines.keySet().stream()
+ .collect(Collectors.joining(",")));
}
- // If a job is running in this moment we get an empty list. We use the last successful build in this case
+ // If a job is running in this moment we get an empty list. We use the last successful build in this
+ // case
if (transitiveUpstreamPipelines != null && transitiveUpstreamPipelines.isEmpty()) {
Job<?, ?> job = Jenkins.get().getItemByFullName(downstreamPipelineFullName, Job.class);
if (job != null) {
Run lastSuccessfulBuild = job.getLastSuccessfulBuild();
if (lastSuccessfulBuild != null) {
- transitiveUpstreamPipelines = globalPipelineMavenConfig.getDao().listTransitiveUpstreamJobs(downstreamPipelineFullName, lastSuccessfulBuild.number, upstreamMemory);
+ transitiveUpstreamPipelines = globalPipelineMavenConfig
+ .getDao()
+ .listTransitiveUpstreamJobs(
+ downstreamPipelineFullName, lastSuccessfulBuild.number, upstreamMemory);
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Updated transitive upstream pipelines for " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- ": " + transitiveUpstreamPipelines.keySet().stream().collect(Collectors.joining(",")));
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Updated transitive upstream pipelines for "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline) + ": "
+ + transitiveUpstreamPipelines.keySet().stream()
+ .collect(Collectors.joining(",")));
}
}
}
@@ -246,128 +339,195 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
for (String transitiveUpstreamPipelineName : transitiveUpstreamPipelines.keySet()) {
// Skip if one of the downstream's upstream is already building or in queue
// Then it will get triggered anyway by that upstream, we don't need to trigger it again
- Job<?, ?> transitiveUpstreamPipeline = Jenkins.get().getItemByFullName(transitiveUpstreamPipelineName, Job.class);
+ Job<?, ?> transitiveUpstreamPipeline =
+ Jenkins.get().getItemByFullName(transitiveUpstreamPipelineName, Job.class);
if (transitiveUpstreamPipeline == null) {
// security: not allowed to view this transitive upstream pipeline, continue to loop
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- " because transitive pipeline " + transitiveUpstreamPipelineName + " is unaccessible");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline)
+ + " because transitive pipeline " + transitiveUpstreamPipelineName
+ + " is unaccessible");
}
continue;
} else if (transitiveUpstreamPipeline.getFullName().equals(upstreamPipeline.getFullName())) {
- // this upstream pipeline of the current downstreamPipeline is the upstream pipeline itself, continue to loop
+ // this upstream pipeline of the current downstreamPipeline is the upstream pipeline itself,
+ // continue to loop
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- " because transitive pipeline " + transitiveUpstreamPipelineName + " is the current one");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline)
+ + " because transitive pipeline " + transitiveUpstreamPipelineName
+ + " is the current one");
}
continue;
} else if (transitiveUpstreamPipeline.isBuilding()) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- " because it has a dependency already building: " + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline));
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline)
+ + " because it has a dependency already building: "
+ + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline));
continue downstreamPipelinesLoop;
} else if (isInQueue(transitiveUpstreamPipeline)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- " because it has a dependency already building or in queue: " + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline));
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline)
+ + " because it has a dependency already building or in queue: "
+ + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline));
continue downstreamPipelinesLoop;
} else if (downstreamPipelines.contains(transitiveUpstreamPipelineName)) {
- // Skip if this downstream pipeline will be triggered by another one of our downstream pipelines
- // That's the case when one of the downstream's transitive upstream is our own downstream
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamPipeline) +
- " because it has a dependency on a pipeline that will be triggered by this build: " + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline));
- omittedPipelineTriggersByPipelineFullname.computeIfAbsent(transitiveUpstreamPipelineName, p -> new TreeSet<>()).add(downstreamPipelineFullName);
+ // Skip if this downstream pipeline will be triggered by another one of our downstream pipelines
+ // That's the case when one of the downstream's transitive upstream is our own downstream
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamPipeline)
+ + " because it has a dependency on a pipeline that will be triggered by this build: "
+ + ModelHyperlinkNote.encodeTo(transitiveUpstreamPipeline));
+ omittedPipelineTriggersByPipelineFullname
+ .computeIfAbsent(transitiveUpstreamPipelineName, p -> new TreeSet<>())
+ .add(downstreamPipelineFullName);
continue downstreamPipelinesLoop;
}
}
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - checked transitive upstreams for: " + downstreamPipelineFullName + " build: " +
- downstreamBuildNumber + " result: " + String.join(",", transitiveUpstreamPipelines.keySet()));
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - checked transitive upstreams for: "
+ + downstreamPipelineFullName + " build: " + downstreamBuildNumber + " result: "
+ + String.join(",", transitiveUpstreamPipelines.keySet()));
if (!downstreamPipeline.isBuildable()) {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of non buildable (disabled: " +
- ((ParameterizedJobMixIn.ParameterizedJob<?, ?>) downstreamPipeline).isDisabled() + ", isHoldOffBuildUntilSave: " +
- downstreamPipeline.isHoldOffBuildUntilSave() + ") downstream pipeline " + downstreamPipeline.getFullName() +
- " from upstream build " + upstreamBuild.getFullDisplayName());
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of non buildable (disabled: "
+ ((ParameterizedJobMixIn.ParameterizedJob<?, ?>) downstreamPipeline)
+ .isDisabled()
+ + ", isHoldOffBuildUntilSave: "
+ + downstreamPipeline.isHoldOffBuildUntilSave()
+ + ") downstream pipeline " + downstreamPipeline.getFullName()
+ + " from upstream build "
+ + upstreamBuild.getFullDisplayName());
}
rejectedPipelines.add(downstreamPipelineFullName);
continue;
}
- WorkflowJobDependencyTrigger downstreamPipelineTrigger = this.globalPipelineMavenConfig.getPipelineTriggerService().getWorkflowJobDependencyTrigger((ParameterizedJobMixIn.ParameterizedJob<?, ?>) downstreamPipeline);
+ WorkflowJobDependencyTrigger downstreamPipelineTrigger = this.globalPipelineMavenConfig
+ .getPipelineTriggerService()
+ .getWorkflowJobDependencyTrigger(
+ (ParameterizedJobMixIn.ParameterizedJob<?, ?>) downstreamPipeline);
if (downstreamPipelineTrigger == null) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of downstream pipeline " + downstreamPipeline.getFullName() +
- " from upstream build " + upstreamBuild.getFullDisplayName() + ": dependency trigger not configured");
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of downstream pipeline "
+ + downstreamPipeline.getFullName() + " from upstream build "
+ + upstreamBuild.getFullDisplayName()
+ + ": dependency trigger not configured");
rejectedPipelines.add(downstreamPipelineFullName);
continue;
}
- boolean downstreamVisibleByUpstreamBuildAuth = this.globalPipelineMavenConfig.getPipelineTriggerService().isDownstreamVisibleByUpstreamBuildAuth(downstreamPipeline);
- boolean upstreamVisibleByDownstreamBuildAuth = this.globalPipelineMavenConfig.getPipelineTriggerService().isUpstreamBuildVisibleByDownstreamBuildAuth(upstreamPipeline, downstreamPipeline);
+ boolean downstreamVisibleByUpstreamBuildAuth = this.globalPipelineMavenConfig
+ .getPipelineTriggerService()
+ .isDownstreamVisibleByUpstreamBuildAuth(downstreamPipeline);
+ boolean upstreamVisibleByDownstreamBuildAuth = this.globalPipelineMavenConfig
+ .getPipelineTriggerService()
+ .isUpstreamBuildVisibleByDownstreamBuildAuth(upstreamPipeline, downstreamPipeline);
if (LOGGER.isLoggable(Level.FINER)) {
- LOGGER.log(Level.FINER,
- "upstreamPipeline (" + upstreamPipelineFullName + ", visibleByDownstreamBuildAuth: " + upstreamVisibleByDownstreamBuildAuth + "), " +
- " downstreamPipeline (" + downstreamPipeline.getFullName() + ", visibleByUpstreamBuildAuth: " + downstreamVisibleByUpstreamBuildAuth + "), " +
- "upstreamBuildAuth: " + Jenkins.getAuthentication());
+ LOGGER.log(
+ Level.FINER,
+ "upstreamPipeline (" + upstreamPipelineFullName + ", visibleByDownstreamBuildAuth: "
+ + upstreamVisibleByDownstreamBuildAuth + "), " + " downstreamPipeline ("
+ + downstreamPipeline.getFullName() + ", visibleByUpstreamBuildAuth: "
+ + downstreamVisibleByUpstreamBuildAuth + "), " + "upstreamBuildAuth: "
+ + Jenkins.getAuthentication());
}
if (downstreamVisibleByUpstreamBuildAuth && upstreamVisibleByDownstreamBuildAuth) {
- Set mavenArtifactsCausingTheTrigger = jobsToTrigger.computeIfAbsent(downstreamPipelineFullName, k -> new TreeSet<>());
- if(mavenArtifactsCausingTheTrigger.contains(mavenArtifact)) {
+ Set mavenArtifactsCausingTheTrigger =
+ jobsToTrigger.computeIfAbsent(downstreamPipelineFullName, k -> new TreeSet<>());
+ if (mavenArtifactsCausingTheTrigger.contains(mavenArtifact)) {
// TODO display warning
} else {
mavenArtifactsCausingTheTrigger.add(mavenArtifact);
}
} else {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of " + downstreamPipeline.getFullName() + " by " +
- upstreamBuild.getFullDisplayName() + ": downstreamVisibleByUpstreamBuildAuth: " + downstreamVisibleByUpstreamBuildAuth +
- ", upstreamVisibleByDownstreamBuildAuth: " + upstreamVisibleByDownstreamBuildAuth);
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering of "
+ + downstreamPipeline.getFullName() + " by " + upstreamBuild.getFullDisplayName()
+ + ": downstreamVisibleByUpstreamBuildAuth: "
+ + downstreamVisibleByUpstreamBuildAuth
+ + ", upstreamVisibleByDownstreamBuildAuth: "
+ + upstreamVisibleByDownstreamBuildAuth);
}
}
}
}
- // note: we could verify that the upstreamBuild.getCauses().getOmittedPipelineFullNames are listed in jobsToTrigger
+ // note: we could verify that the upstreamBuild.getCauses().getOmittedPipelineFullNames are listed in
+ // jobsToTrigger
// trigger the pipelines
triggerPipelinesLoop:
- for (Map.Entry> entry: jobsToTrigger.entrySet()) {
+ for (Map.Entry> entry : jobsToTrigger.entrySet()) {
String downstreamJobFullName = entry.getKey();
Job downstreamJob = Jenkins.get().getItemByFullName(downstreamJobFullName, Job.class);
if (downstreamJob == null) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: " + downstreamJobFullName + " not resolved");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Illegal state: "
+ + downstreamJobFullName + " not resolved");
continue;
}
Set mavenArtifacts = entry.getValue();
// See jenkins.triggers.ReverseBuildTrigger.RunListenerImpl.onCompleted(Run, TaskListener)
- MavenDependencyUpstreamCause cause = new MavenDependencyUpstreamCause(upstreamBuild, mavenArtifacts, omittedPipelineTriggersByPipelineFullname.get(downstreamJobFullName));
+ MavenDependencyUpstreamCause cause = new MavenDependencyUpstreamCause(
+ upstreamBuild,
+ mavenArtifacts,
+ omittedPipelineTriggersByPipelineFullname.get(downstreamJobFullName));
Run downstreamJobLastBuild = downstreamJob.getLastBuild();
if (downstreamJobLastBuild == null) {
// should never happen, we need at least one build to know the dependencies
// trigger downstream pipeline anyway
} else {
- List matchingMavenDependencies = MavenDependencyCauseHelper.isSameCause(cause, downstreamJobLastBuild.getCauses());
+ List matchingMavenDependencies =
+ MavenDependencyCauseHelper.isSameCause(cause, downstreamJobLastBuild.getCauses());
if (matchingMavenDependencies.isEmpty()) {
- for (Map.Entry> omittedPipeline : omittedPipelineTriggersByPipelineFullname.entrySet()) {
+ for (Map.Entry> omittedPipeline :
+ omittedPipelineTriggersByPipelineFullname.entrySet()) {
if (omittedPipeline.getValue().contains(downstreamJobFullName)) {
Job transitiveDownstreamJob = Jenkins.get().getItemByFullName(entry.getKey(), Job.class);
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
- + "downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + "because it will be triggered by transitive downstream " + ModelHyperlinkNote.encodeTo(transitiveDownstreamJob));
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + "downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob)
+ + "because it will be triggered by transitive downstream "
+ + ModelHyperlinkNote.encodeTo(transitiveDownstreamJob));
continue triggerPipelinesLoop; // don't trigger downstream pipeline
}
}
// trigger downstream pipeline
} else {
downstreamJobLastBuild.addAction(new CauseAction(cause));
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + " as it was already triggered for Maven dependencies: " +
- matchingMavenDependencies.stream().map(mavenDependency -> mavenDependency == null ? null : mavenDependency.getShortDescription()).collect(Collectors.joining(", ")));
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline "
+ + ModelHyperlinkNote.encodeTo(downstreamJob)
+ + " as it was already triggered for Maven dependencies: "
+ + matchingMavenDependencies.stream()
+ .map(mavenDependency -> mavenDependency == null
+ ? null
+ : mavenDependency.getShortDescription())
+ .collect(Collectors.joining(", ")));
try {
downstreamJobLastBuild.save();
} catch (IOException e) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Failure to update build " + downstreamJobLastBuild.getFullDisplayName() + ": " + e.toString());
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Failure to update build "
+ + downstreamJobLastBuild.getFullDisplayName() + ": " + e.toString());
}
continue; // don't trigger downstream pipeline
}
@@ -376,9 +536,12 @@ public void onCompleted(Run<?, ?> upstreamBuild, @NonNull TaskListener listener)
scheduleBuild(downstreamJob, cause, listener);
}
- long durationInMillis = TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTimeInNanos, TimeUnit.NANOSECONDS);
+ long durationInMillis =
+ TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTimeInNanos, TimeUnit.NANOSECONDS);
if (durationInMillis > TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS) || LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - completed in " + durationInMillis + " ms");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - completed in " + durationInMillis
+ + " ms");
}
}
@@ -387,24 +550,29 @@ private void scheduleBuild(Job downstreamJob, MavenDependencyUpstreamCause cause
// by locking in hudson.model.Queue.schedule2()
if (isInQueue(downstreamJob)) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering " + ModelHyperlinkNote.encodeTo(downstreamJob) +
- " because it is already in the queue");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering "
+ + ModelHyperlinkNote.encodeTo(downstreamJob) + " because it is already in the queue");
} else {
Queue.Item queuedItem = ParameterizedJobMixIn.scheduleBuild2(downstreamJob, -1, new CauseAction(cause));
String dependenciesMessage = cause.getMavenArtifactsDescription();
if (queuedItem == null) {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + " due to dependencies on " +
- dependenciesMessage + ", invocation rejected.");
+ listener.getLogger()
+ .println(
+ "[withMaven] downstreamPipelineTriggerRunListener - Skip triggering downstream pipeline "
+ + ModelHyperlinkNote.encodeTo(downstreamJob) + " due to dependencies on "
+ + dependenciesMessage + ", invocation rejected.");
} else {
- listener.getLogger().println("[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline " + ModelHyperlinkNote.encodeTo(downstreamJob) + "#" + downstreamJob.getNextBuildNumber() + " due to dependency on " +
- dependenciesMessage + " ...");
+ listener.getLogger()
+ .println("[withMaven] downstreamPipelineTriggerRunListener - Triggering downstream pipeline "
+ + ModelHyperlinkNote.encodeTo(downstreamJob) + "#" + downstreamJob.getNextBuildNumber()
+ + " due to dependency on " + dependenciesMessage + " ...");
}
}
}
private boolean isInQueue(Job<?, ?> job) {
- // isInQueue returns always false in WorkflowJob !
- return job instanceof Task && Jenkins.get().getQueue().contains((Task)job);
+ // isInQueue returns always false in WorkflowJob !
+ return job instanceof Task && Jenkins.get().getQueue().contains((Task) job);
}
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java
index 019a027d..c543525c 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher.java
@@ -35,7 +35,6 @@ public abstract class AbstractHealthAwarePublisher extends MavenPublisher {
*/
private String thresholdLimit = DEFAULT_PRIORITY_THRESHOLD_LIMIT;
-
public String getHealthy() {
return healthy;
}
@@ -76,27 +75,24 @@ protected void setHealthAwarePublisherAttributes(Object healthAwarePublisher) {
@Override
public String toString() {
- return getClass().getName() + "[" +
- "disabled='" + isDisabled() + '\'' +
- ", healthy='" + healthy + '\'' +
- ", unHealthy='" + unHealthy + '\'' +
- ", thresholdLimit='" + thresholdLimit + '\'' +
- ']';
+ return getClass().getName() + "[" + "disabled='"
+ + isDisabled() + '\'' + ", healthy='"
+ + healthy + '\'' + ", unHealthy='"
+ + unHealthy + '\'' + ", thresholdLimit='"
+ + thresholdLimit + '\'' + ']';
}
/**
* Required by org/jenkinsci/plugins/pipeline/maven/publishers/AbstractHealthAwarePublisher/health.jelly
*/
- public static abstract class DescriptorImpl extends MavenPublisher.DescriptorImpl {
-
- }
-
+ public abstract static class DescriptorImpl extends MavenPublisher.DescriptorImpl {}
/**
* @author Cyrille Le Clerc
*/
static class Helper {
- protected static void setHealthAwarePublisherAttributes(Object healthAwarePublisherAsObject, AbstractHealthAwarePublisher abstractHealthAwarePublisher) {
+ protected static void setHealthAwarePublisherAttributes(
+ Object healthAwarePublisherAsObject, AbstractHealthAwarePublisher abstractHealthAwarePublisher) {
if (healthAwarePublisherAsObject instanceof HealthAwarePublisher) {
HealthAwarePublisher healthAwarePublisher = (HealthAwarePublisher) healthAwarePublisherAsObject;
healthAwarePublisher.setHealthy(abstractHealthAwarePublisher.getHealthy());
@@ -105,4 +101,4 @@ protected static void setHealthAwarePublisherAttributes(Object healthAwarePublis
}
}
}
-}
\ No newline at end of file
+}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java
index 66ed4cce..0c91815d 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/ConcordionTestsPublisher.java
@@ -18,6 +18,7 @@
package org.jenkinsci.plugins.pipeline.maven.publishers;
+import edu.umd.cs.findbugs.annotations.NonNull;
import htmlpublisher.HtmlPublisher;
import htmlpublisher.HtmlPublisherTarget;
import hudson.Extension;
@@ -25,15 +26,6 @@
import hudson.model.Run;
import hudson.model.StreamBuildListener;
import hudson.model.TaskListener;
-import org.jenkinsci.Symbol;
-import org.jenkinsci.plugins.pipeline.maven.MavenPublisher;
-import org.jenkinsci.plugins.pipeline.maven.Messages;
-import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
-import org.jenkinsci.plugins.workflow.steps.StepContext;
-import org.kohsuke.stapler.DataBoundConstructor;
-import org.w3c.dom.Element;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
@@ -45,6 +37,13 @@
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.jenkinsci.Symbol;
+import org.jenkinsci.plugins.pipeline.maven.MavenPublisher;
+import org.jenkinsci.plugins.pipeline.maven.Messages;
+import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
+import org.jenkinsci.plugins.workflow.steps.StepContext;
+import org.kohsuke.stapler.DataBoundConstructor;
+import org.w3c.dom.Element;
/**
* @author Cyrille Le Clerc
@@ -60,23 +59,21 @@ public class ConcordionTestsPublisher extends MavenPublisher {
private static final long serialVersionUID = 1L;
@DataBoundConstructor
- public ConcordionTestsPublisher() {
-
- }
+ public ConcordionTestsPublisher() {}
/*
-
-
-
-
-
- ${project.build.directory}/failsafe-reports
-
- target/concordion-reports
-
-
-
- */
+
+
+
+
+
+ ${project.build.directory}/failsafe-reports
+
+ target/concordion-reports
+
+
+
+ */
@Override
public void process(@NonNull final StepContext context, @NonNull final Element mavenSpyLogsElt)
throws IOException, InterruptedException {
@@ -91,12 +88,15 @@ public void process(@NonNull final StepContext context, @NonNull final Element m
final Run run = context.get(Run.class);
Set concordionOutputDirPatterns = new HashSet<>();
- concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, GROUP_ID, SUREFIRE_ID, SUREFIRE_GOAL, "MojoSucceeded", "MojoFailed")));
- concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, GROUP_ID, FAILSAFE_ID, FAILSAFE_GOAL, "MojoSucceeded", "MojoFailed")));
+ concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin(
+ mavenSpyLogsElt, GROUP_ID, SUREFIRE_ID, SUREFIRE_GOAL, "MojoSucceeded", "MojoFailed")));
+ concordionOutputDirPatterns.addAll(findConcordionOutputDirPatterns(XmlUtils.getExecutionEventsByPlugin(
+ mavenSpyLogsElt, GROUP_ID, FAILSAFE_ID, FAILSAFE_GOAL, "MojoSucceeded", "MojoFailed")));
if (concordionOutputDirPatterns.isEmpty()) {
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] concordionPublisher - No concordion output dir pattern given, skip.");
+ listener.getLogger()
+ .println("[withMaven] concordionPublisher - No concordion output dir pattern given, skip.");
}
return;
}
@@ -107,22 +107,23 @@ public void process(@NonNull final StepContext context, @NonNull final Element m
}
if (paths.isEmpty()) {
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println(
- "[withMaven] concordionPublisher - Did not found any Concordion reports directory, skip.");
+ listener.getLogger()
+ .println(
+ "[withMaven] concordionPublisher - Did not found any Concordion reports directory, skip.");
}
return;
}
- listener.getLogger().println(
- "[withMaven] concordionPublisher - Found " + paths.size() + " file(s) in Concordion reports directory.");
-
+ listener.getLogger()
+ .println("[withMaven] concordionPublisher - Found " + paths.size()
+ + " file(s) in Concordion reports directory.");
try {
Class.forName("htmlpublisher.HtmlPublisher");
} catch (final ClassNotFoundException e) {
listener.getLogger().print("[withMaven] concordionPublisher - Jenkins ");
- listener.hyperlink("https://wiki.jenkins.io/display/JENKINS/HTML+Publisher+Plugin",
- "HTML Publisher Plugin");
+ listener.hyperlink(
+ "https://wiki.jenkins.io/display/JENKINS/HTML+Publisher+Plugin", "HTML Publisher Plugin");
listener.getLogger().println(" not found, do not archive concordion reports.");
return;
}
@@ -132,14 +133,15 @@ public void process(@NonNull final StepContext context, @NonNull final Element m
files.add(XmlUtils.getPathInWorkspace(path.getRemote(), workspace));
}
- final HtmlPublisherTarget target = new HtmlPublisherTarget("Concordion reports", ".",
- XmlUtils.join(files, ","), true, true, true);
+ final HtmlPublisherTarget target =
+ new HtmlPublisherTarget("Concordion reports", ".", XmlUtils.join(files, ","), true, true, true);
try {
- listener.getLogger().println(
- "[withMaven] concordionPublisher - Publishing HTML reports named \"" + target.getReportName() +
- "\" with the following files: " + target.getReportFiles());
- HtmlPublisher.publishReports(run, workspace, listener, Collections.singletonList(target), HtmlPublisher.class);
+ listener.getLogger()
+ .println("[withMaven] concordionPublisher - Publishing HTML reports named \""
+ + target.getReportName() + "\" with the following files: " + target.getReportFiles());
+ HtmlPublisher.publishReports(
+ run, workspace, listener, Collections.singletonList(target), HtmlPublisher.class);
} catch (final Exception e) {
listener.error("[withMaven] concordionPublisher - exception archiving Concordion reports: " + e);
LOGGER.log(Level.WARNING, "Exception processing Concordion reports archiving", e);
@@ -151,7 +153,8 @@ public void process(@NonNull final StepContext context, @NonNull final Element m
private Collection findConcordionOutputDirPatterns(@NonNull List elements) {
List result = new ArrayList<>();
for (Element element : elements) {
- Element envVars = XmlUtils.getUniqueChildElementOrNull(XmlUtils.getUniqueChildElement(element, "plugin"), "systemPropertyVariables");
+ Element envVars = XmlUtils.getUniqueChildElementOrNull(
+ XmlUtils.getUniqueChildElement(element, "plugin"), "systemPropertyVariables");
if (envVars != null) {
Element concordionOutputDir = XmlUtils.getUniqueChildElementOrNull(envVars, "concordion.output.dir");
if (concordionOutputDir != null) {
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java
index 1256f891..e471a9c0 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesFingerprintPublisher.java
@@ -23,12 +23,23 @@
*/
package org.jenkinsci.plugins.pipeline.maven.publishers;
+import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import hudson.FilePath;
import hudson.model.FingerprintMap;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.tasks.Fingerprinter;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import jenkins.model.Jenkins;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.Symbol;
@@ -40,18 +51,6 @@
import org.kohsuke.stapler.DataBoundSetter;
import org.w3c.dom.Element;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import static org.jenkinsci.plugins.pipeline.maven.publishers.DependenciesLister.listDependencies;
-
/**
* Fingerprint the dependencies of the maven project.
*
@@ -81,19 +80,16 @@ public DependenciesFingerprintPublisher() {
protected Set getIncludedScopes() {
Set includedScopes = new TreeSet<>();
- if (includeScopeCompile)
- includedScopes.add("compile");
- if (includeScopeRuntime)
- includedScopes.add("runtime");
- if (includeScopeProvided)
- includedScopes.add("provided");
- if (includeScopeTest)
- includedScopes.add("test");
+ if (includeScopeCompile) includedScopes.add("compile");
+ if (includeScopeRuntime) includedScopes.add("runtime");
+ if (includeScopeProvided) includedScopes.add("provided");
+ if (includeScopeTest) includedScopes.add("test");
return includedScopes;
}
@Override
- public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException {
+ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt)
+ throws IOException, InterruptedException {
Run run = context.get(Run.class);
TaskListener listener = context.get(TaskListener.class);
@@ -102,9 +98,11 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
List dependencies = listDependencies(mavenSpyLogsElt, LOGGER);
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] dependenciesFingerprintPublisher - filter: " +
- "versions[snapshot: " + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "], " +
- "scopes:" + getIncludedScopes());
+ listener.getLogger()
+ .println("[withMaven] dependenciesFingerprintPublisher - filter: " + "versions[snapshot: "
+ + isIncludeSnapshotVersions() + ", release: " + isIncludeReleaseVersions() + "], "
+ + "scopes:"
+ + getIncludedScopes());
}
Map artifactsToFingerPrint = new HashMap<>(); // artifactPathInFingerprintZone -> artifactMd5
@@ -112,21 +110,24 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
if (dependency.isSnapshot()) {
if (!includeSnapshotVersions) {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] Skip fingerprinting snapshot dependency: " + dependency);
+ listener.getLogger()
+ .println("[withMaven] Skip fingerprinting snapshot dependency: " + dependency);
}
continue;
}
} else {
if (!includeReleaseVersions) {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] Skip fingerprinting release dependency: " + dependency);
+ listener.getLogger()
+ .println("[withMaven] Skip fingerprinting release dependency: " + dependency);
}
continue;
}
}
if (!getIncludedScopes().contains(dependency.getScope())) {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] Skip fingerprinting dependency with ignored scope: " + dependency);
+ listener.getLogger()
+ .println("[withMaven] Skip fingerprinting dependency with ignored scope: " + dependency);
}
continue;
}
@@ -134,7 +135,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
try {
if (StringUtils.isEmpty(dependency.getFile())) {
if (LOGGER.isLoggable(Level.FINER)) {
- listener.getLogger().println("[withMaven] Can't fingerprint maven dependency with no file attached: " + dependency);
+ listener.getLogger()
+ .println("[withMaven] Can't fingerprint maven dependency with no file attached: "
+ + dependency);
}
continue;
}
@@ -144,28 +147,32 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
if (!(dependency.getFile().endsWith("." + dependency.getExtension()))) {
if (dependencyFilePath.isDirectory()) {
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] Skip fingerprinting of maven dependency of type directory " + dependency);
+ listener.getLogger()
+ .println("[withMaven] Skip fingerprinting of maven dependency of type directory "
+ + dependency);
}
continue;
}
}
String dependencyMavenRepoStyleFilePath =
- dependency.getGroupId().replace('.', '/') + "/" +
- dependency.getArtifactId() + "/" +
- dependency.getBaseVersion() + "/" +
- dependency.getFileNameWithBaseVersion();
-
+ dependency.getGroupId().replace('.', '/') + "/" + dependency.getArtifactId()
+ + "/" + dependency.getBaseVersion()
+ + "/" + dependency.getFileNameWithBaseVersion();
if (dependencyFilePath.exists()) {
- // the subsequent call to digest could test the existence but we don't want to prematurely optimize performances
+ // the subsequent call to digest could test the existence but we don't want to prematurely optimize
+ // performances
if (LOGGER.isLoggable(Level.FINE)) {
- listener.getLogger().println("[withMaven] Fingerprint dependency " + dependencyMavenRepoStyleFilePath);
+ listener.getLogger()
+ .println("[withMaven] Fingerprint dependency " + dependencyMavenRepoStyleFilePath);
}
String artifactDigest = dependencyFilePath.digest();
artifactsToFingerPrint.put(dependencyMavenRepoStyleFilePath, artifactDigest);
} else {
- listener.getLogger().println("[withMaven] FAILURE to fingerprint " + dependencyMavenRepoStyleFilePath + ", file not found");
+ listener.getLogger()
+ .println("[withMaven] FAILURE to fingerprint " + dependencyMavenRepoStyleFilePath
+ + ", file not found");
}
} catch (IOException | RuntimeException e) {
@@ -181,7 +188,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
for (Map.Entry artifactToFingerprint : artifactsToFingerPrint.entrySet()) {
String artifactPathInFingerprintZone = artifactToFingerprint.getKey();
String artifactMd5 = artifactToFingerprint.getValue();
- fingerprintMap.getOrCreate(null, artifactPathInFingerprintZone, artifactMd5).addFor(run);
+ fingerprintMap
+ .getOrCreate(null, artifactPathInFingerprintZone, artifactMd5)
+ .addFor(run);
}
// add action
@@ -195,11 +204,10 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
@Override
public String toString() {
- return getClass().getName() + "[" +
- "disabled=" + isDisabled() + ", " +
- "scopes=" + getIncludedScopes() + ", " +
- "versions={snapshot:" + isIncludeSnapshotVersions() + ", release:" + isIncludeReleaseVersions() + "}" +
- ']';
+ return getClass().getName() + "[" + "disabled="
+ + isDisabled() + ", " + "scopes="
+ + getIncludedScopes() + ", " + "versions={snapshot:"
+ + isIncludeSnapshotVersions() + ", release:" + isIncludeReleaseVersions() + "}" + ']';
}
public boolean isIncludeSnapshotVersions() {
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java
index 5abf5f37..a507cb9b 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/DependenciesLister.java
@@ -1,19 +1,17 @@
package org.jenkinsci.plugins.pipeline.maven.publishers;
-import org.apache.commons.lang.StringUtils;
-import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
-import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
-import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
-import org.w3c.dom.Element;
-
import edu.umd.cs.findbugs.annotations.NonNull;
-
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
+import org.jenkinsci.plugins.pipeline.maven.MavenDependency;
+import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
+import org.w3c.dom.Element;
/**
* List dependencies from the spy log.
@@ -27,30 +25,30 @@ public class DependenciesLister {
* @return list of {@link MavenArtifact}
*/
@NonNull
- public static List listDependencies(final Element mavenSpyLogs,
- final Logger logger) {
+ public static List listDependencies(final Element mavenSpyLogs, final Logger logger) {
final Set result = new HashSet<>();
- for (final Element dependencyResolutionResult : XmlUtils.getChildrenElements(mavenSpyLogs,
- "DependencyResolutionResult")) {
- final Element resolvedDependenciesElt = XmlUtils.getUniqueChildElementOrNull(
- dependencyResolutionResult, "resolvedDependencies");
+ for (final Element dependencyResolutionResult :
+ XmlUtils.getChildrenElements(mavenSpyLogs, "DependencyResolutionResult")) {
+ final Element resolvedDependenciesElt =
+ XmlUtils.getUniqueChildElementOrNull(dependencyResolutionResult, "resolvedDependencies");
if (resolvedDependenciesElt == null) {
continue;
}
- for (final Element dependencyElt : XmlUtils.getChildrenElements(resolvedDependenciesElt,
- "dependency")) {
- final MavenDependency dependencyArtifact = XmlUtils.newMavenDependency(
- dependencyElt);
+ for (final Element dependencyElt : XmlUtils.getChildrenElements(resolvedDependenciesElt, "dependency")) {
+ final MavenDependency dependencyArtifact = XmlUtils.newMavenDependency(dependencyElt);
final Element fileElt = XmlUtils.getUniqueChildElementOrNull(dependencyElt, "file");
- if (fileElt == null || fileElt.getTextContent() == null
+ if (fileElt == null
+ || fileElt.getTextContent() == null
|| fileElt.getTextContent().isEmpty()) {
- logger.log(Level.WARNING, "listDependencies: no associated file found for "
- + dependencyArtifact + " in " + XmlUtils.toString(dependencyElt));
+ logger.log(
+ Level.WARNING,
+ "listDependencies: no associated file found for " + dependencyArtifact + " in "
+ + XmlUtils.toString(dependencyElt));
} else {
dependencyArtifact.setFile(StringUtils.trim(fileElt.getTextContent()));
}
@@ -67,15 +65,13 @@ public static List listDependencies(final Element mavenSpyLogs,
* @return list of {@link MavenArtifact}
*/
@NonNull
- public static List listParentProjects(final Element mavenSpyLogs,
- final Logger logger) {
+ public static List listParentProjects(final Element mavenSpyLogs, final Logger logger) {
final Set result = new HashSet<>();
- for (final Element dependencyResolutionResult : XmlUtils.getExecutionEvents(mavenSpyLogs,
- "ProjectStarted")) {
- final Element parentProjectElt = XmlUtils.getUniqueChildElementOrNull(
- dependencyResolutionResult, "parentProject");
+ for (final Element dependencyResolutionResult : XmlUtils.getExecutionEvents(mavenSpyLogs, "ProjectStarted")) {
+ final Element parentProjectElt =
+ XmlUtils.getUniqueChildElementOrNull(dependencyResolutionResult, "parentProject");
if (parentProjectElt == null) {
continue;
@@ -93,5 +89,4 @@ public static List listParentProjects(final Element mavenSpyLogs,
return new ArrayList<>(result);
}
-
}
diff --git a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java
index 6f30bada..4a3850ea 100644
--- a/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java
+++ b/pipeline-maven/src/main/java/org/jenkinsci/plugins/pipeline/maven/publishers/FindbugsAnalysisPublisher.java
@@ -24,6 +24,7 @@
package org.jenkinsci.plugins.pipeline.maven.publishers;
+import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
@@ -31,6 +32,11 @@
import hudson.model.StreamBuildListener;
import hudson.model.TaskListener;
import hudson.plugins.findbugs.FindBugsPublisher;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.jenkinsci.Symbol;
import org.jenkinsci.plugins.pipeline.maven.MavenArtifact;
import org.jenkinsci.plugins.pipeline.maven.MavenSpyLogProcessor;
@@ -40,13 +46,6 @@
import org.kohsuke.stapler.DataBoundConstructor;
import org.w3c.dom.Element;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
/**
* @author Cyrille Le Clerc
*/
@@ -56,9 +55,7 @@ public class FindbugsAnalysisPublisher extends AbstractHealthAwarePublisher {
private static final long serialVersionUID = 1L;
@DataBoundConstructor
- public FindbugsAnalysisPublisher() {
-
- }
+ public FindbugsAnalysisPublisher() {}
/*
@@ -161,7 +158,8 @@ public FindbugsAnalysisPublisher() {
*/
@Override
- public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt) throws IOException, InterruptedException {
+ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsElt)
+ throws IOException, InterruptedException {
TaskListener listener = context.get(TaskListener.class);
if (listener == null) {
@@ -177,11 +175,19 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
} catch (ClassNotFoundException e) {
listener.getLogger().print("[withMaven] Jenkins ");
listener.hyperlink("https://wiki.jenkins-ci.org/display/JENKINS/FindBugs+Plugin", "FindBugs Plugin");
- listener.getLogger().println(" not found, don't display org.codehaus.mojo:findbugs-maven-plugin:findbugs results in pipeline screen.");
+ listener.getLogger()
+ .println(
+ " not found, don't display org.codehaus.mojo:findbugs-maven-plugin:findbugs results in pipeline screen.");
return;
}
- List findbugsEvents = XmlUtils.getExecutionEventsByPlugin(mavenSpyLogsElt, "org.codehaus.mojo", "findbugs-maven-plugin", "findbugs", "MojoSucceeded", "MojoFailed");
+ List findbugsEvents = XmlUtils.getExecutionEventsByPlugin(
+ mavenSpyLogsElt,
+ "org.codehaus.mojo",
+ "findbugs-maven-plugin",
+ "findbugs",
+ "MojoSucceeded",
+ "MojoFailed");
if (findbugsEvents.isEmpty()) {
LOGGER.log(Level.FINE, "No org.codehaus.mojo:findbugs-maven-plugin:findbugs execution found");
@@ -197,14 +203,18 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
MavenSpyLogProcessor.PluginInvocation pluginInvocation = XmlUtils.newPluginInvocation(pluginElt);
if (xmlOutputDirectoryElt == null) {
- listener.getLogger().println("[withMaven] No element found for in " + XmlUtils.toString(findBugsTestEvent));
+ listener.getLogger()
+ .println("[withMaven] No element found for in "
+ + XmlUtils.toString(findBugsTestEvent));
continue;
}
String xmlOutputDirectory = xmlOutputDirectoryElt.getTextContent().trim();
if (xmlOutputDirectory.contains("${project.build.directory}")) {
String projectBuildDirectory = XmlUtils.getProjectBuildDirectory(projectElt);
if (projectBuildDirectory == null || projectBuildDirectory.isEmpty()) {
- listener.getLogger().println("[withMaven] '${project.build.directory}' found for in " + XmlUtils.toString(findBugsTestEvent));
+ listener.getLogger()
+ .println("[withMaven] '${project.build.directory}' found for in "
+ + XmlUtils.toString(findBugsTestEvent));
continue;
}
@@ -213,7 +223,9 @@ public void process(@NonNull StepContext context, @NonNull Element mavenSpyLogsE
} else if (xmlOutputDirectory.contains("${basedir}")) {
String baseDir = projectElt.getAttribute("baseDir");
if (baseDir.isEmpty()) {
- listener.getLogger().println("[withMaven] '${basedir}' found for