diff --git a/arquillian-plugin-scanner/pom.xml b/arquillian-plugin-scanner/pom.xml index 22b71bab..74c16907 100644 --- a/arquillian-plugin-scanner/pom.xml +++ b/arquillian-plugin-scanner/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-arquillian-plugin-scanner diff --git a/arquillian-plugin/pom.xml b/arquillian-plugin/pom.xml index 7a2c8400..69fa65d2 100644 --- a/arquillian-plugin/pom.xml +++ b/arquillian-plugin/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-arquillian-plugin diff --git a/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ChannelMavenArtifactRepositoryManager.java b/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ChannelMavenArtifactRepositoryManager.java new file mode 100644 index 00000000..092c038e --- /dev/null +++ b/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ChannelMavenArtifactRepositoryManager.java @@ -0,0 +1,205 @@ +/* + * Copyright 2024 Red Hat, Inc. and/or its affiliates + * and other contributors as indicated by the @author tags. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.wildfly.glow.plugin.arquillian; + +import static org.wildfly.channel.maven.VersionResolverFactory.DEFAULT_REPOSITORY_MAPPER; + +import java.nio.file.Path; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.regex.Pattern; + +import org.apache.maven.repository.internal.MavenRepositorySystemUtils; +import org.eclipse.aether.DefaultRepositorySystemSession; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.artifact.Artifact; +import org.eclipse.aether.artifact.DefaultArtifact; +import org.eclipse.aether.repository.RemoteRepository; +import org.eclipse.aether.resolution.VersionRangeRequest; +import org.eclipse.aether.resolution.VersionRangeResolutionException; +import org.eclipse.aether.resolution.VersionRangeResult; +import org.jboss.galleon.api.MavenStreamResolver; +import org.jboss.galleon.universe.maven.MavenArtifact; +import org.jboss.galleon.universe.maven.MavenUniverseException; +import org.jboss.galleon.universe.maven.repo.MavenRepoManager; +import org.wildfly.channel.ArtifactTransferException; +import org.wildfly.channel.Channel; +import org.wildfly.channel.ChannelSession; +import org.wildfly.channel.NoStreamFoundException; +import org.wildfly.channel.Repository; +import org.wildfly.channel.UnresolvedMavenArtifactException; +import org.wildfly.channel.VersionResult; +import org.wildfly.channel.maven.VersionResolverFactory; +import org.wildfly.channel.spi.ChannelResolvable; + +public class ChannelMavenArtifactRepositoryManager implements MavenRepoManager, ChannelResolvable, MavenStreamResolver { + + private final ChannelSession channelSession; + private final RepositorySystem system; + private final DefaultRepositorySystemSession session; + private final List repositories; + + public ChannelMavenArtifactRepositoryManager(List channels, + RepositorySystem system, + RepositorySystemSession 
contextSession, + List repositories) + throws Exception { + session = MavenRepositorySystemUtils.newSession(); + this.repositories = repositories; + session.setLocalRepositoryManager(contextSession.getLocalRepositoryManager()); + Map mapping = new HashMap<>(); + for (RemoteRepository r : repositories) { + mapping.put(r.getId(), r); + } + Function mapper = r -> { + RemoteRepository rep = mapping.get(r.getId()); + if (rep == null) { + rep = DEFAULT_REPOSITORY_MAPPER.apply(r); + } + return rep; + }; + VersionResolverFactory factory = new VersionResolverFactory(system, session, mapper); + channelSession = new ChannelSession(channels, factory); + this.system = system; + } + + public ChannelSession getChannelSession() { + return channelSession; + } + + @Override + public void resolve(MavenArtifact artifact) throws MavenUniverseException { + try { + resolveFromChannels(artifact); + } catch (ArtifactTransferException ex) { + throw new MavenUniverseException(ex.getLocalizedMessage(), ex); + } catch (NoStreamFoundException ex) { + // unable to resolve the artifact through the channel. 
+ // if the version is defined, let's resolve it directly + if (artifact.getVersion() == null || artifact.getVersion().isEmpty()) { + throw new MavenUniverseException(ex.getLocalizedMessage(), ex); + } + try { + org.wildfly.channel.MavenArtifact mavenArtifact = channelSession.resolveDirectMavenArtifact( + artifact.getGroupId(), artifact.getArtifactId(), artifact.getExtension(), artifact.getClassifier(), + artifact.getVersion()); + artifact.setPath(mavenArtifact.getFile().toPath()); + } catch (UnresolvedMavenArtifactException e) { + // if the artifact can not be resolved directly either, we abort + throw new MavenUniverseException(e.getLocalizedMessage(), e); + } + } + } + + private void resolveFromChannels(MavenArtifact artifact) throws UnresolvedMavenArtifactException { + org.wildfly.channel.MavenArtifact result = channelSession.resolveMavenArtifact(artifact.getGroupId(), + artifact.getArtifactId(), artifact.getExtension(), artifact.getClassifier(), artifact.getVersion()); + artifact.setVersion(result.getVersion()); + artifact.setPath(result.getFile().toPath()); + } + + @Override + public void resolveLatestVersion(MavenArtifact artifact) throws MavenUniverseException { + throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + } + + @Override + public boolean isResolved(MavenArtifact artifact) throws MavenUniverseException { + throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + } + + @Override + public boolean isLatestVersionResolved(MavenArtifact artifact, String lowestQualifier) throws MavenUniverseException { + throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + } + + @Override + public void resolveLatestVersion(MavenArtifact artifact, String lowestQualifier, Pattern includeVersion, + Pattern excludeVersion) throws MavenUniverseException { + resolveLatestVersion(artifact, null, false); + } + + @Override + public void 
resolveLatestVersion(MavenArtifact artifact, String lowestQualifier, boolean locallyAvailable) + throws MavenUniverseException { + artifact.setVersion(getLatestVersion(artifact)); + resolve(artifact); + } + + @Override + public String getLatestVersion(MavenArtifact artifact) throws MavenUniverseException { + return getLatestVersion(artifact, null, null, null); + } + + @Override + public String getLatestVersion(MavenArtifact artifact, String lowestQualifier) throws MavenUniverseException { + return getLatestVersion(artifact, lowestQualifier, null, null); + } + + @Override + public String getLatestVersion(MavenArtifact artifact, String lowestQualifier, Pattern includeVersion, + Pattern excludeVersion) throws MavenUniverseException { + try { + return channelSession.resolveMavenArtifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getExtension(), + artifact.getClassifier(), null).getVersion(); + } catch (UnresolvedMavenArtifactException e) { + VersionRangeResult res = getVersionRange(new DefaultArtifact(artifact.getGroupId(), + artifact.getArtifactId(), artifact.getExtension(), artifact.getVersionRange())); + return res.getHighestVersion().toString(); + } + } + + @Override + public List getAllVersions(MavenArtifact artifact) throws MavenUniverseException { + throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + } + + @Override + public List getAllVersions(MavenArtifact artifact, Pattern includeVersion, Pattern excludeVersion) + throws MavenUniverseException { + throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + } + + @Override + public void install(MavenArtifact artifact, Path path) throws MavenUniverseException { + throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + } + + @Override + public String getLatestVersion(String groupId, String artifactId, String extension, String classifier, String baseVersion) { + VersionResult res = 
channelSession.findLatestMavenArtifactVersion(groupId, artifactId, extension, classifier, + baseVersion); + return res.getVersion(); + } + + private VersionRangeResult getVersionRange(Artifact artifact) throws MavenUniverseException { + VersionRangeRequest rangeRequest = new VersionRangeRequest(); + rangeRequest.setArtifact(artifact); + rangeRequest.setRepositories(repositories); + VersionRangeResult rangeResult; + try { + rangeResult = system.resolveVersionRange(session, rangeRequest); + } catch (VersionRangeResolutionException ex) { + throw new MavenUniverseException(ex.getLocalizedMessage(), ex); + } + return rangeResult; + } + +} diff --git a/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ConfiguredChannels.java b/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ConfiguredChannels.java index 96a0d274..e1a6aef6 100644 --- a/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ConfiguredChannels.java +++ b/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ConfiguredChannels.java @@ -49,17 +49,27 @@ public ConfiguredChannels(List channels, if (channels.isEmpty()) { throw new MojoExecutionException("No channel specified."); } + List channelDefinitions = new ArrayList<>(); + for (ChannelConfiguration channelConfiguration : channels) { + channelDefinitions.add(channelConfiguration.toChannel(repositories)); + } + channelSession = buildChannelSession(system, contextSession, repositories, channelDefinitions); + } + + ChannelSession getChannelSession() { + return channelSession; + } + + public static ChannelSession buildChannelSession(RepositorySystem system, + RepositorySystemSession contextSession, + List repositories, + List channels) { DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession(); session.setLocalRepositoryManager(contextSession.getLocalRepositoryManager()); - session.setOffline(offline); Map mapping = new HashMap<>(); for (RemoteRepository r : repositories) { 
mapping.put(r.getId(), r); } - List channelDefinitions = new ArrayList<>(); - for (ChannelConfiguration channelConfiguration : channels) { - channelDefinitions.add(channelConfiguration.toChannel(repositories)); - } Function mapper = r -> { RemoteRepository rep = mapping.get(r.getId()); if (rep == null) { @@ -68,11 +78,6 @@ public ConfiguredChannels(List channels, return rep; }; VersionResolverFactory factory = new VersionResolverFactory(system, session, mapper); - channelSession = new ChannelSession(channelDefinitions, factory); + return new ChannelSession(channels, factory); } - - ChannelSession getChannelSession() { - return channelSession; - } - } diff --git a/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ScanMojo.java b/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ScanMojo.java index fa052b8c..7155131b 100644 --- a/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ScanMojo.java +++ b/arquillian-plugin/src/main/java/org/wildfly/glow/plugin/arquillian/ScanMojo.java @@ -44,7 +44,6 @@ import javax.xml.stream.XMLStreamException; import java.io.File; import java.io.IOException; -import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; @@ -60,8 +59,6 @@ import java.util.Map; import java.util.Set; import org.apache.maven.plugin.logging.Log; -import org.wildfly.channel.UnresolvedMavenArtifactException; -import org.wildfly.channel.VersionResult; import org.jboss.galleon.ProvisioningException; import org.jboss.galleon.api.GalleonBuilder; import org.jboss.galleon.api.GalleonFeaturePack; @@ -70,10 +67,9 @@ import org.jboss.galleon.api.config.GalleonConfigurationWithLayersBuilder; import org.jboss.galleon.api.config.GalleonFeaturePackConfig; import org.jboss.galleon.api.config.GalleonProvisioningConfig; -import org.jboss.galleon.universe.Channel; import org.jboss.galleon.universe.FeaturePackLocation; -import 
org.jboss.galleon.universe.UniverseResolver; -import org.jboss.galleon.universe.maven.MavenChannel; +import org.jboss.galleon.universe.maven.repo.MavenRepoManager; +import org.wildfly.channel.Channel; import org.wildfly.glow.ScanArguments; import org.wildfly.glow.error.IdentifiedError; import static org.wildfly.glow.plugin.arquillian.GlowArquillianDeploymentExporter.TEST_CLASSPATH; @@ -322,66 +318,17 @@ public void execute() throws MojoExecutionException, MojoFailureException { for (String s : project.getTestClasspathElements()) { paths.add(new File(s).getAbsolutePath()); } - MavenArtifactRepositoryManager artifactResolver = new MavenArtifactRepositoryManager(repoSystem, repoSession, repositories); + MavenRepoManager artifactResolver; + ConfiguredChannels cr = null; if (channels != null && !channels.isEmpty()) { - getLog().debug("WildFly channel enabled, feature-pack versions are retrieved from channels (if stream known)."); - try { - ConfiguredChannels cr = new ConfiguredChannels(channels, - repoSystem, repoSession, repositories, - getLog(), true); - UniverseResolver universeResolver = UniverseResolver.builder().addArtifactResolver(artifactResolver).build(); - for (GalleonFeaturePack fp : featurePacks) { - if (fp.getLocation() == null && (fp.getGroupId() == null || fp.getArtifactId() == null)) { - throw new IllegalArgumentException("Feature-pack location or Maven GAV is missing"); - } - String groupId; - String artifactId; - String loc = fp.getLocation(); - if (loc == null) { - groupId = fp.getGroupId(); - artifactId = fp.getArtifactId(); - } else { - // Special case for G:A that conflicts with producer:channel that we can't have in the plugin. 
- if (!FeaturePackLocation.fromString(loc).hasUniverse()) { - long numSeparators = loc.chars().filter(ch -> ch == ':').count(); - if (numSeparators <= 1) { - loc += ":"; - } - } - FeaturePackLocation location = FeaturePackLocation.fromString(loc); - if (location.isMavenCoordinates()) { - String[] coordinates = loc.split(":"); - groupId = coordinates[0]; - artifactId = coordinates[1]; - } else { - Channel c = universeResolver.getChannel(location); - MavenChannel mc = (MavenChannel) c; - groupId = mc.getFeaturePackGroupId(); - artifactId = mc.getFeaturePackArtifactId(); - } - } - try { - VersionResult res = cr.getChannelSession().findLatestMavenArtifactVersion(groupId, artifactId, - fp.getExtension(), fp.getClassifier(), null); - getLog().debug(fp.getGroupId() + ":" + fp.getArtifactId() + ", Channel resolved version " + res.getVersion()); - if (fp.getLocation() == null) { - fp.setVersion(res.getVersion()); - } else { - FeaturePackLocation l = FeaturePackLocation.fromString(loc); - FeaturePackLocation resolved = new FeaturePackLocation(l.getUniverse(), - l.getProducerName(), - l.getChannelName(), - l.getFrequency(), - res.getVersion()); - fp.setLocation(resolved.toString()); - } - } catch (Exception ex) { - getLog().debug("Got exception trying to resolve " + fp.getGroupId() + ":" + fp.getArtifactId(), ex); - } - } - } catch (MalformedURLException | UnresolvedMavenArtifactException ex) { - throw new MojoExecutionException(ex.getLocalizedMessage(), ex); + getLog().debug("WildFly channel enabled."); + List lst = new ArrayList<>(); + for (ChannelConfiguration conf : channels) { + lst.add(conf.toChannel(repositories)); } + artifactResolver = new ChannelMavenArtifactRepositoryManager(lst, repoSystem, repoSession, repositories); + } else { + artifactResolver = new MavenArtifactRepositoryManager(repoSystem, repoSession, repositories); } Set profiles = new HashSet<>(); if (profile != null) { @@ -404,7 +351,6 @@ public void execute() throws MojoExecutionException, 
MojoFailureException { } Arguments arguments = argumentsBuilder.build(); - try (ScanResults results = GlowSession.scan(artifactResolver, arguments, writer)) { boolean skipTests = Boolean.getBoolean("maven.test.skip") || Boolean.getBoolean("skipTests"); @@ -626,7 +572,7 @@ private static void collectCpPaths(String javaHome, ClassLoader cl, StringBuilde } } - private Path buildInputConfig(Path outputFolder, MavenArtifactRepositoryManager artifactResolver) throws ProvisioningException, IOException, XMLStreamException { + private Path buildInputConfig(Path outputFolder, MavenRepoManager artifactResolver) throws ProvisioningException, IOException, XMLStreamException { GalleonProvisioningConfig.Builder inBuilder = GalleonProvisioningConfig.builder(); // Build config for (GalleonFeaturePack fp : featurePacks) { diff --git a/cli-support/pom.xml b/cli-support/pom.xml index 2b89744d..75686ddb 100644 --- a/cli-support/pom.xml +++ b/cli-support/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-cli-support jar diff --git a/cli-support/src/main/java/org/wildfly/glow/cli/support/CLIConfigurationResolver.java b/cli-support/src/main/java/org/wildfly/glow/cli/support/CLIConfigurationResolver.java index c9a6831d..04bd1c4d 100644 --- a/cli-support/src/main/java/org/wildfly/glow/cli/support/CLIConfigurationResolver.java +++ b/cli-support/src/main/java/org/wildfly/glow/cli/support/CLIConfigurationResolver.java @@ -30,26 +30,29 @@ public class CLIConfigurationResolver implements ConfigurationResolver { private final Set disabledDeployers; + private final Set enabledDeployers; private final boolean isOpenShift; public CLIConfigurationResolver() { - this(true, null); + this(true, null, null); } public CLIConfigurationResolver(boolean isOpenShift, - Set disabledDeployers) { + Set disabledDeployers, + Set enabledDeployers) { this.disabledDeployers = disabledDeployers == null ? 
Collections.emptySet() : disabledDeployers; + this.enabledDeployers = enabledDeployers == null ? Collections.emptySet() : enabledDeployers; this.isOpenShift = isOpenShift; } @Override public ResolvedEnvs getResolvedEnvs(Layer layer, Set input) throws Exception { if (isOpenShift) { - return OpenShiftSupport.getResolvedEnvs(layer, input, disabledDeployers); + return OpenShiftSupport.getResolvedEnvs(layer, input, disabledDeployers, enabledDeployers); } return null; } @Override public String getPossibleDeployer(Set layers) throws Exception { - return OpenShiftSupport.getPossibleDeployer(layers, disabledDeployers); + return OpenShiftSupport.getPossibleDeployer(layers, disabledDeployers, enabledDeployers); } } diff --git a/cli-support/src/main/java/org/wildfly/glow/cli/support/Constants.java b/cli-support/src/main/java/org/wildfly/glow/cli/support/Constants.java index 6f57eb4c..6b9e03a0 100644 --- a/cli-support/src/main/java/org/wildfly/glow/cli/support/Constants.java +++ b/cli-support/src/main/java/org/wildfly/glow/cli/support/Constants.java @@ -27,6 +27,8 @@ public interface Constants { String ADD_ONS_OPTION_SHORT = "-ao"; String ADD_LAYERS_FOR_JNDI_OPTION = "--add-layers-for-jndi"; String ADD_LAYERS_FOR_JNDI_OPTION_LABEL = ""; + String APP_NAME_OPTION = "--app-name"; + String APP_NAME_OPTION_LABEL = ""; String BATCH_OPTION = "--batch"; String BATCH_OPTION_SHORT = "-B"; String BUILD_ENV_FILE_OPTION = "--build-env-file"; @@ -35,6 +37,8 @@ public interface Constants { String CHANNELS_OPTION = "--channels"; String CHANNELS_OPTION_SHORT = "-cc"; String CHANNELS_OPTION_LABEL = ""; + String CONFIG_FILE_OPTION_LABEL = ""; + String CONFIG_FILE_OPTION = "--config-file"; String CLI_SCRIPT_OPTION = "--cli-script"; String CLI_SCRIPT_OPTION_SHORT = "-cs"; String CLI_SCRIPT_OPTION_LABEL = ""; @@ -44,10 +48,13 @@ public interface Constants { String CONFIG_STABILITY_OPTION = "--config-stability-level"; String CONFIG_STABILITY_OPTION_SHORT = "-csl"; String DISABLE_DEPLOYERS = 
"--disable-deployers"; - String DISABLE_DEPLOYERS_LABEL = ""; + String DISABLE_DEPLOYERS_OPTION_LABEL = ""; String DOCKER_IMAGE_NAME_OPTION = "--docker-image-name"; String DOCKER_IMAGE_NAME_OPTION_LABEL = ""; String DOCKER_IMAGE_NAME_OPTION_SHORT = "-di"; + String DRY_RUN_OPTION = "--dry-run"; + String ENABLE_DEPLOYERS = "--enable-deployers"; + String ENABLE_DEPLOYERS_OPTION_LABEL = ""; String ENV_FILE_OPTION = "--env-file"; String ENV_FILE_OPTION_SHORT = "-ef"; String ENV_FILE_OPTION_LABEL = ""; diff --git a/cli-support/src/main/java/org/wildfly/glow/cli/support/Utils.java b/cli-support/src/main/java/org/wildfly/glow/cli/support/Utils.java index 1cc560d1..681e6b7e 100644 --- a/cli-support/src/main/java/org/wildfly/glow/cli/support/Utils.java +++ b/cli-support/src/main/java/org/wildfly/glow/cli/support/Utils.java @@ -18,15 +18,22 @@ import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import org.jboss.galleon.api.config.GalleonProvisioningConfig; import org.jboss.galleon.universe.FeaturePackLocation; +import org.jboss.galleon.universe.maven.repo.MavenRepoManager; +import org.wildfly.channel.Channel; +import org.wildfly.channel.ChannelMapper; import org.wildfly.glow.AddOn; +import org.wildfly.glow.Arguments; import org.wildfly.glow.Layer; import org.wildfly.glow.LayerMapping; import org.wildfly.glow.ProvisioningUtils; +import org.wildfly.glow.ScanArguments; import org.wildfly.glow.maven.MavenResolver; /** @@ -58,8 +65,19 @@ public void consume(GalleonProvisioningConfig provisioning, Map a } }; + ScanArguments.Builder builder = Arguments.scanBuilder(); + MavenRepoManager repoManager; + List channels = Collections.emptyList(); + if (channelsFile != null) { + String content = Files.readString(channelsFile); + channels = ChannelMapper.fromString(content); + builder.setChannels(channels); + repoManager = MavenResolver.newMavenResolver(channels); 
+ } else { + repoManager = MavenResolver.newMavenResolver(); + } ProvisioningUtils.traverseProvisioning(consumer, context, provisioningXml, isLatest, serverVersion, - isPreview, MavenResolver.buildMavenResolver(channelsFile)); + isPreview, channels, repoManager); } public static void setSystemProperties(Set systemProperties) throws Exception { @@ -100,4 +118,58 @@ public static Map handleOpenShiftEnvFile(Path envFile) throws Ex } return extraEnv; } + + public static Map readConfigFile(Path file) throws Exception { + if (file == null) { + return Collections.emptyMap(); + } + if(!Files.exists(file)) { + throw new Exception("File " + file + " doesn't exist."); + } + Map map = new HashMap<>(); + for (String l : Files.readAllLines(file)) { + l = l.trim(); + if (!l.startsWith("#")) { + String[] split = l.split("="); + map.put(split[0].trim(), split[1].trim()); + } + } + return map; + } + + public static void addAddOnsFromConfig(Map config, Set addOns) throws Exception { + String val = config.get("add-ons"); + if(val != null) { + String[] addOnsArray = val.split(","); + for(String addOn : addOnsArray) { + addOns.add(addOn.trim()); + } + } + } + public static String getServerVersionFromConfig(Map config) throws Exception { + return config.get("server-version"); + } + public static void addDisableDeployersFromConfig(Map config, Set addOns) throws Exception { + String val = config.get("disable-deployers"); + if(val != null) { + String[] addOnsArray = val.split(","); + for(String addOn : addOnsArray) { + addOns.add(addOn.trim()); + } + } + } + public static void addEnableDeployersFromConfig(Map config, Set addOns) throws Exception { + String val = config.get("enable-deployers"); + if(val != null) { + String[] addOnsArray = val.split(","); + for(String addOn : addOnsArray) { + addOns.add(addOn.trim()); + } + } + } + + public static Boolean getHaFromConfig(Map config) { + String val = config.get("ha"); + return Boolean.valueOf(val); + } } diff --git a/cli/pom.xml b/cli/pom.xml 
index ef554880..b2e534b3 100644 --- a/cli/pom.xml +++ b/cli/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow jar diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java b/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java index f9765c31..bc8a49e9 100644 --- a/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java +++ b/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java @@ -48,7 +48,8 @@ import java.util.Set; import org.jboss.galleon.universe.FeaturePackLocation.ProducerSpec; import org.jboss.galleon.universe.maven.repo.MavenRepoManager; -import org.wildfly.channel.ChannelSession; +import org.wildfly.channel.Channel; +import org.wildfly.channel.ChannelMapper; import static org.wildfly.glow.Arguments.CLOUD_EXECUTION_CONTEXT; import static org.wildfly.glow.Arguments.COMPACT_PROPERTY; @@ -58,7 +59,6 @@ import static org.wildfly.glow.OutputFormat.OPENSHIFT; import org.wildfly.glow.StabilitySupport; import org.wildfly.glow.cli.support.Utils; -import org.wildfly.glow.maven.ChannelMavenArtifactRepositoryManager; @CommandLine.Command( name = Constants.SCAN_COMMAND, @@ -139,9 +139,12 @@ public OutputFormat convert(String value) throws Exception { @CommandLine.Option(names = {Constants.CLI_SCRIPT_OPTION_SHORT, Constants.CLI_SCRIPT_OPTION}, paramLabel = Constants.CLI_SCRIPT_OPTION_LABEL) Optional cliScriptFile; - @CommandLine.Option(names = Constants.DISABLE_DEPLOYERS, split = ",", paramLabel = Constants.ADD_ONS_OPTION_LABEL) + @CommandLine.Option(names = Constants.DISABLE_DEPLOYERS, split = ",", paramLabel = Constants.DISABLE_DEPLOYERS_OPTION_LABEL) Set disableDeployers = new LinkedHashSet<>(); + @CommandLine.Option(names = Constants.ENABLE_DEPLOYERS, split = ",", paramLabel = Constants.ENABLE_DEPLOYERS_OPTION_LABEL) + Set enableDeployers = new LinkedHashSet<>(); + @CommandLine.Option(names = {Constants.SYSTEM_PROPERTIES_OPTION_SHORT, 
Constants.SYSTEM_PROPERTIES_OPTION}, split = " ", paramLabel = Constants.SYSTEM_PROPERTIES_LABEL) Set systemProperties = new HashSet<>(); @@ -152,19 +155,35 @@ public OutputFormat convert(String value) throws Exception { @CommandLine.Option(names = {Constants.CHANNELS_OPTION_SHORT, Constants.CHANNELS_OPTION}, paramLabel = Constants.CHANNELS_OPTION_LABEL) Optional channelsFile; + @CommandLine.Option(names = {Constants.CONFIG_FILE_OPTION}, paramLabel = Constants.CONFIG_FILE_OPTION_LABEL) + Optional configFile; + + @CommandLine.Option(names = {Constants.APP_NAME_OPTION}, paramLabel = Constants.APP_NAME_OPTION_LABEL) + Optional appName; + + @CommandLine.Option(names = Constants.DRY_RUN_OPTION) + Optional dryRun; + @Override public Integer call() throws Exception { Utils.setSystemProperties(systemProperties); + Map configMap = Utils.readConfigFile(configFile.orElse(null)); HiddenPropertiesAccessor hiddenPropertiesAccessor = new HiddenPropertiesAccessor(); boolean compact = Boolean.parseBoolean(hiddenPropertiesAccessor.getProperty(COMPACT_PROPERTY)); if (!compact) { print("Wildfly Glow is scanning..."); } Builder builder = Arguments.scanBuilder(); - if (haProfile.orElse(false)) { - Set profiles = new HashSet<>(); - profiles.add(Constants.HA); + Set profiles = new HashSet<>(); + profiles.add(Constants.HA); + boolean haProfileEnabled = haProfile.orElse(false); + if (haProfileEnabled) { builder.setExecutionProfiles(profiles); + } else { + haProfileEnabled = Utils.getHaFromConfig(configMap); + if (haProfileEnabled) { + builder.setExecutionProfiles(profiles); + } } if (!layersForJndi.isEmpty()) { builder.setJndiLayers(layersForJndi); @@ -183,6 +202,11 @@ public Integer call() throws Exception { throw new Exception(Constants.SERVER_VERSION_OPTION + "can't be set when " + Constants.CHANNELS_OPTION + " is set."); } builder.setVersion(wildflyServerVersion.get()); + } else { + String vers = Utils.getServerVersionFromConfig(configMap); + if (vers != null) { + 
builder.setVersion(vers); + } } Map extraEnv = new HashMap<>(); Map buildExtraEnv = new HashMap<>(); @@ -196,6 +220,15 @@ public Integer call() throws Exception { } extraEnv.putAll(Utils.handleOpenShiftEnvFile(envFile.get())); } + if (dryRun.isPresent()) { + if (provision.isPresent()) { + if (!OPENSHIFT.equals(provision.get())) { + throw new Exception("--dry-run is only usable when --provision=" + OPENSHIFT + " option is set."); + } + } else { + throw new Exception("--dry-run is only usable when --provision=" + OPENSHIFT + " option is set."); + } + } if (buildEnvFile.isPresent()) { if (provision.isPresent()) { if (!OPENSHIFT.equals(provision.get())) { @@ -233,6 +266,7 @@ public Integer call() throws Exception { } } builder.setVerbose(verbose); + Utils.addAddOnsFromConfig(configMap, addOns); if (!addOns.isEmpty()) { builder.setUserEnabledAddOns(addOns); } @@ -248,9 +282,12 @@ public Integer call() throws Exception { if (!Files.exists(channelsFilePath)) { throw new Exception(channelsFilePath + " file doesn't exist"); } - ChannelSession session = MavenResolver.buildChannelSession(channelsFilePath); - builder.setChannelSession(session); - repoManager = new ChannelMavenArtifactRepositoryManager(session); + String content = Files.readString(channelsFilePath); + List channels = ChannelMapper.fromString(content); + builder.setChannels(channels); + repoManager = MavenResolver.newMavenResolver(channels); + } else { + repoManager = MavenResolver.newMavenResolver(); } if (provision.isPresent()) { if (BOOTABLE_JAR.equals(provision.get()) && cloud.orElse(false)) { @@ -269,9 +306,9 @@ public Integer call() throws Exception { } builder.setExcludeArchivesFromScan(excludeArchivesFromScan); - // Enforce community stability level. Doing so, any discovered features at a lower level are advertised + // Set a default community stability level. 
Doing so, any discovered features at a lower level are advertised String userSetConfigStability = null; - builder.setConfigStability(org.jboss.galleon.Constants.STABILITY_COMMUNITY); + builder.setDefaultConfigStability(org.jboss.galleon.Constants.STABILITY_COMMUNITY); if (stability.isPresent()) { if (configStability.isPresent()) { throw new Exception(Constants.CONFIG_STABILITY_OPTION + " can't be set when " + Constants.STABILITY_OPTION + " is set"); @@ -298,11 +335,12 @@ public Integer call() throws Exception { throw new Exception("Can only set a docker image name when provisioning a docker image. Remove the " + Constants.DOCKER_IMAGE_NAME_OPTION + " option"); } } + Utils.addDisableDeployersFromConfig(configMap, disableDeployers); + Utils.addEnableDeployersFromConfig(configMap, enableDeployers); builder.setIsCli(true); - MavenRepoManager directMavenResolver = MavenResolver.newMavenResolver(); - ScanResults scanResults = GlowSession.scan(repoManager == null ? directMavenResolver : repoManager, builder.build(), GlowMessageWriter.DEFAULT); + ScanResults scanResults = GlowSession.scan(repoManager, builder.build(), GlowMessageWriter.DEFAULT); ConfigurationResolver configurationResolver = new CLIConfigurationResolver((provision.isPresent() && provision.get().equals(OPENSHIFT)), - disableDeployers); + disableDeployers, enableDeployers); scanResults.outputInformation(configurationResolver); if (provision.isEmpty()) { if (!compact) { @@ -363,7 +401,11 @@ public Integer call() throws Exception { break; } case OPENSHIFT: { - print("@|bold Openshift build and deploy...|@"); + if(dryRun.isPresent()) { + print("@|bold Openshift resources generation...|@"); + } else { + print("@|bold Openshift build and deploy...|@"); + } break; } } @@ -411,6 +453,15 @@ public Integer call() throws Exception { } break; + } + case CHANNEL_FILE: { + switch (provision.get()) { + case PROVISIONING_XML: { + print("@|bold WildFly Channel is located in " + rel + " file|@"); + } + } + break; + } case 
SERVER_DIR: { if (cloud.orElse(false)) { @@ -424,20 +475,27 @@ public Integer call() throws Exception { } if (OutputFormat.OPENSHIFT.equals(provision.get())) { OpenShiftSupport.deploy(deployments, + appName.orElse(null), GlowMessageWriter.DEFAULT, target, scanResults, - haProfile.orElse(false), + haProfileEnabled, extraEnv, buildExtraEnv, disableDeployers, + enableDeployers, initScriptFile.orElse(null), cliScriptFile.orElse(null), new OpenShiftConfiguration.Builder().build(), - directMavenResolver, + MavenResolver.newMavenResolver(), userSetConfigStability, - Collections.emptyMap()); - print("@|bold \nOpenshift build and deploy DONE.|@"); + Collections.emptyMap(), dryRun.isPresent(), + scanResults.getChannels()); + if(dryRun.isPresent()) { + print("@|bold \nCloud resources generation DONE.|@"); + } else { + print("@|bold \nOpenshift build and deploy DONE.|@"); + } } else { if (content.getDockerImageName() != null) { print("@|bold To run the image call: '[docker | podman] run -p 8080:8080 -p 9990:9990 " + content.getDockerImageName() + "'|@"); diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java b/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java index 9fd8e5c6..8da3d325 100644 --- a/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java +++ b/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java @@ -16,12 +16,15 @@ */ package org.wildfly.glow.cli.commands; +import java.nio.file.Files; import org.wildfly.glow.cli.support.AbstractCommand; import org.wildfly.glow.cli.support.Constants; import org.wildfly.glow.ProvisioningUtils; import java.nio.file.Path; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.ServiceLoader; @@ -32,11 +35,15 @@ import org.jboss.galleon.universe.FeaturePackLocation; import 
org.jboss.galleon.universe.FeaturePackLocation.FPID; import org.jboss.galleon.universe.FeaturePackLocation.ProducerSpec; +import org.jboss.galleon.universe.maven.repo.MavenRepoManager; +import org.wildfly.channel.Channel; +import org.wildfly.channel.ChannelMapper; import org.wildfly.glow.maven.MavenResolver; import org.wildfly.glow.Arguments; import org.wildfly.glow.FeaturePacks; import org.wildfly.glow.Layer; import org.wildfly.glow.LayerMapping; +import org.wildfly.glow.ScanArguments; import org.wildfly.glow.deployment.openshift.api.Deployer; import picocli.CommandLine; @@ -98,7 +105,18 @@ public void consume(GalleonProvisioningConfig provisioning, Map a print(configStr); } }; - ProvisioningUtils.traverseProvisioning(consumer, context, provisioningXml.orElse(null), wildflyServerVersion.isEmpty(), vers, wildflyPreview.orElse(false), MavenResolver.buildMavenResolver(channelsFile.orElse(null))); + ScanArguments.Builder builder = Arguments.scanBuilder(); + MavenRepoManager repoManager; + List channels = Collections.emptyList(); + if (channelsFile.isPresent()) { + String content = Files.readString(channelsFile.get()); + channels = ChannelMapper.fromString(content); + builder.setChannels(channels); + repoManager = MavenResolver.newMavenResolver(channels); + } else { + repoManager = MavenResolver.newMavenResolver(); + } + ProvisioningUtils.traverseProvisioning(consumer, context, provisioningXml.orElse(null), wildflyServerVersion.isEmpty(), vers, wildflyPreview.orElse(false), channels, repoManager); return 0; } diff --git a/cli/src/main/resources/UsageMessages.properties b/cli/src/main/resources/UsageMessages.properties index 67501ee5..6100ea93 100644 --- a/cli/src/main/resources/UsageMessages.properties +++ b/cli/src/main/resources/UsageMessages.properties @@ -8,18 +8,21 @@ glow.welcomeMessage = @|bold \nWelcome to WildFly Glow CLI!|@\n\ \n\ WildFly Glow helps you create a WildFly server based on the content of your WAR/JAR/EAR deployment(s).\n\ Call @|fg(yellow) 
wildfly-glow scan |@ to get started.\n - +app-name= Name to use when creating OpenShift deployment. Optional, by default the scanned deployment name (without extension) is used. add-layers-for-jndi = List of layers. In the case some layers are missing, consider adding them manually with this option. add-ons = List of add-ons to enable. To get the list of possible add-ons, use the @|fg(yellow) show-add-ons|@ command. batch = Batch mode disables any colorization of the output. build-env-file = The path to a file that contains environment variables (in the form env=value) to be passed to the OpenShift build. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning. channels= Path to a yaml file containing one or more channels. +config= Path to a file containing the options. cli-script = The path to a CLI script file that only contains CLI commands in order to fine tune the server on OpenShift deployment. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning. cloud = When deploying your application to the cloud. It will fine tune the WildFly server for the cloud. N.B.: Building a Bootable JAR is not supported for the cloud. config-stability-level = Specify a stability to be used when provisioning the server configuration. WildFly Glow can identify server features that would be not provisioned if no stability level were specified. The stability can be @|fg(yellow) default|@, @|fg(yellow) community|@, @|fg(yellow) preview|@, @|fg(yellow) experimental|@. deployments = List of path to war|jar|ear files to scan. disable-deployers = A comma separated list of deployer names to disable. To retrieve all the deployer names call the @|fg(yellow) show-configuration|@ operation. To disable them all, use @|fg(yellow) ALL|@ value. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning. docker-image-name = Name of the docker image when --provision=DOCKER is specified. By default an image name is computed based on the WildFly server version. 
+dry-run = When using the provision target 'openshift', set this option to only generate the Dockerfiles and OpenShift resources allowing you to set up your docker builds and application deployment outside of the OpenShift cluster, in a flexible way. +enable-deployers = A comma separated list of deployer names to enable. Can only be used when @|fg(yellow)--disable-deployers|@ is set to @|fg(yellow)ALL|@. To retrieve all the deployer names call the @|fg(yellow) show-configuration|@ operation. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning. env-file = The path to a file that contains environment variables (in the form env=value) to be passed to the OpenShift deployment. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning. exclude-archives-from-scan = Comma-separated list of archives to exclude from scanning. Wildcards ('*') are allowed. N.B. Just the name of the archive is matched, do not attempt to specify a full path within the jar. The following example would be a valid list: @|fg(italic) my-jar.jar,*-internal.rar|@: fails-on-error= By default provisioning will be aborted if an error is reported by WildFly Glow. Set this option to false to not fail and process with the provisioning. 
diff --git a/core/pom.xml b/core/pom.xml index 1c955cc0..4eb96a12 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-core diff --git a/core/src/main/java/org/wildfly/glow/Arguments.java b/core/src/main/java/org/wildfly/glow/Arguments.java index 45b3a8d9..a89b51e0 100644 --- a/core/src/main/java/org/wildfly/glow/Arguments.java +++ b/core/src/main/java/org/wildfly/glow/Arguments.java @@ -7,7 +7,7 @@ import java.util.List; import java.util.Set; import java.util.regex.Pattern; -import org.wildfly.channel.ChannelSession; +import org.wildfly.channel.Channel; public class Arguments implements GoOfflineArguments, ScanArguments { @@ -35,8 +35,9 @@ public class Arguments implements GoOfflineArguments, ScanArguments { private final Set excludeArchivesFromScan; private final String configStability; private final String packageStability; + private final String defaultConfigStability; private final boolean isCli; - private final ChannelSession channelSession; + private final List channels; protected Arguments( String executionContext, @@ -54,8 +55,9 @@ protected Arguments( Set excludeArchivesFromScan, String configStability, String packageStability, + String defaultConfigStability, boolean isCli, - ChannelSession channelSession) { + List channels) { this.executionProfiles = executionProfiles; this.userEnabledAddOns = userEnabledAddOns; this.binaries = binaries; @@ -71,7 +73,7 @@ protected Arguments( this.excludeArchivesFromScan = excludeArchivesFromScan; this.configStability = configStability; this.packageStability = packageStability; - + this.defaultConfigStability = defaultConfigStability; HiddenPropertiesAccessor hiddenPropertiesAccessor = new HiddenPropertiesAccessor(); this.compact = Boolean.parseBoolean(hiddenPropertiesAccessor.getProperty(COMPACT_PROPERTY)); String manualLayers = hiddenPropertiesAccessor.getProperty(MANUAL_LAYERS_PROPERTY); @@ -81,7 +83,7 @@ protected 
Arguments( this.manualLayers = new HashSet<>(Arrays.asList(manualLayers.split(","))); } this.isCli = isCli; - this.channelSession = channelSession; + this.channels = channels; } /** @@ -197,6 +199,11 @@ public String getPackageStability() { return packageStability; } + @Override + public String getDefaultConfigStability() { + return defaultConfigStability; + } + /** * @return the isCli */ @@ -209,8 +216,8 @@ public boolean isCli() { * @return the channel session */ @Override - public ChannelSession getChannelSession() { - return channelSession; + public List getChannels() { + return channels; } static GoOfflineArguments.Builder goOfflineBuilder() { diff --git a/core/src/main/java/org/wildfly/glow/BaseArgumentsBuilder.java b/core/src/main/java/org/wildfly/glow/BaseArgumentsBuilder.java index 20d0b8ff..37e5d83a 100644 --- a/core/src/main/java/org/wildfly/glow/BaseArgumentsBuilder.java +++ b/core/src/main/java/org/wildfly/glow/BaseArgumentsBuilder.java @@ -23,7 +23,7 @@ import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; -import org.wildfly.channel.ChannelSession; +import org.wildfly.channel.Channel; public class BaseArgumentsBuilder { protected Set executionProfiles = Collections.emptySet(); @@ -42,8 +42,9 @@ public class BaseArgumentsBuilder { protected Set excludeJarsFromScan = Collections.emptySet(); protected String packageStability; protected String configStability; + protected String defaultConfigStability; protected boolean isCli; - protected ChannelSession channelSession; + protected List channels; protected BaseArgumentsBuilder() { @@ -70,7 +71,8 @@ public Arguments build() { excludeJarsFromScan, configStability, packageStability, + defaultConfigStability, isCli, - channelSession); + channels); } } diff --git a/core/src/main/java/org/wildfly/glow/GlowSession.java b/core/src/main/java/org/wildfly/glow/GlowSession.java index 908e675d..5595d117 100644 --- a/core/src/main/java/org/wildfly/glow/GlowSession.java +++ 
b/core/src/main/java/org/wildfly/glow/GlowSession.java @@ -61,9 +61,9 @@ import org.jboss.galleon.api.config.GalleonFeaturePackConfig; import org.jboss.galleon.api.config.GalleonProvisioningConfig; import org.jboss.galleon.universe.UniverseResolver; -import org.wildfly.channel.ChannelSession; -import org.wildfly.channel.NoStreamFoundException; -import org.wildfly.channel.VersionResult; +import org.jboss.galleon.universe.maven.MavenArtifact; +import org.wildfly.channel.Channel; +import org.wildfly.channel.ChannelMapper; import static org.wildfly.glow.error.ErrorLevel.ERROR; import org.wildfly.plugin.tools.bootablejar.BootableJarSupport; @@ -82,11 +82,17 @@ public class GlowSession { private final MavenRepoManager resolver; private final Arguments arguments; private final GlowMessageWriter writer; - - private GlowSession(MavenRepoManager resolver, Arguments arguments, GlowMessageWriter writer) { - this.resolver = resolver; + private final List channels = new ArrayList<>(); + private GlowSession(MavenRepoManager resolver, Arguments arguments, GlowMessageWriter writer) throws Exception { this.arguments = arguments; this.writer = writer; + MavenRepoManager repoManager = resolver; + if (!Files.exists(OFFLINE_ZIP)) { + if (arguments.getChannels() != null) { + channels.addAll(arguments.getChannels()); + } + } + this.resolver = repoManager; } public static void goOffline(MavenRepoManager resolver, GoOfflineArguments arguments, GlowMessageWriter writer) throws Exception { @@ -137,7 +143,6 @@ public static ScanResults scan(MavenRepoManager resolver, ScanArguments argument } public ScanResults scan() throws Exception { - Set layers = new LinkedHashSet<>(); Set possibleAddOns = new TreeSet<>(); ErrorIdentificationSession errorSession = new ErrorIdentificationSession(); @@ -165,38 +170,32 @@ public ScanResults scan() throws Exception { } else { provisioning = provider.newProvisioningBuilder(config).setInstallationHome(fakeHome).build(); } - // Channel handling + // Handle cases 
were no version is provided Map fpVersions = new HashMap<>(); - Map resolvedInChannel = new HashMap<>(); - if (arguments.getChannelSession() != null) { - ChannelSession channelSession = arguments.getChannelSession(); - // Compute versions based on channel. - GalleonProvisioningConfig.Builder outputConfigBuilder = GalleonProvisioningConfig.builder(); - for (GalleonFeaturePackConfig dep : config.getFeaturePackDeps()) { - FeaturePackLocation.FPID fpid = Utils.toMavenCoordinates(dep.getLocation().getFPID(), universeResolver); - String[] coordinates = fpid.toString().split(":"); - String groupId = coordinates[0]; - String artifactId = coordinates[1]; - FeaturePackLocation loc; - try { - VersionResult res = channelSession.findLatestMavenArtifactVersion(groupId, artifactId, - "zip", null, null); - loc = dep.getLocation().replaceBuild(res.getVersion()); - } catch(NoStreamFoundException ex) { - writer.warn("WARNING: Feature-pack " + dep.getLocation() + " is not present in the configured channel, ignoring it."); - continue; - } - outputConfigBuilder.addFeaturePackDep(loc); - fpVersions.put(fpid.getProducer(), loc.getFPID()); - resolvedInChannel.put(fpid.getProducer(), loc.getFPID()); - } - config = outputConfigBuilder.build(); - } else { - for (GalleonFeaturePackConfig dep : config.getFeaturePackDeps()) { - FeaturePackLocation.FPID fpid = Utils.toMavenCoordinates(dep.getLocation().getFPID(), universeResolver); - fpVersions.put(fpid.getProducer(), dep.getLocation().getFPID()); + Map originalVersions = new HashMap<>(); + // Resolve feature-packs + GalleonProvisioningConfig.Builder outputConfigBuilder = GalleonProvisioningConfig.builder(); + for (GalleonFeaturePackConfig dep : config.getFeaturePackDeps()) { + FeaturePackLocation.FPID fpid = Utils.toMavenCoordinates(dep.getLocation().getFPID(), universeResolver); + String[] coordinates = fpid.toString().split(":"); + String groupId = coordinates[0]; + String artifactId = coordinates[1]; + String version = null; + MavenArtifact 
artifact = new MavenArtifact(); + artifact.setArtifactId(artifactId); + artifact.setGroupId(groupId); + if(coordinates.length >= 3) { + version = coordinates[2]; } + artifact.setVersion(version); + artifact.setExtension("zip"); + resolver.resolve(artifact); + FeaturePackLocation loc = dep.getLocation().replaceBuild(artifact.getVersion()); + outputConfigBuilder.addFeaturePackDep(loc); + fpVersions.put(fpid.getProducer(), loc.getFPID()); + originalVersions.put(fpid.getProducer(), fpid); } + config = outputConfigBuilder.build(); // BUILD MODEL Map> fpDependencies = new HashMap<>(); Map all @@ -536,10 +535,11 @@ public ScanResults scan() throws Exception { } // Identify the active feature-packs. GalleonProvisioningConfig activeConfig = buildProvisioningConfig(config, - universeResolver, allBaseLayers, baseLayer, decorators, excludedLayers, fpDependencies, arguments.getConfigName(), arguments.getConfigStability(), arguments.getPackageStability(), resolvedInChannel); + universeResolver, allBaseLayers, baseLayer, decorators, excludedLayers, fpDependencies, arguments.getConfigName(), arguments.getConfigStability(), arguments.getPackageStability(), originalVersions); // Handle stability - if (arguments.getConfigStability() != null) { + String configStability = arguments.getConfigStability() == null ? 
arguments.getDefaultConfigStability() : arguments.getConfigStability(); + if (configStability != null) { List checkLayers = new ArrayList<>(); checkLayers.add(baseLayer); checkLayers.addAll(decorators); @@ -558,7 +558,7 @@ public ScanResults scan() throws Exception { List lst = rt.getAllFeatures(); for (GalleonFeatureSpec spec : lst) { String stab = spec.getStability(); - if (stab != null && !StabilitySupport.enables(arguments.getConfigStability(), stab)) { + if (stab != null && !StabilitySupport.enables(configStability, stab)) { Set set = excludedFeatures.get(layer); if (set == null) { set = new HashSet<>(); @@ -568,7 +568,7 @@ public ScanResults scan() throws Exception { } for (GalleonFeatureParamSpec pspec : spec.getParams()) { String pstab = pspec.getStability(); - if (pstab != null && !StabilitySupport.enables(arguments.getConfigStability(), pstab)) { + if (pstab != null && !StabilitySupport.enables(configStability, pstab)) { Set set = excludedFeatures.get(layer); if (set == null) { set = new HashSet<>(); @@ -619,7 +619,8 @@ public ScanResults scan() throws Exception { errorSession, excludedPackages, excludedFeatures, - fpVersions + fpVersions, + channels ); return scanResults; @@ -640,7 +641,8 @@ OutputContent outputConfig(ScanResults scanResults, Path target, String dockerIm writer.warn("You are provisioning a server although some errors still exist. 
You should first fix them."); } } - if (!OutputFormat.PROVISIONING_XML.equals(arguments.getOutput()) && !OutputFormat.OPENSHIFT.equals(arguments.getOutput())) { + if (!OutputFormat.PROVISIONING_XML.equals(arguments.getOutput()) && + !OutputFormat.OPENSHIFT.equals(arguments.getOutput())) { Path generatedArtifact = provisionServer(arguments.getBinaries(), scanResults.getProvisioningConfig(), resolver, arguments.getOutput(), arguments.isCloud(), target); @@ -674,6 +676,12 @@ OutputContent outputConfig(ScanResults scanResults, Path target, String dockerIm Path prov = target.resolve("provisioning.xml"); provisioning.storeProvisioningConfig(scanResults.getProvisioningConfig(),prov); files.put(OutputContent.OutputFile.PROVISIONING_XML_FILE, prov.toAbsolutePath()); + if(!channels.isEmpty()) { + String channelsContent = ChannelMapper.toYaml(channels); + Path channelsFile = target.resolve("channel.yaml"); + Files.write(channelsFile, channelsContent.getBytes()); + files.put(OutputContent.OutputFile.CHANNEL_FILE, channelsFile.toAbsolutePath()); + } } StringBuilder envFileContent = new StringBuilder(); if (!scanResults.getSuggestions().getStronglySuggestedConfigurations().isEmpty() || @@ -997,7 +1005,7 @@ private static GalleonProvisioningConfig buildProvisioningConfig(GalleonProvisio // Reset the version if ruled by channel FPID orig = channelVersions.get(cfg.getLocation().getProducer()); if ( orig != null && orig.getLocation().isMavenCoordinates()) { - gav = gav.getLocation().replaceBuild("").getFPID(); + gav = gav.getLocation().replaceBuild(orig.getBuild()).getFPID(); } activeFeaturePacks.add(gav); } diff --git a/core/src/main/java/org/wildfly/glow/OutputContent.java b/core/src/main/java/org/wildfly/glow/OutputContent.java index 9a3e6f84..30e5f947 100644 --- a/core/src/main/java/org/wildfly/glow/OutputContent.java +++ b/core/src/main/java/org/wildfly/glow/OutputContent.java @@ -17,6 +17,7 @@ public enum OutputFile { DOCKER_FILE, SERVER_DIR, PROVISIONING_XML_FILE, + 
CHANNEL_FILE, ENV_FILE } diff --git a/core/src/main/java/org/wildfly/glow/OutputFormat.java b/core/src/main/java/org/wildfly/glow/OutputFormat.java index dfe73781..ccbff3f7 100644 --- a/core/src/main/java/org/wildfly/glow/OutputFormat.java +++ b/core/src/main/java/org/wildfly/glow/OutputFormat.java @@ -27,7 +27,6 @@ public enum OutputFormat { BOOTABLE_JAR("bootable-jar", "Provision a WildFly bootable jar."), DOCKER_IMAGE("docker-image", "Produce a docker image."), OPENSHIFT("openshift", "Build and deploy on OpenShift."); - public final String name; public final String description; diff --git a/core/src/main/java/org/wildfly/glow/ProvisioningUtils.java b/core/src/main/java/org/wildfly/glow/ProvisioningUtils.java index 586f00e3..60ab5e4d 100644 --- a/core/src/main/java/org/wildfly/glow/ProvisioningUtils.java +++ b/core/src/main/java/org/wildfly/glow/ProvisioningUtils.java @@ -19,6 +19,7 @@ import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import org.jboss.galleon.api.GalleonBuilder; @@ -28,6 +29,7 @@ import org.jboss.galleon.universe.UniverseResolver; import org.jboss.galleon.universe.maven.repo.MavenRepoManager; import org.jboss.galleon.util.IoUtils; +import org.wildfly.channel.Channel; import static org.wildfly.glow.GlowSession.OFFLINE_CONTENT; /** @@ -43,7 +45,7 @@ void consume(GalleonProvisioningConfig provisioning, Map all, } public static void traverseProvisioning(ProvisioningConsumer consumer, - String executionContext, Path provisioningXML, boolean isLatest, String wildflyServerVersion, boolean wildflyPreview, MavenRepoManager resolver) throws Exception { + String executionContext, Path provisioningXML, boolean isLatest, String wildflyServerVersion, boolean wildflyPreview, List channels, MavenRepoManager resolver) throws Exception { UniverseResolver universeResolver = UniverseResolver.builder().addArtifactResolver(resolver).build(); GalleonBuilder provider = 
new GalleonBuilder(); provider.addArtifactResolver(resolver); diff --git a/core/src/main/java/org/wildfly/glow/ScanArguments.java b/core/src/main/java/org/wildfly/glow/ScanArguments.java index 74345aa0..6c4bd616 100644 --- a/core/src/main/java/org/wildfly/glow/ScanArguments.java +++ b/core/src/main/java/org/wildfly/glow/ScanArguments.java @@ -23,7 +23,7 @@ import java.util.List; import java.util.Set; import java.util.regex.Pattern; -import org.wildfly.channel.ChannelSession; +import org.wildfly.channel.Channel; public interface ScanArguments { @@ -76,8 +76,10 @@ public interface ScanArguments { String getConfigStability(); String getPackageStability(); + String getDefaultConfigStability(); + boolean isCli(); - ChannelSession getChannelSession(); + List getChannels(); default Builder createScanArgumentsBuilder() { return new Builder(); @@ -165,12 +167,16 @@ public Builder setConfigStability(String stability) { this.configStability = stability; return this; } + public Builder setDefaultConfigStability(String stability) { + this.defaultConfigStability = stability; + return this; + } public Builder setIsCli(boolean isCli) { this.isCli = isCli; return this; } - public Builder setChannelSession(ChannelSession channelSession) { - this.channelSession = channelSession; + public Builder setChannels(List channels) { + this.channels = channels; return this; } } diff --git a/core/src/main/java/org/wildfly/glow/ScanResults.java b/core/src/main/java/org/wildfly/glow/ScanResults.java index bae79fcf..48a87050 100644 --- a/core/src/main/java/org/wildfly/glow/ScanResults.java +++ b/core/src/main/java/org/wildfly/glow/ScanResults.java @@ -17,6 +17,7 @@ package org.wildfly.glow; import java.nio.file.Path; +import java.util.List; import org.wildfly.glow.error.ErrorIdentificationSession; import java.util.Map; @@ -25,6 +26,7 @@ import org.jboss.galleon.api.config.GalleonProvisioningConfig; import org.jboss.galleon.universe.FeaturePackLocation.FPID; import 
org.jboss.galleon.universe.FeaturePackLocation.ProducerSpec; +import org.wildfly.channel.Channel; public class ScanResults implements AutoCloseable { @@ -43,6 +45,7 @@ public class ScanResults implements AutoCloseable { private final Set excludedPackages; private final Map> excludedFeatures; private final Map fpVersions; + private final List channels; ScanResults(GlowSession glowSession, Set discoveredLayers, Set excludedLayers, @@ -57,7 +60,8 @@ public class ScanResults implements AutoCloseable { ErrorIdentificationSession errorSession, Set excludedPackages, Map> excludedFeatures, - Map fpVersions) { + Map fpVersions, + List channels) { this.glowSession = glowSession; this.discoveredLayers = discoveredLayers; this.excludedLayers = excludedLayers; @@ -73,6 +77,7 @@ public class ScanResults implements AutoCloseable { this.excludedPackages = excludedPackages; this.excludedFeatures = excludedFeatures; this.fpVersions = fpVersions; + this.channels = channels; } public Set getDiscoveredLayers() { @@ -169,4 +174,8 @@ public Map> getExcludedFeatures() { public Map getFeaturePackVersions() { return fpVersions; } + + public List getChannels() { + return channels; + } } diff --git a/core/src/main/java/org/wildfly/glow/ScanResultsPrinter.java b/core/src/main/java/org/wildfly/glow/ScanResultsPrinter.java index 96e80bde..bc383e26 100644 --- a/core/src/main/java/org/wildfly/glow/ScanResultsPrinter.java +++ b/core/src/main/java/org/wildfly/glow/ScanResultsPrinter.java @@ -228,11 +228,11 @@ private void detailed(ScanArguments arguments, ScanResults scanResults) throws E } writer.warn(""); } - if (arguments.getConfigStability() != null || arguments.getPackageStability() != null) { + if (arguments.getDefaultConfigStability() != null || arguments.getConfigStability() != null || arguments.getPackageStability() != null) { boolean needCR = false; if (!scanResults.getExcludedFeatures().isEmpty()) { - writer.warn("The following features would be disabled if provisioning a server at the '" 
- + arguments.getConfigStability() + "' stability level. Make sure to set the '--config-stability-level=' option:"); + String msg = arguments.getConfigStability() == null ? "" : " at the '" + arguments.getConfigStability() + "' stability level"; + writer.warn("The following features would be disabled if provisioning a server" + msg + ". Make sure to set the '--config-stability-level=' option:"); needCR = true; for (Layer l : scanResults.getExcludedFeatures().keySet()) { writer.warn(l.getName() + " features:"); diff --git a/doc-plugin/pom.xml b/doc-plugin/pom.xml index c8db1a7f..6bf717bd 100644 --- a/doc-plugin/pom.xml +++ b/doc-plugin/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-doc-plugin A plugin to generate adoc documentation for layer rules diff --git a/docs/guide/intro/index.adoc b/docs/guide/intro/index.adoc index 804ff0b8..e93cde6c 100644 --- a/docs/guide/intro/index.adoc +++ b/docs/guide/intro/index.adoc @@ -80,6 +80,47 @@ If WildFly Glow detects the need for these technologies, it will automatically d When the HA profile is enabled, 2 pods will be created for the deployment. The JGroups `dns.DNS_PING` protocol is enabled for the members of the cluster to discover each others. +#### OpenShift dry-run mode + +When using the `--dry-run` option, WildFly glow will not interact with the cluster but will generate all the needed resources that are required to build +and deploy your application in the cluster of your choice. You can then assemble the images and deploy to your cluster in a flexible way. +The generated resources allow, for example, to setup a link:https://tekton.dev/[Tekton] pipeline to produce images, +and use link:https://argoproj.github.io/cd/[Argo CD] to manage your k8s resources. + +##### Docker files for the server and application images + +WildFly Glow separates the build of your application image into 2 steps. 
A first step to build the server image that only contains the WildFly server. +A second step that, from the server image, produces an application image that contains the deployments, the WildFly CLI script (if any) and the bash initialization script (if any). + +In order to share the server image between various applications, a server image tag is computed based on all that makes the content of the server unique: + +* The generated provisioning XML file +* Env variables (if any) used at provisioning time. +* WildFly channel (if any) used at provisioning time. + +###### Building and pushing the server image + +Once WildFly Glow has generated the resources, you need to: + +* Provision the server using the generated `/galleon/provisioning.xml` file (using Galleon CLI) inside the directory `/docker/server/server`. +* Build and push the server image using the server tag generated in `/docker/image.properties` and the docker file `/docker/server/Dockerfile`. +Make sure to run the build inside the directory `/docker/server`, that is the expected build docker context. + +###### Building and pushing the application image + +Once you have pushed the server in a container image repository, you need to: + +* In the `/docker/app/Dockerfile` file replace the `WILDFLY_GLOW_SERVER_IMAGE_REPOSITORY` placeholder with the pushed server image repository. +* Build and push the application image using the application tag generated in `/docker/image.properties` and the docker file `/docker/app/Dockerfile`. +Make sure to run the build inside the directory `/docker/app`, that is the expected build docker context. + +##### Updating the k8s deployment with the generated application image + +The generated deployment (or the statefulSet in case of HA application) in `/resources/deployment/` directory references the container image using the placeholder `WILDFLY_GLOW_APP_IMAGE_REPOSITORY`. Replace it +with the actual application image repository. + +You are good to go. 
You can now create the k8s resources in your cluster. + #### WildFly additional features discovery Some WildFly server features can't be discovered by scanning application deployment. A good example is the usage of SSL to secure the http diff --git a/docs/pom.xml b/docs/pom.xml index 44a9df0a..0bf7738d 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -23,7 +23,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-doc pom diff --git a/maven-resolver/pom.xml b/maven-resolver/pom.xml index bdb6d805..095a02bf 100644 --- a/maven-resolver/pom.xml +++ b/maven-resolver/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-maven-resolver diff --git a/maven-resolver/src/main/java/org/wildfly/glow/maven/ChannelMavenArtifactRepositoryManager.java b/maven-resolver/src/main/java/org/wildfly/glow/maven/ChannelMavenArtifactRepositoryManager.java index 1d9c519a..a54b3a62 100644 --- a/maven-resolver/src/main/java/org/wildfly/glow/maven/ChannelMavenArtifactRepositoryManager.java +++ b/maven-resolver/src/main/java/org/wildfly/glow/maven/ChannelMavenArtifactRepositoryManager.java @@ -16,28 +16,72 @@ */ package org.wildfly.glow.maven; +import static org.wildfly.channel.maven.VersionResolverFactory.DEFAULT_REPOSITORY_MAPPER; + import java.nio.file.Path; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.function.Function; import java.util.regex.Pattern; -import org.jboss.galleon.api.MavenStreamResolver; +import org.apache.maven.repository.internal.MavenRepositorySystemUtils; +import org.eclipse.aether.DefaultRepositorySystemSession; +import org.eclipse.aether.RepositorySystem; +import org.eclipse.aether.RepositorySystemSession; +import org.eclipse.aether.artifact.Artifact; +import org.eclipse.aether.artifact.DefaultArtifact; +import org.eclipse.aether.repository.RemoteRepository; +import org.eclipse.aether.resolution.VersionRangeRequest; 
+import org.eclipse.aether.resolution.VersionRangeResolutionException; +import org.eclipse.aether.resolution.VersionRangeResult; +import org.jboss.galleon.api.MavenStreamResolver; import org.jboss.galleon.universe.maven.MavenArtifact; import org.jboss.galleon.universe.maven.MavenUniverseException; import org.jboss.galleon.universe.maven.repo.MavenRepoManager; import org.wildfly.channel.ArtifactTransferException; +import org.wildfly.channel.Channel; import org.wildfly.channel.ChannelSession; import org.wildfly.channel.NoStreamFoundException; +import org.wildfly.channel.Repository; import org.wildfly.channel.UnresolvedMavenArtifactException; +import org.wildfly.channel.VersionResult; +import org.wildfly.channel.maven.VersionResolverFactory; import org.wildfly.channel.spi.ChannelResolvable; public class ChannelMavenArtifactRepositoryManager implements MavenRepoManager, ChannelResolvable, MavenStreamResolver { - private static final String REQUIRE_CHANNEL_FOR_ALL_ARTIFACT = "org.wildfly.plugins.galleon.all.artifact.requires.channel.resolution"; - private final ChannelSession channelSession; + private final RepositorySystem system; + private final DefaultRepositorySystemSession session; + private final List repositories; + + public ChannelMavenArtifactRepositoryManager(List channels, + RepositorySystem system, + RepositorySystemSession contextSession, + List repositories) + throws Exception { + session = MavenRepositorySystemUtils.newSession(); + this.repositories = repositories; + session.setLocalRepositoryManager(contextSession.getLocalRepositoryManager()); + Map mapping = new HashMap<>(); + for (RemoteRepository r : repositories) { + mapping.put(r.getId(), r); + } + Function mapper = r -> { + RemoteRepository rep = mapping.get(r.getId()); + if (rep == null) { + rep = DEFAULT_REPOSITORY_MAPPER.apply(r); + } + return rep; + }; + VersionResolverFactory factory = new VersionResolverFactory(system, session, mapper); + channelSession = new ChannelSession(channels, 
factory); + this.system = system; + } - public ChannelMavenArtifactRepositoryManager(ChannelSession channelSession) { - this.channelSession = channelSession; + public ChannelSession getChannelSession() { + return channelSession; } @Override @@ -47,23 +91,20 @@ public void resolve(MavenArtifact artifact) throws MavenUniverseException { } catch (ArtifactTransferException ex) { throw new MavenUniverseException(ex.getLocalizedMessage(), ex); } catch (NoStreamFoundException ex) { - boolean requireChannel = Boolean.parseBoolean(artifact.getMetadata().get(REQUIRE_CHANNEL_FOR_ALL_ARTIFACT)); - - // unable to resolve the artifact through the channel. - // if the version is defined, let's resolve it directly - if (artifact.getVersion() == null) { - throw new MavenUniverseException(ex.getLocalizedMessage(), ex); - } - try { - - org.wildfly.channel.MavenArtifact mavenArtifact = channelSession.resolveDirectMavenArtifact( - artifact.getGroupId(), artifact.getArtifactId(), artifact.getExtension(), artifact.getClassifier(), - artifact.getVersion()); - artifact.setPath(mavenArtifact.getFile().toPath()); - } catch (UnresolvedMavenArtifactException e) { - // if the artifact can not be resolved directly either, we abort - throw new MavenUniverseException(e.getLocalizedMessage(), e); - } + // unable to resolve the artifact through the channel. 
+ // if the version is defined, let's resolve it directly + if (artifact.getVersion() == null || artifact.getVersion().isEmpty()) { + throw new MavenUniverseException(ex.getLocalizedMessage(), ex); + } + try { + org.wildfly.channel.MavenArtifact mavenArtifact = channelSession.resolveDirectMavenArtifact( + artifact.getGroupId(), artifact.getArtifactId(), artifact.getExtension(), artifact.getClassifier(), + artifact.getVersion()); + artifact.setPath(mavenArtifact.getFile().toPath()); + } catch (UnresolvedMavenArtifactException e) { + // if the artifact can not be resolved directly either, we abort + throw new MavenUniverseException(e.getLocalizedMessage(), e); + } } } @@ -76,9 +117,7 @@ private void resolveFromChannels(MavenArtifact artifact) throws UnresolvedMavenA @Override public void resolveLatestVersion(MavenArtifact artifact) throws MavenUniverseException { - // To resolve community universe - MavenRepoManager resolver = MavenResolver.newMavenResolver(); - resolver.resolveLatestVersion(artifact); + resolveLatestVersion(artifact, null, false); } @Override @@ -94,33 +133,37 @@ public boolean isLatestVersionResolved(MavenArtifact artifact, String lowestQual @Override public void resolveLatestVersion(MavenArtifact artifact, String lowestQualifier, Pattern includeVersion, Pattern excludeVersion) throws MavenUniverseException { - // To resolve community universe - MavenRepoManager resolver = MavenResolver.newMavenResolver(); - resolver.resolveLatestVersion(artifact, lowestQualifier, includeVersion, excludeVersion); + resolveLatestVersion(artifact, null, false); } @Override public void resolveLatestVersion(MavenArtifact artifact, String lowestQualifier, boolean locallyAvailable) throws MavenUniverseException { - // To resolve community universe - MavenRepoManager resolver = MavenResolver.newMavenResolver(); - resolver.resolveLatestVersion(artifact, lowestQualifier, locallyAvailable); + artifact.setVersion(getLatestVersion(artifact)); + resolve(artifact); } @Override 
public String getLatestVersion(MavenArtifact artifact) throws MavenUniverseException { - throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + return getLatestVersion(artifact, null, null, null); } @Override public String getLatestVersion(MavenArtifact artifact, String lowestQualifier) throws MavenUniverseException { - throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + return getLatestVersion(artifact, lowestQualifier, null, null); } @Override public String getLatestVersion(MavenArtifact artifact, String lowestQualifier, Pattern includeVersion, Pattern excludeVersion) throws MavenUniverseException { - throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe"); + try { + return channelSession.resolveMavenArtifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getExtension(), + artifact.getClassifier(), null).getVersion(); + } catch (UnresolvedMavenArtifactException e) { + VersionRangeResult res = getVersionRange(new DefaultArtifact(artifact.getGroupId(), + artifact.getArtifactId(), artifact.getExtension(), artifact.getVersionRange())); + return res.getHighestVersion().toString(); + } } @Override @@ -141,9 +184,22 @@ public void install(MavenArtifact artifact, Path path) throws MavenUniverseExcep @Override public String getLatestVersion(String groupId, String artifactId, String extension, String classifier, String baseVersion) { - org.wildfly.channel.MavenArtifact result = channelSession.resolveMavenArtifact(groupId, - artifactId, "jar", classifier, baseVersion); - return result.getVersion(); + VersionResult res = channelSession.findLatestMavenArtifactVersion(groupId, artifactId, extension, classifier, + baseVersion); + return res.getVersion(); + } + + private VersionRangeResult getVersionRange(Artifact artifact) throws MavenUniverseException { + VersionRangeRequest rangeRequest = new VersionRangeRequest(); + 
rangeRequest.setArtifact(artifact); + rangeRequest.setRepositories(repositories); + VersionRangeResult rangeResult; + try { + rangeResult = system.resolveVersionRange(session, rangeRequest); + } catch (VersionRangeResolutionException ex) { + throw new MavenUniverseException(ex.getLocalizedMessage(), ex); + } + return rangeResult; } } diff --git a/maven-resolver/src/main/java/org/wildfly/glow/maven/MavenResolver.java b/maven-resolver/src/main/java/org/wildfly/glow/maven/MavenResolver.java index 52272208..82ccc2cd 100644 --- a/maven-resolver/src/main/java/org/wildfly/glow/maven/MavenResolver.java +++ b/maven-resolver/src/main/java/org/wildfly/glow/maven/MavenResolver.java @@ -17,7 +17,6 @@ */ package org.wildfly.glow.maven; -import java.nio.file.Files; import java.nio.file.Path; import org.apache.maven.repository.internal.MavenRepositorySystemUtils; import org.eclipse.aether.DefaultRepositorySystemSession; @@ -36,11 +35,9 @@ import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; +import org.eclipse.aether.RepositorySystemSession; import org.eclipse.aether.repository.RepositoryPolicy; import org.wildfly.channel.Channel; -import org.wildfly.channel.ChannelMapper; -import org.wildfly.channel.ChannelSession; -import org.wildfly.channel.maven.VersionResolverFactory; /** * @@ -54,12 +51,32 @@ public final class MavenResolver { public static final String SPRING_REPO_URL = "https://repo.spring.io/milestone"; public static MavenRepoManager newMavenResolver() { - RepositorySystem repoSystem = MavenResolver.newRepositorySystem(); - DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession(); - String localPath = System.getProperty("maven.repo.local"); - Path localCache = localPath == null ? 
Paths.get(System.getProperty("user.home"), ".m2", "repository") : Paths.get(localPath); - LocalRepository localRepo = new LocalRepository(localCache.toFile()); - session.setLocalRepositoryManager(repoSystem.newLocalRepositoryManager(session, localRepo)); + RepositorySystem repoSystem = newRepositorySystem(); + MavenRepoManager resolver + = new MavenArtifactRepositoryManager(repoSystem, newMavenSession(repoSystem), getRemoteRepositories()); + return resolver; + } + + public static MavenRepoManager newMavenResolver(List channels) throws Exception { + RepositorySystem repoSystem = newRepositorySystem(); + MavenRepoManager resolver + = new ChannelMavenArtifactRepositoryManager(channels, repoSystem, newMavenSession(repoSystem), getRemoteRepositories()); + return resolver; + } + public static MavenRepoManager newRHMavenResolver(List channels) throws Exception { + RepositorySystem repoSystem = newRepositorySystem(); + MavenRepoManager resolver + = new ChannelMavenArtifactRepositoryManager(channels, repoSystem, newMavenSession(repoSystem), getRHRemoteRepositories()); + return resolver; + } + public static MavenRepoManager newRHMavenResolver() { + RepositorySystem repoSystem = newRepositorySystem(); + MavenArtifactRepositoryManager resolver + = new MavenArtifactRepositoryManager(repoSystem, newMavenSession(repoSystem), getRHRemoteRepositories()); + return resolver; + } + + public static List getRemoteRepositories() { List repos = new ArrayList<>(); RemoteRepository.Builder central = new RemoteRepository.Builder("central", "default", CENTRAL_REPO_URL); central.setSnapshotPolicy(new RepositoryPolicy(false, RepositoryPolicy.UPDATE_POLICY_NEVER, @@ -77,26 +94,10 @@ public static MavenRepoManager newMavenResolver() { spring.setSnapshotPolicy(new RepositoryPolicy(false, RepositoryPolicy.UPDATE_POLICY_NEVER, RepositoryPolicy.CHECKSUM_POLICY_IGNORE)); repos.add(spring.build()); - MavenArtifactRepositoryManager resolver - = new MavenArtifactRepositoryManager(repoSystem, session, 
repos); - return resolver; + return repos; } - static RepositorySystem newRepositorySystem() { - DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator(); - locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class); - locator.addService(TransporterFactory.class, FileTransporterFactory.class); - locator.addService(TransporterFactory.class, HttpTransporterFactory.class); - return locator.getService(RepositorySystem.class); - } - - public static MavenRepoManager newRHMavenResolver() { - RepositorySystem repoSystem = newRepositorySystem(); - DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession(); - String localPath = System.getProperty("maven.repo.local"); - Path localCache = localPath == null ? Paths.get(System.getProperty("user.home"), ".m2", "repository") : Paths.get(localPath); - LocalRepository localRepo = new LocalRepository(localCache.toFile()); - session.setLocalRepositoryManager(repoSystem.newLocalRepositoryManager(session, localRepo)); + public static List getRHRemoteRepositories() { List repos = new ArrayList<>(); RemoteRepository.Builder ga = new RemoteRepository.Builder("redhat-ga", "default", GA_REPO_URL); ga.setSnapshotPolicy(new RepositoryPolicy(false, RepositoryPolicy.UPDATE_POLICY_NEVER, @@ -104,34 +105,23 @@ public static MavenRepoManager newRHMavenResolver() { ga.setReleasePolicy(new RepositoryPolicy(true, RepositoryPolicy.UPDATE_POLICY_NEVER, RepositoryPolicy.CHECKSUM_POLICY_IGNORE)); repos.add(ga.build()); - MavenArtifactRepositoryManager resolver - = new MavenArtifactRepositoryManager(repoSystem, session, repos); - return resolver; - } - - public static MavenRepoManager buildMavenResolver(Path channelsFile) throws Exception { - MavenRepoManager resolver = null; - if (channelsFile != null) { - if (!Files.exists(channelsFile)) { - throw new Exception(channelsFile + " file doesn't exist"); - } - ChannelSession session = buildChannelSession(channelsFile); - resolver = 
new ChannelMavenArtifactRepositoryManager(session); - } else { - resolver = MavenResolver.newMavenResolver(); - } - return resolver; + return repos; } - public static ChannelSession buildChannelSession(Path path) throws Exception { - String content = Files.readString(path); - List channels = ChannelMapper.fromString(content); - Path localCache = Paths.get(System.getProperty("user.home"), ".m2", "repository"); - LocalRepository localRepo = new LocalRepository(localCache.toFile()); - RepositorySystem repoSystem = MavenResolver.newRepositorySystem(); + public static RepositorySystemSession newMavenSession(RepositorySystem repoSystem) { DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession(); + String localPath = System.getProperty("maven.repo.local"); + Path localCache = localPath == null ? Paths.get(System.getProperty("user.home"), ".m2", "repository") : Paths.get(localPath); + LocalRepository localRepo = new LocalRepository(localCache.toFile()); session.setLocalRepositoryManager(repoSystem.newLocalRepositoryManager(session, localRepo)); - VersionResolverFactory factory = new VersionResolverFactory(repoSystem, session); - return new ChannelSession(channels, factory); + return session; + } + + public static RepositorySystem newRepositorySystem() { + DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator(); + locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class); + locator.addService(TransporterFactory.class, FileTransporterFactory.class); + locator.addService(TransporterFactory.class, HttpTransporterFactory.class); + return locator.getService(RepositorySystem.class); } } diff --git a/openshift-deployment/amq-broker/pom.xml b/openshift-deployment/amq-broker/pom.xml index 32686353..8f8457ce 100644 --- a/openshift-deployment/amq-broker/pom.xml +++ b/openshift-deployment/amq-broker/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-openshift-deployment - 1.0.7.Final-SNAPSHOT + 
1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment-amq diff --git a/openshift-deployment/amq-broker/src/main/java/org/wildfly/glow/deployment/openshift/amq/AMQDeployer.java b/openshift-deployment/amq-broker/src/main/java/org/wildfly/glow/deployment/openshift/amq/AMQDeployer.java index 2bc78209..3dc41e0c 100644 --- a/openshift-deployment/amq-broker/src/main/java/org/wildfly/glow/deployment/openshift/amq/AMQDeployer.java +++ b/openshift-deployment/amq-broker/src/main/java/org/wildfly/glow/deployment/openshift/amq/AMQDeployer.java @@ -38,6 +38,7 @@ import org.wildfly.glow.Env; import org.wildfly.glow.GlowMessageWriter; import org.wildfly.glow.deployment.openshift.api.Deployer; +import org.wildfly.glow.deployment.openshift.api.OpenShiftSupport; import org.wildfly.glow.deployment.openshift.api.Utils; /** @@ -85,7 +86,7 @@ public class AMQDeployer implements Deployer { @Override public Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, - Map env, String appHost, String appName, String matching, Map extraEnv) throws Exception { + Map env, String appHost, String appName, String matching, Map extraEnv, boolean dryRun) throws Exception { writer.info("Deploying AMQ Messaging Broker"); Map labels = new HashMap<>(); labels.put(LABEL, REMOTE_BROKER_NAME); @@ -120,17 +121,25 @@ public Map deploy(GlowMessageWriter writer, Path target, OpenShi withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec(). withContainers(container).withRestartPolicy("Always"). 
endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build(); - osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update); - Utils.persistResource(target, deployment, REMOTE_BROKER_NAME + "-deployment.yaml"); + if (!dryRun) { + osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update); + } + Utils.persistResource(OpenShiftSupport.getDeployersDirectory(target), deployment, REMOTE_BROKER_NAME + "-deployment.yaml"); IntOrString v = new IntOrString(); v.setValue(61616); Service service = new ServiceBuilder().withNewMetadata().withName(REMOTE_BROKER_NAME).endMetadata(). withNewSpec().withPorts(new ServicePort().toBuilder().withName("61616-tcp").withProtocol("TCP"). withPort(61616). withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build(); - osClient.services().resource(service).createOr(NonDeletingOperation::update); - Utils.persistResource(target, service, REMOTE_BROKER_NAME + "-service.yaml"); - writer.info("AMQ Messaging Broker has been deployed"); + if (!dryRun) { + osClient.services().resource(service).createOr(NonDeletingOperation::update); + } + Utils.persistResource(OpenShiftSupport.getDeployersDirectory(target), service, REMOTE_BROKER_NAME + "-service.yaml"); + if (dryRun) { + writer.info("Resources for AMQ Messaging Broker have been generated"); + } else { + writer.info("AMQ Messaging Broker has been deployed"); + } return REMOTE_BROKER_APP_MAP; } diff --git a/openshift-deployment/api/pom.xml b/openshift-deployment/api/pom.xml index 8be0a388..13616ee4 100644 --- a/openshift-deployment/api/pom.xml +++ b/openshift-deployment/api/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-openshift-deployment - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment-api diff --git 
a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/AbstractDatabaseDeployer.java b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/AbstractDatabaseDeployer.java index 9a0ef269..de1fe214 100644 --- a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/AbstractDatabaseDeployer.java +++ b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/AbstractDatabaseDeployer.java @@ -103,7 +103,7 @@ private Map getExistingEnv(Map env) { @Override public Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, - Map env, String appHost, String appName, String matching, Map extraEnv) throws Exception { + Map env, String appHost, String appName, String matching, Map extraEnv, boolean dryRun) throws Exception { writer.info("Deploying " + dbName); Map labels = new HashMap<>(); labels.put(LABEL, dbName); @@ -131,20 +131,28 @@ public Map deploy(GlowMessageWriter writer, Path target, OpenShi withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec(). withContainers(container).withRestartPolicy("Always"). endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build(); - osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update); - Utils.persistResource(target, deployment, dbName + "-deployment.yaml"); + if (!dryRun) { + osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update); + } + Utils.persistResource(OpenShiftSupport.getDeployersDirectory(target), deployment, dbName + "-deployment.yaml"); IntOrString v = new IntOrString(); v.setValue(this.port); Service service = new ServiceBuilder().withNewMetadata().withName(dbName).endMetadata(). withNewSpec().withPorts(new ServicePort().toBuilder().withName(this.port + "-tcp").withProtocol("TCP"). withPort(this.port). 
withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build(); - osClient.services().resource(service).createOr(NonDeletingOperation::update); - Utils.persistResource(target, service, dbName + "-service.yaml"); + if (!dryRun) { + osClient.services().resource(service).createOr(NonDeletingOperation::update); + } + Utils.persistResource(OpenShiftSupport.getDeployersDirectory(target), service, dbName + "-service.yaml"); Map ret = new HashMap<>(); ret.putAll(getExistingEnv(env)); ret.putAll(APP_MAP); - writer.info(dbName + " server has been deployed"); + if(dryRun) { + writer.info("Resources for " + dbName + " have been generated"); + } else { + writer.info(dbName + " server has been deployed"); + } return ret; } diff --git a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java index 6348054d..b49ec6cf 100644 --- a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java +++ b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java @@ -35,7 +35,7 @@ public interface Deployer { String getName(); - Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map env, String appHost, String appName, String matching, Map extraEnv) throws Exception; + Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map env, String appHost, String appName, String matching, Map extraEnv, boolean dryRun) throws Exception; default Map disabledDeploy(String appHost, String appName, String matching, Map env) { return Collections.emptyMap(); diff --git a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/OpenShiftSupport.java b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/OpenShiftSupport.java index 2b4280e8..fbd2d7d7 100644 
--- a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/OpenShiftSupport.java +++ b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/OpenShiftSupport.java @@ -51,18 +51,21 @@ import io.fabric8.openshift.api.model.RouteTargetReference; import io.fabric8.openshift.api.model.TLSConfig; import io.fabric8.openshift.client.OpenShiftClient; +import java.io.FileOutputStream; +import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Properties; import java.util.ServiceLoader; import java.util.Set; import java.util.TreeMap; @@ -77,17 +80,32 @@ import org.jboss.galleon.universe.maven.repo.MavenRepoManager; import org.jboss.galleon.util.IoUtils; import org.jboss.galleon.util.ZipUtils; +import org.wildfly.channel.Channel; +import org.wildfly.channel.ChannelMapper; import org.wildfly.glow.ConfigurationResolver; import org.wildfly.glow.Env; import org.wildfly.glow.GlowMessageWriter; import org.wildfly.glow.Layer; import org.wildfly.glow.ScanResults; - /** * * @author jdenise */ public class OpenShiftSupport { + private static final String DEPLOYMENT_RESOURCE_DIR = "deployment"; + private static final String BUILD_RESOURCE_DIR = "build"; + private static final String IMAGES_RESOURCE_DIR = "images"; + private static final String DEPLOYMENTS_DIR = "deployments"; + private static final String DOCKER_DIR = "docker"; + private static final String EXTENSIONS_DIR = "extensions"; + private static final String TMP_DIR = "tmp"; + private static final String DOCKER_SERVER_DIR = "server"; + private static final String DOCKER_APP_DIR = "app"; + private static 
final String DEPLOYERS_RESOURCE_DIR = "deployers"; + private static final String RESOURCES_DIR = "resources"; + private static final String WILDFLY_GLOW_SERVER_IMAGE_REPOSITORY = "WILDFLY_GLOW_SERVER_IMAGE_REPOSITORY"; + private static final String WILDFLY_GLOW_APP_IMAGE_REPOSITORY = "WILDFLY_GLOW_APP_IMAGE_REPOSITORY"; + private static final String IMAGE_PROPERTIES_FILE = "images.properties"; private static class BuildWatcher implements Watcher, AutoCloseable { @@ -133,18 +151,87 @@ public void close() throws Exception { } } + public static Path getDeploymentDirectory(Path target) throws IOException { + return createResourcesDirectory(target, DEPLOYMENT_RESOURCE_DIR); + } + + public static Path getBuildDirectory(Path target) throws IOException { + return createResourcesDirectory(target, BUILD_RESOURCE_DIR); + } + + public static Path getImagesDirectory(Path target) throws IOException { + return createResourcesDirectory(target, IMAGES_RESOURCE_DIR); + } + + public static Path getDockerServerDirectory(Path target) throws IOException { + return createDockerDirectory(target, DOCKER_SERVER_DIR); + } + + public static Path getDockerAppDirectory(Path target) throws IOException { + return createDockerDirectory(target, DOCKER_APP_DIR); + } + + public static Path getExtensionsDirectory(Path target) throws IOException { + Path ext = createDockerDirectory(target, DOCKER_APP_DIR).resolve(EXTENSIONS_DIR); + Files.createDirectories(ext); + return ext; + } + + public static Path getDeploymentsDirectory(Path target) throws IOException { + Path deps = createDockerDirectory(target, DOCKER_APP_DIR).resolve(DEPLOYMENTS_DIR); + Files.createDirectories(deps); + return deps; + } + + public static Path getDockerDirectory(Path target) throws IOException { + Path dir = target.resolve(DOCKER_DIR); + Files.createDirectories(dir); + return dir; + } + + public static Path getTmpDirectory(Path target) throws IOException { + Path dir = target.resolve(TMP_DIR); + Files.createDirectories(dir); + 
return dir; + } + + public static Path getDeployersDirectory(Path target) throws IOException { + return createResourcesDirectory(target, DEPLOYERS_RESOURCE_DIR); + } + + private static Path createResourcesDirectory(Path path, String name) throws IOException { + Path dir = getResourcesDirectory(path).resolve(name); + Files.createDirectories(dir); + return dir; + } + private static Path getResourcesDirectory(Path path) throws IOException { + Path dir = path.resolve(RESOURCES_DIR); + Files.createDirectories(dir); + return dir; + } + private static Path createDockerDirectory(Path path, String name) throws IOException { + Path dir = getDockerDirectory(path).resolve(name); + Files.createDirectories(dir); + return dir; + } + private static void createAppDeployment(GlowMessageWriter writer, Path target, - OpenShiftClient osClient, String name, Map env, boolean ha, OpenShiftConfiguration config, String deploymentKind) throws Exception { + OpenShiftClient osClient, String appName, Map env, boolean ha, + OpenShiftConfiguration config, + String deploymentKind, String appImageTag) throws Exception { Map matchLabels = new HashMap<>(); - matchLabels.put(Deployer.LABEL, name); + matchLabels.put(Deployer.LABEL, appName); IntOrString value = new IntOrString(); value.setValue(8080); - Service service = new ServiceBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(name).endMetadata(). + Service service = new ServiceBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(appName).endMetadata(). withNewSpec().withPorts(new ServicePort().toBuilder().withProtocol("TCP"). withPort(8080). 
withTargetPort(value).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(matchLabels).endSpec().build(); - osClient.services().resource(service).createOr(NonDeletingOperation::update); - Utils.persistResource(target, service, name + "-service.yaml"); + if (osClient != null) { + osClient.services().resource(service).createOr(NonDeletingOperation::update); + } + + Utils.persistResource(getDeploymentDirectory(target), service, appName + "-service.yaml"); ContainerPort port = new ContainerPort(); port.setContainerPort(8080); @@ -167,7 +254,7 @@ private static void createAppDeployment(GlowMessageWriter writer, Path target, writer.info("\nHA enabled, 2 replicas will be started."); IntOrString v = new IntOrString(); v.setValue(8888); - Service pingService = new ServiceBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(name + "-ping").endMetadata(). + Service pingService = new ServiceBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(appName + "-ping").endMetadata(). withNewSpec().withPorts(new ServicePort().toBuilder().withProtocol("TCP"). withPort(8888). withName("ping"). @@ -176,12 +263,15 @@ private static void createAppDeployment(GlowMessageWriter writer, Path target, withInternalTrafficPolicy("Cluster").withClusterIPs("None"). withType("ClusterIP").withIpFamilyPolicy("SingleStack"). withSessionAffinity("None").withSelector(matchLabels).endSpec().build(); - osClient.services().resource(pingService).createOr(NonDeletingOperation::update); - Utils.persistResource(target, pingService, name + "-ping-service.yaml"); + if (osClient != null) { + osClient.services().resource(pingService).createOr(NonDeletingOperation::update); + } + Utils.persistResource(getDeploymentDirectory(target), pingService, appName+"-ping-service.yaml"); } Container container = new Container(); - container.setName(name); - container.setImage(name + ":latest"); + container.setName(appName); + String imageName = osClient == null ? 
(WILDFLY_GLOW_APP_IMAGE_REPOSITORY + ":" + appImageTag) : (appName + ":latest"); + container.setImage(imageName); container.setPorts(ports); container.setEnv(vars); container.setImagePullPolicy("IfNotPresent"); @@ -215,33 +305,43 @@ private static void createAppDeployment(GlowMessageWriter writer, Path target, Map labels = createCommonLabels(config); labels.putAll(matchLabels); - writer.info("\nWaiting until the application " + deploymentKind + " is ready ..."); + if (osClient != null) { + writer.info("\nWaiting until the application " + deploymentKind + " is ready ..."); + } if (ha) { - StatefulSet deployment = new StatefulSetBuilder().withNewMetadata().withLabels(labels).withName(name).endMetadata(). - withNewSpec().withReplicas(ha ? 2 : 1). + StatefulSet deployment = new StatefulSetBuilder().withNewMetadata().withLabels(labels).withName(appName).endMetadata(). + withNewSpec().withReplicas(2). withNewSelector().withMatchLabels(matchLabels).endSelector(). withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec(). withContainers(container).withRestartPolicy("Always"). endSpec().endTemplate().withNewUpdateStrategy().withType("RollingUpdate").endUpdateStrategy().endSpec().build(); - osClient.resources(StatefulSet.class).resource(deployment).createOr(NonDeletingOperation::update); - Utils.persistResource(target, deployment, name + "-statefulset.yaml"); - osClient.resources(StatefulSet.class).resource(deployment).waitUntilReady(5, TimeUnit.MINUTES); + if (osClient != null) { + osClient.resources(StatefulSet.class).resource(deployment).createOr(NonDeletingOperation::update); + } + Utils.persistResource(getDeploymentDirectory(target), deployment, appName+ "-statefulset.yaml"); + if (osClient != null) { + osClient.resources(StatefulSet.class).resource(deployment).waitUntilReady(5, TimeUnit.MINUTES); + } } else { - Deployment deployment = new DeploymentBuilder().withNewMetadata().withLabels(labels).withName(name).endMetadata(). 
- withNewSpec().withReplicas(ha ? 2 : 1). + Deployment deployment = new DeploymentBuilder().withNewMetadata().withLabels(labels).withName(appName).endMetadata(). + withNewSpec().withReplicas(1). withNewSelector().withMatchLabels(matchLabels).endSelector(). withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec(). withContainers(container).withRestartPolicy("Always"). endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build(); - osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update); - Utils.persistResource(target, deployment, name + "-deployment.yaml"); - osClient.resources(Deployment.class).resource(deployment).waitUntilReady(5, TimeUnit.MINUTES); + if (osClient != null) { + osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update); + } + Utils.persistResource(getDeploymentDirectory(target), deployment, appName + "-deployment.yaml"); + if (osClient != null) { + osClient.resources(Deployment.class).resource(deployment).waitUntilReady(5, TimeUnit.MINUTES); + } } } - public static ConfigurationResolver.ResolvedEnvs getResolvedEnvs(Layer layer, Set input, Set disabledDeployers) throws Exception { + public static ConfigurationResolver.ResolvedEnvs getResolvedEnvs(Layer layer, Set input, Set disabledDeployers, Set enabledDeployers) throws Exception { ConfigurationResolver.ResolvedEnvs resolved = null; - List deployers = getEnabledDeployers(disabledDeployers); + List deployers = getEnabledDeployers(disabledDeployers, enabledDeployers); for (Deployer d : deployers) { if (d.getSupportedLayers().contains(layer.getName())) { Set envs = d.getResolvedEnvs(input); @@ -254,8 +354,8 @@ public static ConfigurationResolver.ResolvedEnvs getResolvedEnvs(Layer layer, Se return resolved; } - public static String getPossibleDeployer(Set layers, Set disabledDeployers) throws Exception { - List deployers = 
getEnabledDeployers(disabledDeployers); + public static String getPossibleDeployer(Set layers, Set disabledDeployers, Set enabledDeployers) throws Exception { + List deployers = getEnabledDeployers(disabledDeployers, enabledDeployers); for (Deployer d : deployers) { for (Layer l : layers) { if (d.getSupportedLayers().contains(l.getName())) { @@ -266,11 +366,24 @@ public static String getPossibleDeployer(Set layers, Set disabled return null; } - private static List getEnabledDeployers(Set disabledDeployers) throws Exception { - Map existingDeployers = new HashMap<>(); + private static List getEnabledDeployers(Set disabledDeployers, Set enabledDeployers) throws Exception { + List existingDeployers = getAllDeployers(disabledDeployers, enabledDeployers); + List deployers = new ArrayList<>(); + for (Deployer d : existingDeployers) { + boolean isDisabled = isDisabled(d.getName(), disabledDeployers, enabledDeployers); + if (!isDisabled) { + deployers.add(d); + } + } + return deployers; + } + private static List getAllDeployers(Set disabledDeployers, Set enabledDeployers) throws Exception { + Map existingDeployers = new HashMap<>(); + List deployers = new ArrayList<>(); for (Deployer d : ServiceLoader.load(Deployer.class)) { existingDeployers.put(d.getName(), d); + deployers.add(d); } for (String disabled : disabledDeployers) { if (!"ALL".equals(disabled)) { @@ -279,11 +392,14 @@ private static List getEnabledDeployers(Set disabledDeployers) } } } - List deployers = new ArrayList<>(); - for (Deployer d : existingDeployers.values()) { - boolean isDisabled = isDisabled(d.getName(), disabledDeployers); - if (!isDisabled) { - deployers.add(d); + if (!enabledDeployers.isEmpty()) { + if (!disabledDeployers.contains("ALL")) { + throw new Exception("Enabled deployers is not empty although not ALL deployers are disabled."); + } + } + for (String enabled : enabledDeployers) { + if (!existingDeployers.containsKey(enabled)) { + throw new Exception("Invalid deployer to enable: " + 
enabled); } } return deployers; @@ -336,6 +452,7 @@ private static String truncateValue(String val) { } public static void deploy(List deployments, + String applicationName, GlowMessageWriter writer, Path target, ScanResults scanResults, @@ -343,28 +460,30 @@ public static void deploy(List deployments, Map extraEnv, Map buildExtraEnv, Set disabledDeployers, + Set enabledDeployers, Path initScript, Path cliScript, OpenShiftConfiguration config, MavenRepoManager mvnResolver, String stability, - Map serverImageBuildLabels) throws Exception { + Map serverImageBuildLabels, boolean dryRun, List channels) throws Exception { Set layers = scanResults.getDiscoveredLayers(); Set metadataOnlyLayers = scanResults.getMetadataOnlyLayers(); Map> requiredBuildTime = scanResults.getSuggestions().getBuildTimeRequiredConfigurations(); - String appName = ""; String originalAppName = null; if (deployments != null && !deployments.isEmpty()) { - Path deploymentsDir = target.resolve("deployments"); + Path deploymentsDir = getDeploymentsDirectory(target); Files.createDirectories(deploymentsDir); for (Path p : deployments) { Files.copy(p, deploymentsDir.resolve(p.getFileName())); int ext = p.getFileName().toString().lastIndexOf("."); - appName += p.getFileName().toString().substring(0, ext); + if (applicationName == null || applicationName.isEmpty()) { + applicationName = p.getFileName().toString().substring(0, ext); + applicationName = generateValidName(applicationName); + } if (originalAppName == null) { - originalAppName = appName; + originalAppName = p.getFileName().toString().substring(0, ext); } - appName = generateValidName(appName); } } else { throw new Exception("No application to deploy to OpenShift"); @@ -380,21 +499,30 @@ public static void deploy(List deployments, allLayers.addAll(metadataOnlyLayers); Map actualEnv = new TreeMap<>(); Map actualBuildEnv = new TreeMap<>(); - OpenShiftClient osClient = new KubernetesClientBuilder().build().adapt(OpenShiftClient.class); - 
writer.info("\nConnected to OpenShift cluster"); + OpenShiftClient osClient = null; + if(!dryRun) { + osClient = new KubernetesClientBuilder().build().adapt(OpenShiftClient.class); + writer.info("\nConnected to OpenShift cluster"); + } // First create the future route to the application, can be needed by deployers - Route route = new RouteBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(appName). + Route route = new RouteBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(applicationName). endMetadata().withNewSpec(). - withTo(new RouteTargetReference("Service", appName, 100)). + withTo(new RouteTargetReference("Service", applicationName, 100)). withTls(new TLSConfig().toBuilder().withTermination("edge"). withInsecureEdgeTerminationPolicy("Redirect").build()).endSpec().build(); - osClient.routes().resource(route).createOr(NonDeletingOperation::update); - Utils.persistResource(target, route, appName + "-route.yaml"); - String host = osClient.routes().resource(route).get().getSpec().getHost(); + if (osClient != null) { + osClient.routes().resource(route).createOr(NonDeletingOperation::update); + } + Utils.persistResource(getDeploymentDirectory(target), route, applicationName + "-route.yaml"); + String host = null; + if(osClient != null) { + host = osClient.routes().resource(route).get().getSpec().getHost(); + } // Done route creation - List deployers = getEnabledDeployers(disabledDeployers); + + List deployers = getAllDeployers(disabledDeployers, enabledDeployers); for (Deployer d : deployers) { - boolean isDisabled = isDisabled(d.getName(), disabledDeployers); + boolean isDisabled = isDisabled(d.getName(), disabledDeployers, enabledDeployers); for (Layer l : allLayers) { if (d.getSupportedLayers().contains(l.getName())) { if (!isDisabled) { @@ -402,7 +530,7 @@ public static void deploy(List deployments, } else { writer.warn("\nThe deployer " + d.getName() + " has been disabled"); } - actualEnv.putAll(isDisabled ? 
Collections.emptyMap() : d.deploy(writer, target, osClient, env, host, appName, l.getName(), extraEnv)); + actualEnv.putAll(isDisabled ? d.disabledDeploy(host, applicationName, host, env) : d.deploy(writer, target, osClient, env, host, applicationName, l.getName(), extraEnv, dryRun)); Set buildEnv = requiredBuildTime.get(l); if (buildEnv != null) { Set names = new HashSet<>(); @@ -417,7 +545,9 @@ public static void deploy(List deployments, } } } - actualEnv.put("APPLICATION_ROUTE_HOST", host); + if (!dryRun) { + actualEnv.put("APPLICATION_ROUTE_HOST", host); + } actualEnv.putAll(extraEnv); if (stability != null) { String val = actualEnv.get("SERVER_ARGS"); @@ -438,42 +568,72 @@ public static void deploy(List deployments, } String deploymentKind = ha ? "StatefulSet" : "Deployment"; if (!disabledDeployers.isEmpty()) { - writer.warn("The following environment variables will be set in the " + appName + " " + deploymentKind + ". Make sure that the required env variables for the disabled deployer(s) have been set:\n"); + writer.warn("The following environment variables will be set in the " + applicationName + " " + deploymentKind + ". 
Make sure that the required env variables for the disabled deployer(s) have been set:\n"); } else { - writer.warn("The following environment variables will be set in the " + appName + " " + deploymentKind + ":\n"); + writer.warn("The following environment variables will be set in the " + applicationName + " " + deploymentKind + ":\n"); } if (ha) { actualEnv.put("JGROUPS_PING_PROTOCOL", "openshift.DNS_PING"); actualEnv.put("OPENSHIFT_DNS_PING_SERVICE_PORT", "8888"); - actualEnv.put("OPENSHIFT_DNS_PING_SERVICE_NAME", appName + "-ping"); + actualEnv.put("OPENSHIFT_DNS_PING_SERVICE_NAME", applicationName + "-ping"); } for (Entry entry : actualEnv.entrySet()) { writer.warn(entry.getKey() + "=" + entry.getValue()); } // Can be overriden by user actualBuildEnv.putAll(buildExtraEnv); - createBuild(writer, target, osClient, appName, initScript, cliScript, actualBuildEnv, config, serverImageBuildLabels); - writer.info("Deploying application image on OpenShift"); - createAppDeployment(writer, target, osClient, appName, actualEnv, ha, config, deploymentKind); - writer.info("Application route: https://" + host + ( "ROOT.war".equals(appName) ? 
"" : "/" + originalAppName)); + Properties properties = new Properties(); + createBuild(writer, target, osClient, applicationName, initScript, cliScript, actualBuildEnv, config, serverImageBuildLabels, properties, channels); + if (!dryRun) { + writer.info("Deploying application image on OpenShift"); + } + String appImageTag = null; + if( osClient == null) { + appImageTag = generateClientImageHash(deployments, target, buildExtraEnv, initScript, cliScript, channels); + properties.setProperty("app-image-tag", appImageTag); + } + createAppDeployment(writer, target, osClient, applicationName, actualEnv, ha, config, deploymentKind, appImageTag); + try(FileOutputStream out = new FileOutputStream(getDockerDirectory(target).resolve(IMAGE_PROPERTIES_FILE).toFile())) { + properties.store(out, null); + } + if (dryRun) { + writer.info("\nThe following generated content can be used to produce server and application container images and deploy to OpenShift.\n"+ + "NOTE: Some editing is required. Check the following steps:\n"); + writer.info("* Directory '" + target.resolve("galleon") + "' contains the provisioning file used to provision a server.\n"); + writer.info("* Directory '" + getDockerServerDirectory(target) + "' contains the Dockerfile to build the server image. This Dockerfile expects that you first provision a server (e.g.: using Galleon CLI) in the directory '" + getDockerServerDirectory(target) + "' using the generated provisioning.xml.\n"); + writer.info("NOTE: The file '" + getDockerDirectory(target).resolve(IMAGE_PROPERTIES_FILE) + "' contains the server image tag that is expected by the application Dockerfile.\n"); + writer.info("* Directory '" + getDockerAppDirectory(target) + "' contains the Dockerfile to build the application image. 
Make sure to replace the '" + WILDFLY_GLOW_SERVER_IMAGE_REPOSITORY + "' string with the repository where the server image has been pushed (e.g.: 'quay.io/my-organization/wildfly-servers').\n"); + writer.info("NOTE: The file '" + getDockerDirectory(target).resolve(IMAGE_PROPERTIES_FILE) + "' contains the aplication image tag that is expected by the Deployment.\n"); + writer.info("* Directory '" + getDeploymentDirectory(target) + "' contains the openshift resources. Make sure to replace the '" +WILDFLY_GLOW_APP_IMAGE_REPOSITORY +"' string with the repository where the application image has been pushed (e.g.: 'quay.io/my-organization/" + applicationName + "-image').\n"); + } else { + writer.info("Application route: https://" + host + ( "ROOT.war".equals(applicationName) ? "" : "/" + originalAppName)); + } + IoUtils.recursiveDelete(getTmpDirectory(target)); } private static void createBuild(GlowMessageWriter writer, Path target, OpenShiftClient osClient, - String name, + String appName, Path initScript, Path cliScript, Map buildExtraEnv, OpenShiftConfiguration config, - Map serverImageBuildLabels) throws Exception { - String serverImageName = doServerImageBuild(writer, target, osClient, buildExtraEnv, config, serverImageBuildLabels); - doAppImageBuild(serverImageName, writer, target, osClient, name, initScript, cliScript, config); + Map serverImageBuildLabels, Properties properties, Listchannels) throws Exception { + if (osClient == null) { + generateDockerServerImage(writer, target, buildExtraEnv, config); + String serverImageTag = generateServerImageHash(target, buildExtraEnv, channels); + properties.setProperty("server-image-tag", serverImageTag); + doAppImageBuild(null, writer, target, osClient, appName, initScript, cliScript, config, serverImageTag); + } else { + String serverImageName = doServerImageBuild(writer, target, osClient, buildExtraEnv, config, serverImageBuildLabels, channels); + doAppImageBuild(serverImageName, writer, target, osClient, appName, 
initScript, cliScript, config, null); + } } - private static boolean packageInitScript(Path initScript, Path cliScript, Path target) throws Exception { + private static boolean packageInitScript(boolean dryRun, Path initScript, Path cliScript, Path target) throws Exception { if (initScript != null || cliScript != null) { - Path extensions = target.resolve("extensions"); + Path extensions = dryRun? getExtensionsDirectory(target) : target.resolve("extensions"); Files.createDirectories(extensions); StringBuilder initExecution = new StringBuilder(); initExecution.append("#!/bin/bash").append("\n"); @@ -496,8 +656,8 @@ private static boolean packageInitScript(Path initScript, Path cliScript, Path t return false; } - private static boolean isDisabled(String name, Set disabledDeployers) { - return disabledDeployers.contains("ALL") || disabledDeployers.contains(name); + private static boolean isDisabled(String name, Set disabledDeployers, Set enabledDeployers) { + return !enabledDeployers.contains(name) && ( disabledDeployers.contains("ALL") || disabledDeployers.contains(name)); } private static String bytesToHex(byte[] hash) { @@ -521,57 +681,131 @@ private static Map createCommonLabels(OpenShiftConfiguration osC private static Map createServerImageLabels(Path target, Path provisioning, OpenShiftConfiguration osConfig, Map serverImageBuildLabels) throws Exception { GalleonBuilder provider = new GalleonBuilder(); - Path dir = target.resolve("tmp").resolve("tmpHome"); - Files.createDirectory(dir); - StringBuilder fps = new StringBuilder(); + Path dir = getTmpDirectory(target).resolve("tmpHome"); Map labels = new HashMap<>(); - try (Provisioning p = provider.newProvisioningBuilder(provisioning).setInstallationHome(dir).build()) { - GalleonProvisioningConfig config = provider.newProvisioningBuilder(provisioning).setInstallationHome(dir).build().loadProvisioningConfig(provisioning); - GalleonConfigurationWithLayers cl = config.getDefinedConfig(new ConfigId("standalone", 
"standalone.xml")); - for (String s : cl.getIncludedLayers()) { - labels.put(truncateValue(osConfig.getLabelRadical() + ".layer." + s), ""); - } - for (String s : cl.getExcludedLayers()) { - labels.put(truncateValue(osConfig.getLabelRadical() + ".excluded.layer." + s), ""); - } - for (GalleonFeaturePackConfig gfpc : config.getFeaturePackDeps()) { - if (fps.length() != 0) { - fps.append("_"); + try { + Files.createDirectories(dir); + StringBuilder fps = new StringBuilder(); + try (Provisioning p = provider.newProvisioningBuilder(provisioning).setInstallationHome(dir).build()) { + GalleonProvisioningConfig config = p.loadProvisioningConfig(provisioning); + GalleonConfigurationWithLayers cl = config.getDefinedConfig(new ConfigId("standalone", "standalone.xml")); + for (String s : cl.getIncludedLayers()) { + labels.put(truncateValue(osConfig.getLabelRadical() + ".layer." + s), ""); } - String producerName = gfpc.getLocation().getProducerName(); - producerName = producerName.replaceAll("::zip", ""); - int i = producerName.indexOf(":"); - if (i > 0) { - producerName = producerName.substring(i + 1); + for (String s : cl.getExcludedLayers()) { + labels.put(truncateValue(osConfig.getLabelRadical() + ".excluded.layer." + s), ""); } - producerName = producerName.replaceAll(":", "-"); - labels.put(truncateValue(osConfig.getLabelRadical() + producerName), gfpc.getLocation().getBuild()); + for (GalleonFeaturePackConfig gfpc : config.getFeaturePackDeps()) { + if (fps.length() != 0) { + fps.append("_"); + } + String producerName = gfpc.getLocation().getProducerName(); + producerName = producerName.replaceAll("::zip", ""); + int i = producerName.indexOf(":"); + if (i > 0) { + producerName = producerName.substring(i + 1); + } + producerName = producerName.replaceAll(":", "-"); + labels.put(truncateValue(osConfig.getLabelRadical() + "." 
+ producerName), gfpc.getLocation().getBuild()); + } + } + + for (Entry entry : serverImageBuildLabels.entrySet()) { + labels.put(truncateValue(entry.getKey()), truncateValue(entry.getValue())); } + } finally { + IoUtils.recursiveDelete(dir); } + return labels; + } - for (Entry entry : serverImageBuildLabels.entrySet()) { - labels.put(truncateValue(entry.getKey()), truncateValue(entry.getValue())); + private static Map createDockerImageLabels(Path target, Path provisioning, OpenShiftConfiguration osConfig) throws Exception { + GalleonBuilder provider = new GalleonBuilder(); + Path dir = getTmpDirectory(target).resolve("tmpHome"); + Map labels = new HashMap<>(); + try { + Files.createDirectories(dir); + try (Provisioning p = provider.newProvisioningBuilder(provisioning).setInstallationHome(dir).build()) { + GalleonProvisioningConfig config = p.loadProvisioningConfig(provisioning); + GalleonConfigurationWithLayers cl = config.getDefinedConfig(new ConfigId("standalone", "standalone.xml")); + for (String s : cl.getIncludedLayers()) { + String current = labels.get(osConfig.getLabelRadical() + ".layers"); + labels.put(osConfig.getLabelRadical() + ".layers", (current == null ? "" : current + ",") +s); + } + for (String s : cl.getExcludedLayers()) { + String current = labels.get(osConfig.getLabelRadical() + ".excluded-layers"); + labels.put(osConfig.getLabelRadical() + ".excluded-layers", (current == null ? 
"" : current+",") + s); + } + for (GalleonFeaturePackConfig gfpc : config.getFeaturePackDeps()) { + String producerName = gfpc.getLocation().getProducerName(); + producerName = producerName.replaceAll("::zip", ""); + int i = producerName.indexOf(":"); + if (i > 0) { + producerName = producerName.substring(i + 1); + } + producerName = producerName.replaceAll(":", "_"); + String version = gfpc.getLocation().getBuild(); + if (version != null) { + producerName += "_" + version; + } + String current = labels.get(osConfig.getLabelRadical() + ".feature-packs"); + labels.put(osConfig.getLabelRadical() + ".feature-packs", (current == null ? "" : current+",") + producerName); + } + } + } finally { + IoUtils.recursiveDelete(dir); } - labels.putAll(createCommonLabels(osConfig)); return labels; } - private static String doServerImageBuild(GlowMessageWriter writer, Path target, OpenShiftClient osClient, - Map buildExtraEnv, - OpenShiftConfiguration config, - Map serverImageBuildLabels) throws Exception { + private static String generateServerImageHash(Path target, + Map buildExtraEnv, List channels) throws IOException, NoSuchAlgorithmException { // To compute a hash we need build time env variables StringBuilder contentBuilder = new StringBuilder(); Path provisioning = target.resolve("galleon").resolve("provisioning.xml"); contentBuilder.append(Files.readString(provisioning, Charset.forName("UTF-8"))); for (Entry entry : buildExtraEnv.entrySet()) { - contentBuilder.append(entry.getKey() + "=" + entry.getValue()); + contentBuilder.append(entry.getKey()).append("=").append(entry.getValue()); + } + if (channels != null && !channels.isEmpty()) { + contentBuilder.append(ChannelMapper.toYaml(channels)); } MessageDigest digest = MessageDigest.getInstance("MD5"); byte[] encodedhash = digest.digest(contentBuilder.toString().getBytes()); String key = bytesToHex(encodedhash); - String serverImageName = config.getServerImageNameRadical() + key; + return key; + } + + private static String 
generateClientImageHash(List deployments, Path target, + Map buildExtraEnv, Path initScript, Path cliScript, List channels) throws IOException, NoSuchAlgorithmException { + String server=generateServerImageHash(target, buildExtraEnv, channels); + StringBuilder contentBuilder = new StringBuilder(); + contentBuilder.append(server); + for (Path p : deployments) { + MessageDigest digest = MessageDigest.getInstance("MD5"); + byte[] encodedhash = digest.digest(Files.readAllBytes(p)); + String key = bytesToHex(encodedhash); + contentBuilder.append(key); + } + if (initScript != null) { + contentBuilder.append(Files.readString(initScript, Charset.forName("UTF-8"))); + } + if (cliScript != null) { + contentBuilder.append(Files.readString(cliScript, Charset.forName("UTF-8"))); + } + MessageDigest digest = MessageDigest.getInstance("MD5"); + byte[] encodedhash = digest.digest(contentBuilder.toString().getBytes()); + String key = bytesToHex(encodedhash); + return key; + } + + private static String doServerImageBuild(GlowMessageWriter writer, Path target, OpenShiftClient osClient, + Map buildExtraEnv, + OpenShiftConfiguration config, + Map serverImageBuildLabels, List channels) throws Exception { + Path provisioning = target.resolve("galleon").resolve("provisioning.xml"); + String serverImageName = config.getServerImageNameRadical() + generateServerImageHash(target, buildExtraEnv, channels); ImageStream stream = new ImageStreamBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(serverImageName). 
endMetadata().withNewSpec().withLookupPolicy(new ImageLookupPolicy(Boolean.TRUE)).endSpec().build(); // check if it exists @@ -579,18 +813,18 @@ private static String doServerImageBuild(GlowMessageWriter writer, Path target, if (existingStream == null) { writer.info("\nBuilding server image (this can take up to few minutes)..."); // zip deployment and provisioning.xml to be pushed to OpenShift - Path file = target.resolve("tmp").resolve("openshiftServer.zip"); + Path file = getTmpDirectory(target).resolve("openshiftServer.zip"); if (Files.exists(file)) { Files.delete(file); } // First do a build of the naked server - Path stepOne = target.resolve("tmp").resolve("step-one"); + Path stepOne = getTmpDirectory(target).resolve("step-one"); Files.createDirectories(stepOne); IoUtils.copy(target.resolve("galleon"), stepOne.resolve("galleon")); ZipUtils.zip(stepOne, file); stream = stream.toBuilder().editOrNewMetadata().withLabels(createServerImageLabels(target, provisioning, config, serverImageBuildLabels)).endMetadata().build(); osClient.imageStreams().resource(stream).createOr(NonDeletingOperation::update); - Utils.persistResource(target, stream, serverImageName + "-image-stream.yaml"); + Utils.persistResource(getBuildDirectory(target), stream, serverImageName + "-image-stream.yaml"); BuildConfigBuilder builder = new BuildConfigBuilder(); ObjectReference ref = new ObjectReference(); ref.setKind("ImageStreamTag"); @@ -618,7 +852,7 @@ private static String doServerImageBuild(GlowMessageWriter writer, Path target, endSourceStrategy().endStrategy().withNewSource(). 
withType("Binary").endSource().endSpec().build(); osClient.buildConfigs().resource(buildConfig).createOr(NonDeletingOperation::update); - Utils.persistResource(target, buildConfig, serverImageName + "-build-config.yaml"); + Utils.persistResource(getBuildDirectory(target), buildConfig, serverImageName + "-build-config.yaml"); Build build = osClient.buildConfigs().withName(serverImageName + "-build").instantiateBinary().fromFile(file.toFile()); BuildWatcher buildWatcher = new BuildWatcher(writer); @@ -633,70 +867,110 @@ private static String doServerImageBuild(GlowMessageWriter writer, Path target, return serverImageName; } + private static void generateDockerServerImage(GlowMessageWriter writer, Path target, + Map buildExtraEnv, + OpenShiftConfiguration config) throws Exception { + Path provisioning = target.resolve("galleon").resolve("provisioning.xml"); + Map labels = createDockerImageLabels(target, provisioning, config); + StringBuilder dockerFileBuilder = new StringBuilder(); + for(Entry entry : labels.entrySet()) { + dockerFileBuilder.append("LABEL ").append(entry.getKey()).append("=").append(entry.getValue()).append("\n"); + } + if (!buildExtraEnv.isEmpty()) { + writer.warn("\nThe following environment variables have been set in the server Dockerfile:\n"); + for (Map.Entry entry : buildExtraEnv.entrySet()) { + String val = buildExtraEnv.get(entry.getKey()); + writer.warn(entry.getKey() + "=" + entry.getValue()); + dockerFileBuilder.append("ENV ").append(entry.getKey()).append("=").append(val == null ? 
entry.getValue() : val).append("\n"); + } + } + dockerFileBuilder.append("FROM ").append(config.getRuntimeImage()).append("\n"); + dockerFileBuilder.append("COPY server $JBOSS_HOME\n"); + dockerFileBuilder.append("USER root\n"); + dockerFileBuilder.append("RUN chown -R jboss:root $JBOSS_HOME && chmod -R ug+rwX $JBOSS_HOME\n"); + dockerFileBuilder.append("USER jboss"); + Files.write(getDockerServerDirectory(target).resolve("Dockerfile"), dockerFileBuilder.toString().getBytes()); + } + private static void doAppImageBuild(String serverImageName, GlowMessageWriter writer, Path target, OpenShiftClient osClient, - String name, + String appName, Path initScript, Path cliScript, - OpenShiftConfiguration config) throws Exception { + OpenShiftConfiguration config, + String serverImageTag) throws Exception { // Now step 2 // From the server image, do a docker build, copy the server and copy in it the deployments and init file. - Path stepTwo = target.resolve("tmp").resolve("step-two"); - IoUtils.copy(target.resolve("deployments"), stepTwo.resolve("deployments")); + Path stepTwo = target; + if (osClient != null) { + stepTwo = getTmpDirectory(target).resolve("step-two"); + IoUtils.copy(getDeploymentsDirectory(target), stepTwo.resolve("deployments")); + } StringBuilder dockerFileBuilder = new StringBuilder(); - dockerFileBuilder.append("FROM " + config.getRuntimeImage() + "\n"); - dockerFileBuilder.append("COPY --chown=jboss:root /server $JBOSS_HOME\n"); + if (osClient != null) { + dockerFileBuilder.append("FROM ").append(config.getRuntimeImage()).append("\n"); + dockerFileBuilder.append("COPY --chown=jboss:root /server $JBOSS_HOME\n"); + } else { + dockerFileBuilder.append("FROM ").append(WILDFLY_GLOW_SERVER_IMAGE_REPOSITORY).append(":").append(serverImageTag).append("\n"); + } dockerFileBuilder.append("COPY --chown=jboss:root deployments/* $JBOSS_HOME/standalone/deployments\n"); - if (packageInitScript(initScript, cliScript, stepTwo)) { + if (packageInitScript(osClient == 
null, initScript, cliScript, stepTwo)) { dockerFileBuilder.append("COPY --chown=jboss:root extensions $JBOSS_HOME/extensions\n"); dockerFileBuilder.append("RUN chmod ug+rwx $JBOSS_HOME/extensions/postconfigure.sh\n"); } dockerFileBuilder.append("RUN chmod -R ug+rwX $JBOSS_HOME\n"); - - Path dockerFile = stepTwo.resolve("Dockerfile"); - Files.write(dockerFile, dockerFileBuilder.toString().getBytes()); - Path file2 = target.resolve("tmp").resolve("openshiftApp.zip"); - if (Files.exists(file2)) { - Files.delete(file2); + Files.write(getDockerAppDirectory(target).resolve("Dockerfile"), dockerFileBuilder.toString().getBytes()); + Path file2 = null; + if (osClient != null) { + Path dockerFile = stepTwo.resolve("Dockerfile"); + Files.write(dockerFile, dockerFileBuilder.toString().getBytes()); + file2 = getTmpDirectory(target).resolve("openshiftApp.zip"); + if (Files.exists(file2)) { + Files.delete(file2); + } + ZipUtils.zip(stepTwo, file2); + writer.info("\nBuilding application image..."); } - ZipUtils.zip(stepTwo, file2); - writer.info("\nBuilding application image..."); - ImageStream appStream = new ImageStreamBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(name). + ImageStream appStream = new ImageStreamBuilder().withNewMetadata().withLabels(createCommonLabels(config)).withName(appName). 
endMetadata().withNewSpec().withLookupPolicy(new ImageLookupPolicy(Boolean.TRUE)).endSpec().build(); - osClient.imageStreams().resource(appStream).createOr(NonDeletingOperation::update); + if (osClient != null) { + osClient.imageStreams().resource(appStream).createOr(NonDeletingOperation::update); + Utils.persistResource(getImagesDirectory(target), appStream, appName + "-image-stream.yaml"); + } BuildConfigBuilder builder = new BuildConfigBuilder(); ObjectReference ref = new ObjectReference(); ref.setKind("ImageStreamTag"); ref.setName(serverImageName + ":latest"); ImageSourcePath srcPath = new ImageSourcePathBuilder().withSourcePath("/opt/server").withDestinationDir(".").build(); ImageSource imageSource = new ImageSourceBuilder().withFrom(ref).withPaths(srcPath).build(); - BuildConfig buildConfig2 = builder. - withNewMetadata().withLabels(createCommonLabels(config)).withName(name + "-build").endMetadata().withNewSpec(). - withNewOutput(). - withNewTo(). - withKind("ImageStreamTag"). - withName(name + ":latest").endTo(). - endOutput(). - withNewSource().withType("Binary").withImages(imageSource).endSource(). - withNewStrategy().withNewDockerStrategy().withNewFrom().withKind("DockerImage"). - withName("quay.io/wildfly/wildfly-runtime:latest").endFrom(). - withDockerfilePath("./Dockerfile"). 
- endDockerStrategy().endStrategy().endSpec().build(); - osClient.buildConfigs().resource(buildConfig2).createOr(NonDeletingOperation::update); - Utils.persistResource(target, buildConfig2, name + "-build-config.yaml"); - - Build build = osClient.buildConfigs().withName(name + "-build").instantiateBinary().fromFile(file2.toFile()); - CountDownLatch latch = new CountDownLatch(1); - BuildWatcher buildWatcher = new BuildWatcher(writer); - try (Watch watcher = osClient.builds().withName(build.getMetadata().getName()).watch(buildWatcher)) { - buildWatcher.await(); - } - if (buildWatcher.isFailed()) { - osClient.imageStreams().resource(appStream).delete(); - throw new Exception("Application image build has failed. Check the OpenShift build log."); + if (osClient != null) { + BuildConfig buildConfig2 = builder. + withNewMetadata().withLabels(createCommonLabels(config)).withName(appName + "-build").endMetadata().withNewSpec(). + withNewOutput(). + withNewTo(). + withKind("ImageStreamTag"). + withName(appName + ":latest").endTo(). + endOutput(). + withNewSource().withType("Binary").withImages(imageSource).endSource(). + withNewStrategy().withNewDockerStrategy().withNewFrom().withKind("DockerImage"). + withName("quay.io/wildfly/wildfly-runtime:latest").endFrom(). + withDockerfilePath("./Dockerfile"). 
+ endDockerStrategy().endStrategy().endSpec().build(); + osClient.buildConfigs().resource(buildConfig2).createOr(NonDeletingOperation::update); + Utils.persistResource(getBuildDirectory(target), buildConfig2, appName + "-build-config.yaml"); + + Build build = osClient.buildConfigs().withName(appName + "-build").instantiateBinary().fromFile(file2.toFile()); + BuildWatcher buildWatcher = new BuildWatcher(writer); + try (Watch watcher = osClient.builds().withName(build.getMetadata().getName()).watch(buildWatcher)) { + buildWatcher.await(); + } + if (buildWatcher.isFailed()) { + osClient.imageStreams().resource(appStream).delete(); + throw new Exception("Application image build has failed. Check the OpenShift build log."); + } } } } diff --git a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Utils.java b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Utils.java index 8baace36..222c3374 100644 --- a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Utils.java +++ b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Utils.java @@ -28,9 +28,7 @@ public class Utils { public static void persistResource(Path target, Object resource, String name) throws IOException { - Path dir = target.resolve("resources"); - Files.createDirectories(dir); - Path resourceFile = dir.resolve(name); + Path resourceFile = target.resolve(name); Files.write(resourceFile, Serialization.asYaml(resource).getBytes()); } } diff --git a/openshift-deployment/keycloak/pom.xml b/openshift-deployment/keycloak/pom.xml index 3615a83e..f4f45d24 100644 --- a/openshift-deployment/keycloak/pom.xml +++ b/openshift-deployment/keycloak/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-openshift-deployment - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment-keycloak diff --git 
a/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java b/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java index 7c7dd4fb..ca4d4022 100644 --- a/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java +++ b/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java @@ -35,6 +35,7 @@ import org.wildfly.glow.Env; import org.wildfly.glow.GlowMessageWriter; import org.wildfly.glow.deployment.openshift.api.Deployer; +import org.wildfly.glow.deployment.openshift.api.OpenShiftSupport; import org.wildfly.glow.deployment.openshift.api.Utils; /** @@ -90,7 +91,7 @@ private Map getExistingEnv(Map env) { @Override public Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map env, - String appHost, String appName, String matching, Map extraEnv) throws Exception { + String appHost, String appName, String matching, Map extraEnv, boolean dryRun) throws Exception { writer.info("Deploying Keycloak server"); Map parameters = new HashMap<>(); String adminVal = extraEnv.get(KEYCLOAK_ADMIN_ENV); @@ -103,15 +104,19 @@ public Map deploy(GlowMessageWriter writer, Path target, OpenShi final KubernetesList processedTemplateWithCustomParameters = osClient.templates(). 
withName(KEYCLOAK_NAME) .process(parameters); - osClient.resourceList(processedTemplateWithCustomParameters).createOrReplace(); - Utils.persistResource(target, processedTemplateWithCustomParameters, KEYCLOAK_NAME + "-resources.yaml"); + if (!dryRun) { + osClient.resourceList(processedTemplateWithCustomParameters).createOrReplace(); + } + Utils.persistResource(OpenShiftSupport.getDeployersDirectory(target), processedTemplateWithCustomParameters, KEYCLOAK_NAME + "-resources.yaml"); writer.info("Waiting until keycloak is ready ..."); DeploymentConfig dc = new DeploymentConfigBuilder().withNewMetadata().withName(KEYCLOAK_NAME).endMetadata().build(); - osClient.resources(DeploymentConfig.class).resource(dc).waitUntilReady(5, TimeUnit.MINUTES); + if (!dryRun) { + osClient.resources(DeploymentConfig.class).resource(dc).waitUntilReady(5, TimeUnit.MINUTES); + } Route route = new RouteBuilder().withNewMetadata().withName(KEYCLOAK_NAME). endMetadata().build(); - String host = osClient.routes().resource(route).get().getSpec().getHost(); + String host = dryRun ? 
"TO_BE_DEFINED" : osClient.routes().resource(route).get().getSpec().getHost(); String url = "https://" + host; writer.info("Keycloak route: " + url); Map retEnv = new HashMap<>(); @@ -136,7 +141,11 @@ public Map deploy(GlowMessageWriter writer, Path target, OpenShi retEnv.put(OIDC_USER_PASSWORD_ENV, KEYCLOAK_DEMO_PASSWORD); retEnv.put(OIDC_HOSTNAME_HTTPS_ENV, appHost); } - writer.info("Keycloak server has been deployed"); + if (dryRun) { + writer.info("Resources for Keycloak server have been generated"); + } else { + writer.info("Keycloak server has been deployed"); + } return retEnv; } diff --git a/openshift-deployment/mariadb/pom.xml b/openshift-deployment/mariadb/pom.xml index a594980c..4891300e 100644 --- a/openshift-deployment/mariadb/pom.xml +++ b/openshift-deployment/mariadb/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-openshift-deployment - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment-mariadb diff --git a/openshift-deployment/mysql/pom.xml b/openshift-deployment/mysql/pom.xml index 706fad12..a424a824 100644 --- a/openshift-deployment/mysql/pom.xml +++ b/openshift-deployment/mysql/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-openshift-deployment - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment-mysql diff --git a/openshift-deployment/pom.xml b/openshift-deployment/pom.xml index a699af3d..6a8ec620 100644 --- a/openshift-deployment/pom.xml +++ b/openshift-deployment/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment diff --git a/openshift-deployment/postgresql/pom.xml b/openshift-deployment/postgresql/pom.xml index 362329f1..a2461cdb 100644 --- a/openshift-deployment/postgresql/pom.xml +++ b/openshift-deployment/postgresql/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-openshift-deployment - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-openshift-deployment-postgresql diff 
--git a/pom.xml b/pom.xml index 6500d432..27fa9ee2 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT pom UTF-8 @@ -24,13 +24,13 @@ 1.0.0.Final 1.7.0.Final 8.18 - 2.14.2 + 2.17.0 3.1.6 3.8.6 3.0.0 1.6.3 6.0.0.Final - 1.0.5.Final + 1.1.0.Final 1.2.1.Final 2.0 3.12.0 diff --git a/tests/arquillian-plugin-tests/pom.xml b/tests/arquillian-plugin-tests/pom.xml index 79fccc99..353d20da 100644 --- a/tests/arquillian-plugin-tests/pom.xml +++ b/tests/arquillian-plugin-tests/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-tests-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-arquillian-plugin-tests diff --git a/tests/glow-tests/pom.xml b/tests/glow-tests/pom.xml index e594d664..08ca811d 100644 --- a/tests/glow-tests/pom.xml +++ b/tests/glow-tests/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-tests-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-tests diff --git a/tests/pom.xml b/tests/pom.xml index b15f375b..3dda71b9 100644 --- a/tests/pom.xml +++ b/tests/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-tests-parent diff --git a/tests/test-feature-pack/galleon-pack/pom.xml b/tests/test-feature-pack/galleon-pack/pom.xml index b594b143..c51e8a70 100644 --- a/tests/test-feature-pack/galleon-pack/pom.xml +++ b/tests/test-feature-pack/galleon-pack/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-test-feature-pack-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-test-galleon-pack diff --git a/tests/test-feature-pack/layer-metadata-tests/pom.xml b/tests/test-feature-pack/layer-metadata-tests/pom.xml index 9ef746b5..6ffb58a4 100644 --- a/tests/test-feature-pack/layer-metadata-tests/pom.xml +++ b/tests/test-feature-pack/layer-metadata-tests/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-test-feature-pack-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT 
wildfly-glow-test-feature-pack-layer-metadata-tests diff --git a/tests/test-feature-pack/pom.xml b/tests/test-feature-pack/pom.xml index 9b623cf7..1d45bbba 100644 --- a/tests/test-feature-pack/pom.xml +++ b/tests/test-feature-pack/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-tests-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-test-feature-pack-parent diff --git a/tests/test-feature-pack/test-classes/pom.xml b/tests/test-feature-pack/test-classes/pom.xml index 77e76da8..d7f1627f 100644 --- a/tests/test-feature-pack/test-classes/pom.xml +++ b/tests/test-feature-pack/test-classes/pom.xml @@ -5,7 +5,7 @@ org.wildfly.glow wildfly-glow-test-feature-pack-parent - 1.0.7.Final-SNAPSHOT + 1.1.0.Final-SNAPSHOT wildfly-glow-test-feature-pack-classes