diff --git a/cli/pom.xml b/cli/pom.xml
index 91305d01..ad696e8c 100644
--- a/cli/pom.xml
+++ b/cli/pom.xml
@@ -29,10 +29,30 @@
             <groupId>${project.groupId}</groupId>
             <artifactId>wildfly-glow-core</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-postgresql</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-artemis</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-keycloak</artifactId>
+        </dependency>
         <dependency>
             <groupId>info.picocli</groupId>
             <artifactId>picocli</artifactId>
         </dependency>
+        <dependency>
+            <groupId>io.fabric8</groupId>
+            <artifactId>openshift-client</artifactId>
+        </dependency>
         <!-- to fix slf4j warning when provisioning -->
         <dependency>
             <groupId>org.jboss.slf4j</groupId>
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java b/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java
index fcca6faa..9a636267 100644
--- a/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java
@@ -41,10 +41,14 @@ public interface Constants {
 
     String CLOUD_OPTION = "--cloud";
     String CLOUD_OPTION_SHORT = "-c";
+    String DISABLE_DEPLOYERS = "--disable-deployers";
+    String DISABLE_DEPLOYERS_LABEL = "<ALL|deployer name>";
     String DOCKER_IMAGE_NAME_OPTION = "--docker-image-name";
     String DOCKER_IMAGE_NAME_OPTION_LABEL = "<docker image name>";
     String DOCKER_IMAGE_NAME_OPTION_SHORT = "-di";
-
+    String ENV_FILE_OPTION = "--env-file";
+    String ENV_FILE_OPTION_SHORT = "-ef";
+    String ENV_FILE_OPTION_LABEL = "<env file path>";
     String EXCLUDE_ARCHIVES_FROM_SCAN_OPTION = "--exclude-archives-from-scan";
     String EXCLUDE_ARCHIVES_FROM_SCAN_OPTION_LABEL = "<list of nested archive names>";
     String EXCLUDE_ARCHIVES_FROM_SCAN_OPTION_SHORT = "-ea";
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/OpenShiftSupport.java b/cli/src/main/java/org/wildfly/glow/cli/commands/OpenShiftSupport.java
new file mode 100644
index 00000000..39e824b5
--- /dev/null
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/OpenShiftSupport.java
@@ -0,0 +1,288 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.cli.commands;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerPort;
+import io.fabric8.kubernetes.api.model.EnvVar;
+import io.fabric8.kubernetes.api.model.HTTPGetAction;
+import io.fabric8.kubernetes.api.model.IntOrString;
+import io.fabric8.kubernetes.api.model.ObjectReference;
+import io.fabric8.kubernetes.api.model.Probe;
+import io.fabric8.kubernetes.api.model.Service;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
+import io.fabric8.kubernetes.api.model.ServicePort;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
+import io.fabric8.kubernetes.client.KubernetesClientBuilder;
+import io.fabric8.kubernetes.client.Watch;
+import io.fabric8.kubernetes.client.Watcher;
+import io.fabric8.kubernetes.client.WatcherException;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.api.model.Build;
+import io.fabric8.openshift.api.model.BuildConfig;
+import io.fabric8.openshift.api.model.BuildConfigBuilder;
+import io.fabric8.openshift.api.model.ImageLookupPolicy;
+import io.fabric8.openshift.api.model.ImageStream;
+import io.fabric8.openshift.api.model.ImageStreamBuilder;
+import io.fabric8.openshift.api.model.Route;
+import io.fabric8.openshift.api.model.RouteBuilder;
+import io.fabric8.openshift.api.model.RouteTargetReference;
+import io.fabric8.openshift.api.model.TLSConfig;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import org.jboss.galleon.util.ZipUtils;
+import org.wildfly.glow.AddOn;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.Layer;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+class OpenShiftSupport {
+
+    private static void createAppDeployment(GlowMessageWriter writer, Path target, OpenShiftClient osClient, String name, Map<String, String> env, boolean ha) throws Exception {
+        writer.info("Deploying application image on OpenShift");
+        Map<String, String> labels = new HashMap<>();
+        labels.put(Deployer.LABEL, name);
+        ContainerPort port = new ContainerPort();
+        port.setContainerPort(8080);
+        port.setName("http");
+        port.setProtocol("TCP");
+
+        ContainerPort portAdmin = new ContainerPort();
+        portAdmin.setContainerPort(9990);
+        portAdmin.setName("admin");
+        portAdmin.setProtocol("TCP");
+
+        List<ContainerPort> ports = new ArrayList<>();
+        ports.add(port);
+        ports.add(portAdmin);
+        List<EnvVar> vars = new ArrayList<>();
+        for (Entry<String, String> entry : env.entrySet()) {
+            vars.add(new EnvVar().toBuilder().withName(entry.getKey()).withValue(entry.getValue()).build());
+        }
+        Container container = new Container();
+        container.setName(name);
+        container.setImage(name + ":latest");
+        container.setPorts(ports);
+        container.setEnv(vars);
+        container.setImagePullPolicy("IfNotPresent");
+        Probe readinessProbe = new Probe();
+        HTTPGetAction getAction = new HTTPGetAction();
+        getAction.setPath("/health/ready");
+        IntOrString pp = new IntOrString("admin");
+        getAction.setPort(pp);
+        getAction.setScheme("HTTP");
+        readinessProbe.setHttpGet(getAction);
+        readinessProbe.setTimeoutSeconds(1);
+        readinessProbe.setPeriodSeconds(10);
+        readinessProbe.setSuccessThreshold(1);
+        readinessProbe.setFailureThreshold(3);
+
+        container.setReadinessProbe(readinessProbe);
+        container.setTerminationMessagePath("/dev/termination-log");
+
+        Probe livenessProbe = new Probe();
+        HTTPGetAction getAction2 = new HTTPGetAction();
+        getAction2.setPath("/health/live");
+        IntOrString pp2 = new IntOrString("admin");
+        getAction2.setPort(pp2);
+        getAction2.setScheme("HTTP");
+        livenessProbe.setHttpGet(getAction2);
+        livenessProbe.setTimeoutSeconds(1);
+        livenessProbe.setPeriodSeconds(10);
+        livenessProbe.setSuccessThreshold(1);
+        livenessProbe.setFailureThreshold(3);
+        container.setLivenessProbe(livenessProbe);
+
+        Deployment deployment = new DeploymentBuilder().withNewMetadata().withName(name).endMetadata().
+                withNewSpec().withReplicas(ha ? 2 : 1).
+                withNewSelector().withMatchLabels(labels).endSelector().
+                withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec().
+                withContainers(container).withRestartPolicy("Always").
+                endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build();
+        osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(name + "-deployment.yaml"), Serialization.asYaml(deployment).getBytes());
+        IntOrString v = new IntOrString();
+        v.setValue(8080);
+        Service service = new ServiceBuilder().withNewMetadata().withName(name).endMetadata().
+                withNewSpec().withPorts(new ServicePort().toBuilder().withProtocol("TCP").
+                        withPort(8080).
+                        withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build();
+        osClient.services().resource(service).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(name + "-service.yaml"), Serialization.asYaml(service).getBytes());
+
+        writer.info("Waiting until the application is ready ...");
+        osClient.resources(Deployment.class).resource(deployment).waitUntilReady(5, TimeUnit.MINUTES);
+    }
+
+    static void deploy(GlowMessageWriter writer, Path target, String appName, Map<String, String> env, Set<Layer> layers, Set<AddOn> addOns, boolean ha,
+            Map<String, String> extraEnv, Set<String> disabledDeployers) throws Exception {
+        Map<String, String> actualEnv = new TreeMap<>();
+        OpenShiftClient osClient = new KubernetesClientBuilder().build().adapt(OpenShiftClient.class);
+        writer.info("\nConnected to OpenShift cluster");
+        // First create the future route to the application, can be needed by deployers
+        Route route = new RouteBuilder().withNewMetadata().withName(appName).
+                endMetadata().withNewSpec().
+                withTo(new RouteTargetReference("Service", appName, 100)).
+                withTls(new TLSConfig().toBuilder().withTermination("edge").
+                        withInsecureEdgeTerminationPolicy("Redirect").build()).endSpec().build();
+        osClient.routes().resource(route).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(appName + "-route.yaml"), Serialization.asYaml(route).getBytes());
+        String host = osClient.routes().resource(route).get().getSpec().getHost();
+        // Done route creation
+        Map<String, Deployer> existingDeployers = new HashMap<>();
+
+        for (Deployer d : ServiceLoader.load(Deployer.class)) {
+            existingDeployers.put(d.getName(), d);
+        }
+        for (String disabled : disabledDeployers) {
+            if (!"ALL".equals(disabled)) {
+                if (!existingDeployers.containsKey(disabled)) {
+                    throw new Exception("Invalid deployer to disable: " + disabled);
+                }
+            }
+        }
+        for (Deployer d : existingDeployers.values()) {
+            boolean deployed = false;
+            boolean isDisabled = isDisabled(d.getName(), disabledDeployers);
+            for (Layer l : layers) {
+                if (d.getSupportedLayers().contains(l.getName())) {
+                    deployed = true;
+                    if (!isDisabled) {
+                        writer.info("Found deployer " + d.getName() + " for " + l.getName());
+                    } else {
+                        writer.warn("The deployer " + d.getName() + " has been disabled");
+                    }
+                    actualEnv.putAll(isDisabled ? d.disabledDeploy(host, appName, l.getName(), env) : d.deploy(writer, target, osClient, env, host, appName, l.getName()));
+                    break;
+                }
+            }
+            if (!deployed) {
+                for (AddOn ao : addOns) {
+                    if (ao.getFamily().equals(d.getSupportedAddOnFamily())
+                            && d.getSupportedAddOns().contains(ao.getName())) {
+                        if (!isDisabled) {
+                            writer.info("Found deployer " + d.getName() + " for " + ao.getName());
+                        } else {
+                            writer.warn("The deployer " + d.getName() + " has been disabled");
+                        }
+                        actualEnv.putAll(isDisabled ? d.disabledDeploy(host, appName, ao.getName(), env) : d.deploy(writer, target, osClient, env, host, appName, ao.getName()));
+                        break;
+                    }
+                }
+            }
+        }
+
+        createBuild(writer, target, osClient, appName);
+
+        actualEnv.putAll(extraEnv);
+        if (!actualEnv.isEmpty()) {
+            if (!disabledDeployers.isEmpty()) {
+                writer.warn("\nThe following environment variables have been set in the " + appName + " deployment. WARN: Some of them need possibly to be updated in the deployment:");
+            } else {
+                writer.warn("\nThe following environment variables have been set in the " + appName + " deployment:");
+            }
+            for (Entry<String, String> entry : actualEnv.entrySet()) {
+                writer.warn(entry.getKey() + "=" + entry.getValue());
+            }
+        }
+        createAppDeployment(writer, target, osClient, appName, actualEnv, ha);
+        writer.info("\nApplication route: https://" + host + ("ROOT.war".equals(appName) ? "" : "/" + appName));
+    }
+
+    private static boolean isDisabled(String name, Set<String> disabledDeployers) {
+        return disabledDeployers.contains("ALL") || disabledDeployers.contains(name);
+    }
+
+    static void createBuild(GlowMessageWriter writer, Path target, OpenShiftClient osClient, String name) throws Exception {
+        // zip deployment and provisioning.xml to be pushed to OpenShift
+        Path file = Paths.get("openshiftApp.zip");
+        if (Files.exists(file)) {
+            Files.delete(file);
+        }
+        file.toFile().deleteOnExit();
+        ZipUtils.zip(target, file);
+        writer.info("\nCreating and starting application image build on OpenShift (this can take up to few minutes)...");
+        ImageStream stream = new ImageStreamBuilder().withNewMetadata().withName(name).
+                endMetadata().withNewSpec().withLookupPolicy(new ImageLookupPolicy(Boolean.TRUE)).endSpec().build();
+        osClient.imageStreams().resource(stream).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(name + "-image-stream.yaml"), Serialization.asYaml(stream).getBytes());
+        BuildConfigBuilder builder = new BuildConfigBuilder();
+        ObjectReference ref = new ObjectReference();
+        ref.setKind("ImageStreamTag");
+        ref.setName(name + ":latest");
+        BuildConfig buildConfig = builder.
+                withNewMetadata().withName(name + "-build").endMetadata().withNewSpec().
+                withNewOutput().
+                withNewTo().
+                withKind("ImageStreamTag").
+                withName(name + ":latest").endTo().
+                endOutput().withNewStrategy().withNewSourceStrategy().withNewFrom().withKind("DockerImage").
+                withName("quay.io/wildfly/wildfly-s2i:latest").endFrom().
+                withIncremental(true).
+                withEnv(new EnvVar().toBuilder().withName("GALLEON_USE_LOCAL_FILE").withValue("true").build()).
+                endSourceStrategy().endStrategy().withNewSource().
+                withType("Binary").endSource().endSpec().build();
+        osClient.buildConfigs().resource(buildConfig).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(name + "-build-config.yaml"), Serialization.asYaml(buildConfig).getBytes());
+
+        Build build = osClient.buildConfigs().withName(name + "-build").instantiateBinary().fromFile(file.toFile());
+        CountDownLatch latch = new CountDownLatch(1);
+        try (Watch watcher = osClient.builds().withName(build.getMetadata().getName()).watch(getBuildWatcher(writer, latch))) {
+            latch.await();
+        }
+    }
+
+    private static Watcher<Build> getBuildWatcher(GlowMessageWriter writer, final CountDownLatch latch) {
+        return new Watcher<Build>() {
+            @Override
+            public void eventReceived(Action action, Build build) {
+                String phase = build.getStatus().getPhase();
+                if ("Running".equals(phase)) {
+                    writer.info("Build is running...");
+                }
+                if ("Complete".equals(phase)) {
+                    writer.info("Build is complete.");
+                    latch.countDown();
+                }
+            }
+
+            @Override
+            public void onClose(WatcherException cause) {
+            }
+        };
+    }
+}
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java b/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java
index b363c2b6..4024dc83 100644
--- a/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java
@@ -16,6 +16,7 @@
  */
 package org.wildfly.glow.cli.commands;
 
+import java.nio.file.Files;
 import org.jboss.galleon.util.IoUtils;
 import org.wildfly.glow.Arguments;
 import org.wildfly.glow.FeaturePacks;
@@ -33,17 +34,21 @@
 
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import org.jboss.as.version.Stability;
 
 import static org.wildfly.glow.Arguments.CLOUD_EXECUTION_CONTEXT;
 import static org.wildfly.glow.Arguments.COMPACT_PROPERTY;
+import org.wildfly.glow.Env;
 import static org.wildfly.glow.OutputFormat.BOOTABLE_JAR;
 import static org.wildfly.glow.OutputFormat.DOCKER_IMAGE;
+import static org.wildfly.glow.OutputFormat.OPENSHIFT;
 
 @CommandLine.Command(
         name = Constants.SCAN_COMMAND,
@@ -106,6 +111,12 @@ public Stability convert(String value) throws Exception {
     @CommandLine.Option(converter = StabilityConverter.class, names = {Constants.STABILITY_OPTION, Constants.STABILITY_OPTION_SHORT}, paramLabel = Constants.STABILITY_LABEL)
     Optional<Stability> stability;
 
+    @CommandLine.Option(names = {Constants.ENV_FILE_OPTION_SHORT, Constants.ENV_FILE_OPTION}, paramLabel = Constants.ENV_FILE_OPTION_LABEL)
+    Optional<Path> envFile;
+
+    @CommandLine.Option(names = Constants.DISABLE_DEPLOYERS, split = ",", paramLabel = Constants.DISABLE_DEPLOYERS_LABEL)
+    Set<String> disableDeployers = new LinkedHashSet<>();
+
     @Override
     public Integer call() throws Exception {
         HiddenPropertiesAccessor hiddenPropertiesAccessor = new HiddenPropertiesAccessor();
@@ -134,6 +145,29 @@ public Integer call() throws Exception {
         if (wildflyServerVersion.isPresent()) {
             builder.setVersion(wildflyServerVersion.get());
         }
+        Map<String, String> extraEnv = new HashMap<>();
+        if (envFile.isPresent()) {
+            if (!provision.isPresent() || !OPENSHIFT.equals(provision.get())) {
+                throw new Exception("Env file is only usable when --provision=" + OPENSHIFT + " option is set.");
+            }
+            Path p = envFile.get();
+            if (!Files.exists(p)) {
+                throw new Exception(p + " file doesn't exist");
+            }
+            for(String l : Files.readAllLines(p)) {
+                if (!l.startsWith("#")) {
+                    int i = l.indexOf("=");
+                    if (i < 0 || i == l.length() - 1) {
+                        throw new Exception("Invalid environment variable " + l + " in " + p);
+                    }
+                    extraEnv.put(l.substring(0, i), l.substring(i+1));
+                }
+            }
+        }
         builder.setVerbose(verbose);
         if (!addOns.isEmpty()) {
             builder.setUserEnabledAddOns(addOns);
@@ -151,6 +185,9 @@ public Integer call() throws Exception {
             if (DOCKER_IMAGE.equals(provision.get()) && !cloud.orElse(false)) {
                 throw new Exception("Can't produce a Docker image if cloud is not enabled. Use the " + Constants.CLOUD_OPTION + " option.");
             }
+            if (OPENSHIFT.equals(provision.get()) && !cloud.orElse(false)) {
+                throw new Exception("Can't build/deploy on openShift if cloud is not enabled. Use the " + Constants.CLOUD_OPTION + " option.");
+            }
             builder.setOutput(provision.get());
         }
         builder.setExcludeArchivesFromScan(excludeArchivesFromScan);
@@ -218,6 +255,10 @@ public Integer call() throws Exception {
                     print("@|bold Generating docker image...|@");
                     break;
                 }
+                case OPENSHIFT: {
+                    print("@|bold Openshift build and deploy...|@");
+                    break;
+                }
             }
             OutputContent content = scanResults.outputConfig(target, dockerImageName.orElse(null));
             Path base = Paths.get("").toAbsolutePath();
@@ -235,7 +276,9 @@ public Integer call() throws Exception {
                         break;
                     }
                     case ENV_FILE: {
-                        print("@|bold The file " + rel + " contains the list of environment variables that you must set prior to start the server.|@");
+                        if (!OutputFormat.OPENSHIFT.equals(provision.get())) {
+                            print("@|bold The file " + rel + " contains the list of environment variables that you must set prior to start the server.|@");
+                        }
                         switch (provision.get()) {
                             case SERVER: {
                                 print("@|bold Export the suggested env variables for the server to take them into account.|@");
@@ -253,9 +296,14 @@ public Integer call() throws Exception {
                         break;
                     }
                     case PROVISIONING_XML_FILE: {
-                        print("@|bold Generation DONE.|@");
-                        print("@|bold Galleon Provisioning configuration is located in " + rel + " file|@");
+                        switch (provision.get()) {
+                            case PROVISIONING_XML: {
+                                print("@|bold Generation DONE.|@");
+                                print("@|bold Galleon Provisioning configuration is located in " + rel + " file|@");
+                            }
+                        }
                         break;
                     }
                     case SERVER_DIR: {
                         print("@|bold Provisioning DONE.|@");
@@ -268,6 +316,23 @@ public Integer call() throws Exception {
                     }
                 }
             }
+            if (OutputFormat.OPENSHIFT.equals(provision.get())) {
+                String name = null;
+                for (Path p : deployments) {
+                    Files.copy(p, target.resolve(p.getFileName()));
+                    int ext = p.getFileName().toString().indexOf(".");
+                    name = p.getFileName().toString().substring(0, ext);
+                }
+                Map<String, String> envMap = new HashMap<>();
+                for(Set<Env> envs : scanResults.getSuggestions().getStronglySuggestedConfigurations().values()) {
+                    for(Env env : envs) {
+                        envMap.put(env.getName(), env.getDescription());
+                    }
+                }
+                OpenShiftSupport.deploy(GlowMessageWriter.DEFAULT, target, name == null ? "app-from-wildfly-glow" : name, envMap, scanResults.getDiscoveredLayers(),
+                        scanResults.getEnabledAddOns(), haProfile.orElse(false), extraEnv, disableDeployers);
+                print("@|bold Openshift build and deploy DONE.|@");
+            }
             if (content.getDockerImageName() != null) {
                 print("@|bold To run the image call: 'docker run " + content.getDockerImageName() + "'|@");
             }
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java b/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java
index 43d57b61..7d4c55c5 100644
--- a/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java
@@ -20,6 +20,7 @@
 import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Optional;
+import java.util.ServiceLoader;
 import java.util.Set;
 import java.util.TreeSet;
 import org.jboss.galleon.api.config.GalleonFeaturePackConfig;
@@ -29,6 +30,7 @@
 import org.wildfly.glow.FeaturePacks;
 import org.wildfly.glow.Layer;
 import org.wildfly.glow.LayerMapping;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
 
 import picocli.CommandLine;
 
@@ -53,6 +55,18 @@ public class ShowConfigurationCommand extends AbstractCommand {
     @Override
     public Integer call() throws Exception {
         print("Wildfly Glow is retrieving known provisioning configuration...");
+        StringBuilder ocBuilder = new StringBuilder();
+        ocBuilder.append("\nDeployers enabled when provisioning to OpenShift:\n");
+        for (Deployer d : ServiceLoader.load(Deployer.class)) {
+            ocBuilder.append("* " + d.getName() + ". Enabled when one of the following ");
+            if (!d.getSupportedLayers().isEmpty()) {
+                ocBuilder.append("layer(s) " + d.getSupportedLayers() + " is/are discovered.\n");
+            } else {
+                ocBuilder.append("add-on(s) " + d.getSupportedAddOns() + " is/are enabled.\n");
+            }
+        }
+        print(ocBuilder.toString());
+
         String context = Arguments.BARE_METAL_EXECUTION_CONTEXT;
         if (cloud.orElse(false)) {
             context = Arguments.CLOUD_EXECUTION_CONTEXT;
diff --git a/cli/src/main/resources/UsageMessages.properties b/cli/src/main/resources/UsageMessages.properties
index 02bc28ee..812db6f9 100644
--- a/cli/src/main/resources/UsageMessages.properties
+++ b/cli/src/main/resources/UsageMessages.properties
@@ -31,10 +31,13 @@ suggest = WildFly Glow will suggest additional add-ons and environment variables
 add-ons = List of add-ons to enable. To get the list of possible add-ons, use the @|fg(yellow) show-add-ons|@ command.
 deployments = List of path to war|jar|ear files to scan.
 input-feature-packs-file = Galleon feature-packs used by wildfly-glow are retrieved from an online registry. To override the set of feature-packs you can specify a path to a Galleon provisioning XML file containing the set of Galleon feature-packs to be used by wildfly-glow.
-provision = The kind of provisioning to produce based on what has been discovered. Can be @|fg(yellow) SERVER|@: a provisioned WildFly server, @|fg(yellow) BOOTABLE_JAR|@: a WildFly Bootable JAR, @|fg(yellow) DOCKER_IMAGE|@: a Docker image or @|fg(yellow) PROVISIONING_XML|@: a Galleon provisioning.xml file.
+provision = The kind of provisioning to produce based on what has been discovered. Can be @|fg(yellow) SERVER|@: a provisioned WildFly server, @|fg(yellow) BOOTABLE_JAR|@: a WildFly Bootable JAR, @|fg(yellow) DOCKER_IMAGE|@: a Docker image, @|fg(yellow) OPENSHIFT|@: a server built and deployed on OpenShift (you must be logged in to a cluster), or @|fg(yellow) PROVISIONING_XML|@: a Galleon provisioning.xml file.
 output-dir = If specifying to provision, the directory where the result will be output.
 wildfly-preview = Use only WildFly preview feature-packs as input.
 stability-level = Specify a stability to be used when provisioning a server. The stability is also used to identify server features that would be not enabled by the specified stability. The stability is by default the minimum stability of each Galleon feature-packs. The stability can be @|fg(yellow) default|@, @|fg(yellow) community|@, @|fg(yellow) preview|@, @|fg(yellow) experimental|@.
+env-file = The path to a file that contains environment variables (in the form env=value) to be passed to the OpenShift deployment. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning.
+disable-deployers = A comma-separated list of deployer names to disable. To retrieve all the deployer names, call the @|fg(yellow) show-configuration|@ command. To disable them all, use the @|fg(yellow) ALL|@ value. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning.
+
 usage.synopsisHeading = %nUsage:\u0020
 # for the main command do not prepend with new line character:
 wildfly-glow.usage.synopsisHeading = Usage:\u0020
diff --git a/core/src/main/java/org/wildfly/glow/GlowSession.java b/core/src/main/java/org/wildfly/glow/GlowSession.java
index b40dc1da..bad872dc 100644
--- a/core/src/main/java/org/wildfly/glow/GlowSession.java
+++ b/core/src/main/java/org/wildfly/glow/GlowSession.java
@@ -577,6 +577,8 @@ OutputContent outputConfig(ScanResults scanResults, Path target, String dockerIm
             if (scanResults.getErrorSession().hasErrors()) {
                 writer.warn("You are provisioning a server although some errors still exist. You should first fix them.");
             }
+        }
+        if (!OutputFormat.PROVISIONING_XML.equals(arguments.getOutput()) && !OutputFormat.OPENSHIFT.equals(arguments.getOutput())) {
             Path generatedArtifact = provisionServer(arguments.getBinaries(),
                     scanResults.getProvisioningConfig(), resolver, arguments.getOutput(),
                     arguments.isCloud(), target);
@@ -601,8 +603,18 @@ OutputContent outputConfig(ScanResults scanResults, Path target, String dockerIm
                     files.put(OutputContent.OutputFile.SERVER_DIR, generatedArtifact.toAbsolutePath());
                     break;
                 }
+                case OPENSHIFT: {
+                    Files.createDirectories(target.resolve("galleon"));
+                    Path prov = target.resolve("provisioning.xml");
+                    provisioning.storeProvisioningConfig(scanResults.getProvisioningConfig(),prov);
+                    files.put(OutputContent.OutputFile.PROVISIONING_XML_FILE, prov.toAbsolutePath());
+                    break;
+                }
             }
         } else {
+            if (OutputFormat.OPENSHIFT.equals(arguments.getOutput())) {
+                target = target.resolve("galleon");
+            }
             Files.createDirectories(target);
             Path prov = target.resolve("provisioning.xml");
             provisioning.storeProvisioningConfig(scanResults.getProvisioningConfig(),prov);
diff --git a/core/src/main/java/org/wildfly/glow/OutputFormat.java b/core/src/main/java/org/wildfly/glow/OutputFormat.java
index f2d9f8d2..dfe73781 100644
--- a/core/src/main/java/org/wildfly/glow/OutputFormat.java
+++ b/core/src/main/java/org/wildfly/glow/OutputFormat.java
@@ -25,7 +25,8 @@ public enum OutputFormat {
     PROVISIONING_XML("provisioning.xml", "Galleon provisioning file usable with Galleon tooling."),
     SERVER("server", "Provision a WildFly server."),
     BOOTABLE_JAR("bootable-jar", "Provision a WildFly bootable jar."),
-    DOCKER_IMAGE("docker-image", "Produce a docker image.");
+    DOCKER_IMAGE("docker-image", "Produce a docker image."),
+    OPENSHIFT("openshift", "Build and deploy on OpenShift.");
 
     public final String name;
     public final String description;
diff --git a/docs/guide/intro/index.adoc b/docs/guide/intro/index.adoc
index 879541c7..83b4e167 100644
--- a/docs/guide/intro/index.adoc
+++ b/docs/guide/intro/index.adoc
@@ -34,7 +34,44 @@ WildFly Glow does more than identifying Galleon Feature-packs and Layers.
 
 #### Provisioning
 
-WildFly Glow CLI allows you to provision a WildFly server, a WildFly Bootable JAR or produce a Docker image.
+WildFly Glow CLI allows you to provision a WildFly server or a WildFly Bootable JAR, produce a Docker image, and deploy your application to OpenShift.
+
+##### OpenShift provisioning
+
+A good way to start with OpenShift is by using the link:https://developers.redhat.com/developer-sandbox[OpenShift Sandbox]. 
+You can create a cluster in a few clicks.
+
+###### Pre-requisites
+
+Once you have a cluster up and running, a few steps are needed for WildFly Glow to log in to the cluster:
+
+* Download the `oc` command from your cluster. Click on the `?` icon in the top right corner of the OpenShift admin console, then select `Command Line Tools`, 
+then download `oc` for your platform.
+* Retrieve the command to log in to the cluster. Click on your user name in the top right corner, 
+then select `Copy login command`. This opens a page; copy the login command.
+It should look something like: `oc login --token=sha256~ITC16QZxiVk5vm7NCdrRIx2yqvlB-L_6Wg-BrtIhnLE --server=https://api.sandbox-m3.1530.p1.openshiftapps.com:6443`
+
+* Paste the login command in your terminal.
+
+WildFly Glow can now interact with your cluster.
+
+###### Deploying to OpenShift
+
+This is done with the `wildfly-glow` command-line tool by passing `--cloud --provision OPENSHIFT` to the `scan` command. For example:
+
+`wildfly-glow scan examples/kitchensink.war --cloud --provision OPENSHIFT`
+
+The kitchensink WAR file is analyzed and the Galleon configuration is generated, then both the WAR and the configuration are sent to OpenShift to provision the server 
+and create your application deployment.
+
+At the end of the build, the application is deployed and the route to your application inside the cluster is printed. 
+Use it to interact with your application.
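+
+The OpenShift provisioning also introduces two options that you can add to the `scan` command: `--env-file` and `--disable-deployers`. A hypothetical invocation (the `app.env` file name is just an example):
+
+`wildfly-glow scan examples/kitchensink.war --cloud --provision OPENSHIFT --env-file app.env --disable-deployers keycloak,artemis_jms_broker`
+
+The `--env-file` option points to a file containing `env=value` lines that are passed to the OpenShift deployment, and `--disable-deployers` disables the listed deployers (use the `ALL` value to disable them all; the deployer names are listed by the `show-configuration` command).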
+
+###### Automatic deployment of PostgreSQL, Artemis JMS Broker and Keycloak
+
+If WildFly Glow detects the need for these technologies, it automatically deploys the required servers and binds the application to them.
+
+This feature is currently specified by this link:https://github.com/wildfly/wildfly-glow/issues/49[GitHub Issue].
 
 #### WildFly additional features discovery
 
diff --git a/openshift-deployment/api/pom.xml b/openshift-deployment/api/pom.xml
new file mode 100644
index 00000000..40098953
--- /dev/null
+++ b/openshift-deployment/api/pom.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.wildfly.glow</groupId>
+        <artifactId>wildfly-glow-openshift-deployment</artifactId>
+        <version>1.0.0.Beta10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>wildfly-glow-openshift-deployment-api</artifactId>
+    <packaging>jar</packaging>
+    <dependencies>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.fabric8</groupId>
+            <artifactId>openshift-client</artifactId>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java
new file mode 100644
index 00000000..9429cbd6
--- /dev/null
+++ b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java
@@ -0,0 +1,53 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.api;
+
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+
+/**
+ *
+ * @author jdenise
+ */
+public interface Deployer {
+
+    static final String LABEL = "deployment";
+
+    String getName();
+
+    Map<String, String> deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map<String, String> env, String appHost, String appName, String matching) throws Exception;
+
+    default Map<String, String> disabledDeploy(String appHost, String appName, String matching, Map<String, String> env) {
+        return Collections.emptyMap();
+    }
+
+    default Set<String> getSupportedLayers() {
+        return Collections.emptySet();
+    }
+
+    default String getSupportedAddOnFamily() {
+        return null;
+    }
+
+    default Set<String> getSupportedAddOns() {
+        return Collections.emptySet();
+    }
+}
diff --git a/openshift-deployment/artemis-broker/pom.xml b/openshift-deployment/artemis-broker/pom.xml
new file mode 100644
index 00000000..17a9dc56
--- /dev/null
+++ b/openshift-deployment/artemis-broker/pom.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.wildfly.glow</groupId>
+        <artifactId>wildfly-glow-openshift-deployment</artifactId>
+        <version>1.0.0.Beta10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>wildfly-glow-openshift-deployment-artemis</artifactId>
+    <packaging>jar</packaging>
+    <dependencies>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-api</artifactId>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/openshift-deployment/artemis-broker/src/main/java/org/wildfly/glow/deployment/openshift/artemis/ArtemisDeployer.java b/openshift-deployment/artemis-broker/src/main/java/org/wildfly/glow/deployment/openshift/artemis/ArtemisDeployer.java
new file mode 100644
index 00000000..312dff5c
--- /dev/null
+++ b/openshift-deployment/artemis-broker/src/main/java/org/wildfly/glow/deployment/openshift/artemis/ArtemisDeployer.java
@@ -0,0 +1,153 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.artemis;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerPort;
+import io.fabric8.kubernetes.api.model.EnvVar;
+import io.fabric8.kubernetes.api.model.IntOrString;
+import io.fabric8.kubernetes.api.model.Service;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
+import io.fabric8.kubernetes.api.model.ServicePort;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+public class ArtemisDeployer implements Deployer {
+
+    private static final String REMOTE_BROKER_NAME = "artemis-broker";
+    private static final String REMOTE_BROKER_PASSWORD = "admin";
+    private static final String REMOTE_BROKER_USER = "admin";
+
+    private static final Map<String, String> REMOTE_BROKER_CONNECTION_MAP = new HashMap<>();
+    private static final Map<String, String> REMOTE_BROKER_APP_MAP = new HashMap<>();
+
+    static {
+
+        REMOTE_BROKER_CONNECTION_MAP.put("AMQ_USER", REMOTE_BROKER_USER);
+        REMOTE_BROKER_CONNECTION_MAP.put("AMQ_PASSWORD", REMOTE_BROKER_PASSWORD);
+        REMOTE_BROKER_CONNECTION_MAP.put("AMQ_DATA_DIR", "/home/jboss/data");
+
+        REMOTE_BROKER_APP_MAP.put("MQ_SERVICE_PREFIX_MAPPING", "broker-amq7=BROKER_AMQ");
+        REMOTE_BROKER_APP_MAP.put("MQ_SERVICE_PREFIX_MAPPING", "broker-amq7=BROKER_AMQ");
+        REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_TCP_SERVICE_HOST", REMOTE_BROKER_NAME);
+        REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_TCP_SERVICE_PORT", "61616");
+        REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_PASSWORD", REMOTE_BROKER_PASSWORD);
+        REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_USERNAME", REMOTE_BROKER_USER);
+    }
+
+    @Override
+    public Map<String, String> deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient,
+            Map<String, String> env, String appHost, String appName, String matching) throws Exception {
+        writer.info("\nDeploying Artemis Messaging Broker");
+        Map<String, String> labels = new HashMap<>();
+        labels.put(LABEL, REMOTE_BROKER_NAME);
+        ContainerPort port = new ContainerPort();
+        port.setContainerPort(61616);
+        port.setProtocol("TCP");
+        List<ContainerPort> ports = new ArrayList<>();
+        ports.add(port);
+        List<EnvVar> vars = new ArrayList<>();
+        for (Map.Entry<String, String> entry : REMOTE_BROKER_CONNECTION_MAP.entrySet()) {
+            vars.add(new EnvVar().toBuilder().withName(entry.getKey()).withValue(entry.getValue()).build());
+        }
+        Container container = new Container();
+        container.setName(REMOTE_BROKER_NAME);
+        container.setImage("quay.io/artemiscloud/activemq-artemis-broker-kubernetes");
+        container.setPorts(ports);
+        container.setEnv(vars);
+        container.setImagePullPolicy("IfNotPresent");
+
+        Deployment deployment = new DeploymentBuilder().withNewMetadata().withName(REMOTE_BROKER_NAME).endMetadata().
+                withNewSpec().withReplicas(1).
+                withNewSelector().withMatchLabels(labels).endSelector().
+                withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec().
+                withContainers(container).withRestartPolicy("Always").
+                endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build();
+        osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(REMOTE_BROKER_NAME + "-deployment.yaml"), Serialization.asYaml(deployment).getBytes());
+        IntOrString v = new IntOrString();
+        v.setValue(61616);
+        Service service = new ServiceBuilder().withNewMetadata().withName(REMOTE_BROKER_NAME).endMetadata().
+                withNewSpec().withPorts(new ServicePort().toBuilder().withName("61616-tcp").withProtocol("TCP").
+                        withPort(61616).
+                        withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build();
+        osClient.services().resource(service).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(REMOTE_BROKER_NAME + "-service.yaml"), Serialization.asYaml(service).getBytes());
+        Map<String, String> ret = new HashMap<>();
+        ret.putAll(REMOTE_BROKER_APP_MAP);
+        return ret;
+    }
+    @Override
+    public Map<String, String> disabledDeploy(String appHost, String appName, String matching, Map<String, String> env) {
+        Map<String, String> ret = new HashMap<>();
+        String descriptionPrefix = " Replace the PREFIX with the prefix used in the MQ_SERVICE_PREFIX_MAPPING env variable";
+        String descriptionServiceName = " Replace the SERVICE-NAME with the service name used in the MQ_SERVICE_PREFIX_MAPPING env variable";
+        for (Map.Entry<String, String> entry : env.entrySet()) {
+            if (entry.getKey().startsWith("{PREFIX}")) {
+                String k = entry.getKey().replace("{PREFIX}", "PREFIX");
+                ret.put(k, entry.getValue() + descriptionPrefix);
+            } else if (entry.getKey().startsWith("{SERVICE-NAME}")) {
+                String k = entry.getKey().replace("{SERVICE-NAME}", "SERVICE-NAME");
+                ret.put(k, entry.getValue() + descriptionServiceName);
+            } else if (entry.getKey().startsWith("MQ_SERVICE_PREFIX_MAPPING")) {
+                ret.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return ret;
+    }
+
+    @Override
+    public String getSupportedAddOnFamily() {
+        return "messaging";
+    }
+
+    @Override
+    public Set<String> getSupportedAddOns() {
+        Set<String> ret = new HashSet<>();
+        ret.add("cloud-remote-activemq");
+        return ret;
+    }
+
+    @Override
+    public String getName() {
+        return "artemis_jms_broker";
+    }
+
+}
diff --git a/openshift-deployment/artemis-broker/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer b/openshift-deployment/artemis-broker/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
new file mode 100644
index 00000000..6d475194
--- /dev/null
+++ b/openshift-deployment/artemis-broker/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
@@ -0,0 +1 @@
+org.wildfly.glow.deployment.openshift.artemis.ArtemisDeployer
diff --git a/openshift-deployment/keycloak/pom.xml b/openshift-deployment/keycloak/pom.xml
new file mode 100644
index 00000000..d31a9a6b
--- /dev/null
+++ b/openshift-deployment/keycloak/pom.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.wildfly.glow</groupId>
+        <artifactId>wildfly-glow-openshift-deployment</artifactId>
+        <version>1.0.0.Beta10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>wildfly-glow-openshift-deployment-keycloak</artifactId>
+    <packaging>jar</packaging>
+    <dependencies>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-api</artifactId>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java b/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java
new file mode 100644
index 00000000..c894e69a
--- /dev/null
+++ b/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java
@@ -0,0 +1,143 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.keycloak;
+
+import io.fabric8.kubernetes.api.model.KubernetesList;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.api.model.DeploymentConfig;
+import io.fabric8.openshift.api.model.DeploymentConfigBuilder;
+import io.fabric8.openshift.api.model.Route;
+import io.fabric8.openshift.api.model.RouteBuilder;
+import io.fabric8.openshift.api.model.Template;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+public class KeycloakDeployer implements Deployer {
+
+    private static final String KEYCLOAK_TEMPLATE_URL = "https://raw.githubusercontent.com/keycloak/keycloak-quickstarts/23.0.7/openshift/keycloak.yaml";
+    private static final String KEYCLOAK_NAME = "keycloak";
+    private static final String WILDFLY_REALM_PATH = "/realms/WildFly";
+    private static final String KEYCLOAK_ADMIN = "admin";
+    private static final String KEYCLOAK_ADMIN_PASSWORD = "admin";
+    private static final String KEYCLOAK_DEMO = "demo";
+    private static final String KEYCLOAK_DEMO_PASSWORD = "demo";
+    private static final String KEYCLOAK_ADMIN_ENV = "KEYCLOAK_ADMIN";
+    private static final String KEYCLOAK_ADMIN_PASSWORD_ENV = "KEYCLOAK_ADMIN_PASSWORD";
+    private static final String OIDC_PROVIDER_URL_ENV = "OIDC_PROVIDER_URL";
+    private static final String OIDC_PROVIDER_NAME_ENV = "OIDC_PROVIDER_NAME";
+    private static final String OIDC_SECURE_DEPLOYMENT_SECRET_ENV = "OIDC_SECURE_DEPLOYMENT_SECRET";
+    private static final String OIDC_USER_NAME_ENV = "OIDC_USER_NAME";
+    private static final String OIDC_USER_PASSWORD_ENV = "OIDC_USER_PASSWORD";
+    private static final String OIDC_HOSTNAME_HTTPS_ENV = "OIDC_HOSTNAME_HTTPS";
+    private static final String MYSECRET = "mysecret";
+    private static final String NAMESPACE_ENV = "NAMESPACE";
+
+    @Override
+    public Map<String, String> disabledDeploy(String appHost, String appName, String matching, Map<String, String> env) {
+        Map<String, String> ret = new HashMap<>();
+        ret.put(OIDC_HOSTNAME_HTTPS_ENV, appHost);
+        ret.putAll(getExistingEnv(env));
+        return ret;
+    }
+
+    private Map<String, String> getExistingEnv(Map<String, String> env) {
+        Map<String, String> ret = new HashMap<>();
+        for(Map.Entry<String, String> entry : env.entrySet()) {
+            if(entry.getKey().startsWith("OIDC_")) {
+                ret.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return ret;
+    }
+
+    @Override
+    public Map<String, String> deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map<String, String> env,
+            String appHost, String appName, String matching) throws Exception {
+        writer.info("\nDeploying Keycloak server");
+        Map<String, String> parameters = new HashMap<>();
+        parameters.put(KEYCLOAK_ADMIN_ENV, KEYCLOAK_ADMIN);
+        parameters.put(KEYCLOAK_ADMIN_PASSWORD_ENV, KEYCLOAK_ADMIN_PASSWORD);
+        parameters.put(NAMESPACE_ENV, osClient.getNamespace());
+        Template t = osClient.templates().
+                load(new URL(KEYCLOAK_TEMPLATE_URL)).createOr(NonDeletingOperation::update);
+        final KubernetesList processedTemplateWithCustomParameters = osClient.templates().
+                withName(KEYCLOAK_NAME)
+                .process(parameters);
+        osClient.resourceList(processedTemplateWithCustomParameters).createOrReplace();
+        Files.write(target.resolve(KEYCLOAK_NAME + "-resources.yaml"), Serialization.asYaml(processedTemplateWithCustomParameters).getBytes());
+        writer.info("Waiting until keycloak is ready ...");
+        DeploymentConfig dc = new DeploymentConfigBuilder().withNewMetadata().withName(KEYCLOAK_NAME).endMetadata().build();
+        osClient.resources(DeploymentConfig.class).resource(dc).waitUntilReady(5, TimeUnit.MINUTES);
+
+        Route route = new RouteBuilder().withNewMetadata().withName(KEYCLOAK_NAME).
+                endMetadata().build();
+        String host = osClient.routes().resource(route).get().getSpec().getHost();
+        String url = "https://" + host;
+        writer.info("\nKeycloak route: " + url);
+        Map<String, String> retEnv = new HashMap<>();
+        String realmUrl = url + WILDFLY_REALM_PATH;
+        writer.warn("\nNOTE: Some actions must be taken from the keycloack console.");
+        writer.warn("1- Use admin/admin to log to the console " + url);
+        writer.warn("2- Create a realm named WildFly");
+        writer.warn("3- Create a user named demo, password demo");
+        writer.warn("4- Create a role needed by your application and assign it to the demo user");
+        if (env.containsKey(OIDC_PROVIDER_URL_ENV)) {
+            writer.warn("5- Assign the role 'realm-management create-client' to the demo user");
+            writer.warn("\nNOTE: In case your application is deployed prior you completed the keycloak admin tasks, make sure to re-deploy your application.");
+        } else {
+            writer.warn("5 - Create an OIDC Client named the way your OIDC configuration expects it. "
+                    + "Set its Root URL to  'https://" + appHost + ("ROOT.war".equals(appName) ? "" : "/" + appName) + "'");
+        }
+        retEnv.put(OIDC_PROVIDER_URL_ENV, realmUrl);
+        if (env.containsKey(OIDC_PROVIDER_URL_ENV)) {
+            retEnv.put(OIDC_PROVIDER_NAME_ENV, KEYCLOAK_NAME);
+            retEnv.put(OIDC_SECURE_DEPLOYMENT_SECRET_ENV, MYSECRET);
+            retEnv.put(OIDC_USER_NAME_ENV, KEYCLOAK_DEMO);
+            retEnv.put(OIDC_USER_PASSWORD_ENV, KEYCLOAK_DEMO_PASSWORD);
+            retEnv.put(OIDC_HOSTNAME_HTTPS_ENV, appHost);
+        }
+        return retEnv;
+    }
+
+    @Override
+    public Set<String> getSupportedLayers() {
+        Set<String> ret = new HashSet<>();
+        ret.add("elytron-oidc-client");
+        return ret;
+    }
+
+    @Override
+    public String getName() {
+        return KEYCLOAK_NAME;
+    }
+
+}
diff --git a/openshift-deployment/keycloak/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer b/openshift-deployment/keycloak/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
new file mode 100644
index 00000000..d5ea509f
--- /dev/null
+++ b/openshift-deployment/keycloak/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
@@ -0,0 +1,2 @@
+org.wildfly.glow.deployment.openshift.keycloak.KeycloakDeployer
+
diff --git a/openshift-deployment/pom.xml b/openshift-deployment/pom.xml
new file mode 100644
index 00000000..93d592f5
--- /dev/null
+++ b/openshift-deployment/pom.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.wildfly.glow</groupId>
+        <artifactId>wildfly-glow-parent</artifactId>
+        <version>1.0.0.Beta10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>wildfly-glow-openshift-deployment</artifactId>
+    <packaging>pom</packaging>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+        <location.checkstyle.xml>../checkstyle/checkstyle.xml</location.checkstyle.xml>
+    </properties>
+    <modules>
+        <module>api</module>
+        <module>keycloak</module>
+        <module>postgresql</module>
+        <module>artemis-broker</module>
+    </modules>
+</project>
\ No newline at end of file
diff --git a/openshift-deployment/postgresql/pom.xml b/openshift-deployment/postgresql/pom.xml
new file mode 100644
index 00000000..64a73a75
--- /dev/null
+++ b/openshift-deployment/postgresql/pom.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.wildfly.glow</groupId>
+        <artifactId>wildfly-glow-openshift-deployment</artifactId>
+        <version>1.0.0.Beta10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>wildfly-glow-openshift-deployment-postgresql</artifactId>
+    <packaging>jar</packaging>
+    <dependencies>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>wildfly-glow-openshift-deployment-api</artifactId>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/openshift-deployment/postgresql/src/main/java/org/wildfly/glow/deployment/openshift/postgresql/PostgreSQLDeployer.java b/openshift-deployment/postgresql/src/main/java/org/wildfly/glow/deployment/openshift/postgresql/PostgreSQLDeployer.java
new file mode 100644
index 00000000..00365c87
--- /dev/null
+++ b/openshift-deployment/postgresql/src/main/java/org/wildfly/glow/deployment/openshift/postgresql/PostgreSQLDeployer.java
@@ -0,0 +1,147 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.postgresql;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerPort;
+import io.fabric8.kubernetes.api.model.EnvVar;
+import io.fabric8.kubernetes.api.model.IntOrString;
+import io.fabric8.kubernetes.api.model.Service;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
+import io.fabric8.kubernetes.api.model.ServicePort;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+public class PostgreSQLDeployer implements Deployer {
+
+    private static final String POSTGRESQL_NAME = "postgresql";
+    private static final String POSTGRESQL_SAMPLEDB = "sampledb";
+    private static final String POSTGRESQL_PASSWORD = "admin";
+    private static final String POSTGRESQL_USER = "admin";
+    private static final String POSTGRESQL_SERVICE_PORT = "5432";
+    private static final String POSTGRESQL_SERVICE_HOST = POSTGRESQL_NAME;
+
+    private static final String POSTGRESQL_SERVICE_PORT_ENV = "POSTGRESQL_SERVICE_PORT";
+    private static final String POSTGRESQL_SERVICE_HOST_ENV = "POSTGRESQL_SERVICE_HOST";
+    private static final Map<String, String> POSTGRESQL_CONNECTION_MAP = new HashMap<>();
+    private static final Map<String, String> POSTGRESQL_APP_MAP = new HashMap<>();
+
+    static {
+        POSTGRESQL_CONNECTION_MAP.put("POSTGRESQL_DATABASE", POSTGRESQL_SAMPLEDB);
+        POSTGRESQL_CONNECTION_MAP.put("POSTGRESQL_PASSWORD", POSTGRESQL_PASSWORD);
+        POSTGRESQL_CONNECTION_MAP.put("POSTGRESQL_USER", POSTGRESQL_USER);
+        POSTGRESQL_APP_MAP.putAll(POSTGRESQL_CONNECTION_MAP);
+        POSTGRESQL_APP_MAP.put(POSTGRESQL_SERVICE_PORT_ENV, POSTGRESQL_SERVICE_PORT);
+        POSTGRESQL_APP_MAP.put(POSTGRESQL_SERVICE_HOST_ENV, POSTGRESQL_SERVICE_HOST);
+    }
+
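+    // Called when this deployer is disabled: describe the PostgreSQL env variables the
+    // application will need (plus any already provided), without deploying a server.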
+    @Override
+    public Map<String, String> disabledDeploy(String appHost, String appName, String matching, Map<String, String> env) {
+        Map<String, String> ret = new HashMap<>();
+        ret.put(POSTGRESQL_SERVICE_HOST_ENV, "PostgreSQL server host name.");
+        ret.put(POSTGRESQL_SERVICE_PORT_ENV, "PostgreSQL server port.");
+        ret.putAll(getExistingEnv(env));
+        return ret;
+    }
+
+    private Map<String, String> getExistingEnv(Map<String, String> env) {
+        Map<String, String> ret = new HashMap<>();
+        for(Entry<String, String> entry : env.entrySet()) {
+            if(entry.getKey().startsWith("POSTGRESQL_")) {
+                ret.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return ret;
+    }
+
+    @Override
+    public Map<String, String> deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient,
+            Map<String, String> env, String appHost, String appName, String matching) throws Exception {
+        writer.info("\nDeploying PosgreSQL server");
+        Map<String, String> labels = new HashMap<>();
+        labels.put(LABEL, POSTGRESQL_NAME);
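+        // The same labels are used for the pod template, the Deployment selector and the Service selector.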
+        ContainerPort port = new ContainerPort();
+        port.setContainerPort(5432);
+        port.setProtocol("TCP");
+        List<ContainerPort> ports = new ArrayList<>();
+        ports.add(port);
+        List<EnvVar> vars = new ArrayList<>();
+        for (Map.Entry<String, String> entry : POSTGRESQL_CONNECTION_MAP.entrySet()) {
+            vars.add(new EnvVar().toBuilder().withName(entry.getKey()).withValue(entry.getValue()).build());
+        }
+        Container container = new Container();
+        container.setName(POSTGRESQL_NAME);
+        container.setImage("registry.redhat.io/rhel8/postgresql-15");
+        container.setPorts(ports);
+        container.setEnv(vars);
+        container.setImagePullPolicy("IfNotPresent");
+
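+        // Single-replica Deployment running the PostgreSQL image; the generated resource is also
+        // written to disk as YAML next to the other generated resources.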
+        Deployment deployment = new DeploymentBuilder().withNewMetadata().withName(POSTGRESQL_NAME).endMetadata().
+                withNewSpec().withReplicas(1).
+                withNewSelector().withMatchLabels(labels).endSelector().
+                withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec().
+                withContainers(container).withRestartPolicy("Always").
+                endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build();
+        osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(POSTGRESQL_NAME + "-deployment.yaml"), Serialization.asYaml(deployment).getBytes());
+        IntOrString v = new IntOrString();
+        v.setValue(5432);
+        Service service = new ServiceBuilder().withNewMetadata().withName(POSTGRESQL_NAME).endMetadata().
+                withNewSpec().withPorts(new ServicePort().toBuilder().withName("5432-tcp").withProtocol("TCP").
+                        withPort(5432).
+                        withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build();
+        osClient.services().resource(service).createOr(NonDeletingOperation::update);
+        Files.write(target.resolve(POSTGRESQL_NAME + "-service.yaml"), Serialization.asYaml(service).getBytes());
+        Map<String, String> ret = new HashMap<>();
+        ret.putAll(getExistingEnv(env));
+        ret.putAll(POSTGRESQL_APP_MAP);
+        return ret;
+    }
+
+    @Override
+    public Set<String> getSupportedLayers() {
+        Set<String> ret = new HashSet<>();
+        ret.add("postgresql-datasource");
+        ret.add("postgresql-driver");
+        return ret;
+    }
+
+    @Override
+    public String getName() {
+        return "postgresql";
+    }
+
+}
diff --git a/openshift-deployment/postgresql/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer b/openshift-deployment/postgresql/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
new file mode 100644
index 00000000..7c04fbd4
--- /dev/null
+++ b/openshift-deployment/postgresql/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
@@ -0,0 +1,2 @@
+org.wildfly.glow.deployment.openshift.postgresql.PostgreSQLDeployer
+
diff --git a/pom.xml b/pom.xml
index 406da55a..9a2ae45d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -41,6 +41,7 @@
         <version.org.apache.maven.plugin-plugin>3.6.4</version.org.apache.maven.plugin-plugin>
         <version.org.apache.maven.shared>3.2.1</version.org.apache.maven.shared>
         <version.org.apache.surefire.plugin>2.22.2</version.org.apache.surefire.plugin>
+        <version.io.fabric8>6.10.0</version.io.fabric8>
         <!-- arquillian plugin -->
         <version.org.jboss.arquillian.core>1.7.0.Alpha13</version.org.jboss.arquillian.core>
         <version.org.jboss.shrinkwrap.descriptors>2.0.0</version.org.jboss.shrinkwrap.descriptors>
@@ -218,6 +219,26 @@
                 <artifactId>wildfly-glow-arquillian-plugin-scanner</artifactId>
                 <version>${project.version}</version>
             </dependency>
+            <dependency>
+                <groupId>${project.groupId}</groupId>
+                <artifactId>wildfly-glow-openshift-deployment-api</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${project.groupId}</groupId>
+                <artifactId>wildfly-glow-openshift-deployment-postgresql</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${project.groupId}</groupId>
+                <artifactId>wildfly-glow-openshift-deployment-artemis</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${project.groupId}</groupId>
+                <artifactId>wildfly-glow-openshift-deployment-keycloak</artifactId>
+                <version>${project.version}</version>
+            </dependency>
             <dependency>
                 <groupId>info.picocli</groupId>
                 <artifactId>picocli</artifactId>
@@ -415,6 +436,11 @@
                 <artifactId>maven-resolver</artifactId>
                 <version>${version.org.wildfly.channel}</version>
             </dependency>
+            <dependency>
+                <groupId>io.fabric8</groupId>
+                <artifactId>openshift-client</artifactId>
+                <version>${version.io.fabric8}</version>
+            </dependency>
             <!-- Test dependencies -->
             <dependency>
                 <groupId>jakarta.annotation</groupId>
@@ -490,6 +516,7 @@
         <module>cli</module>
         <module>docs</module>
         <module>doc-plugin</module>
+        <module>openshift-deployment</module>
         <module>maven-resolver</module>
         <module>tests</module>
     </modules>