diff --git a/templates/ai4eoscvm.yaml b/templates/ai4eoscvm.yaml
new file mode 100644
index 0000000..ff7b582
--- /dev/null
+++ b/templates/ai4eoscvm.yaml
@@ -0,0 +1,110 @@
+tosca_definitions_version: tosca_simple_yaml_1_0
+
+imports:
+  - grycap_custom_types: https://raw.githubusercontent.com/grycap/tosca/main/custom_types.yaml
+
+metadata:
+  template_version: "1.0.0"
+  template_name: AI4EOSC DEEPaaS YOLOv8
+  display_name: Deploy a DEEPaaS AI model (YOLOv8)
+
+
+
+description: TOSCA template for deploying a DEEPaaS AI model on a single VM
+
+topology_template:
+
+  inputs:
+    num_cpus:
+      type: integer
+      description: Number of virtual CPUs for the VM
+      default: 2
+    mem_size:
+      type: scalar-unit.size
+      description: Amount of memory for the VM
+      default: 8 GB
+    disk_size:
+      type: scalar-unit.size
+      description: Size of the root disk of the VM (if 0, the disk will not be resized)
+      default: 100 GB
+
+    num_gpus:
+      type: integer
+      description: Number of GPUs to assign to this VM
+      default: 0
+    gpu_vendor:
+      type: string
+      description: GPU Vendor
+      default: ''
+      constraints:
+        - valid_values: [ '', 'NVIDIA', 'AMD' ]
+    gpu_model:
+      type: string
+      description: GPU Model
+      default: ''
+
+    ai4eosc_model_image:
+      type: string
+      description: Docker image of the model to be deployed
+      default: ai4oshub/ai4os-yolov8-torch
+      required: true
+    ai4eosc_gpu_support:
+      type: boolean
+      description: Enable GPU support (install the NVIDIA drivers and nvidia-docker)
+      default: false
+      constraints:
+        - valid_values: [ false, true ]
+
+  node_templates:
+
+    ai4eosc_model:
+      type: tosca.nodes.ec3.Application
+      artifacts:
+        docker_role:
+          file: grycap.docker
+          type: tosca.artifacts.AnsibleGalaxy.role
+      capabilities:
+        endpoint:
+          properties:
+            ports:
+              https:
+                protocol: tcp
+                source: 443
+      interfaces:
+        Standard:
+          configure:
+            implementation: https://raw.githubusercontent.com/grycap/tosca/main/artifacts/ai4eosc.yml
+            inputs:
+              ai4eosc_model_image: { get_input: ai4eosc_model_image }
+              docker_nvidia_support: { get_input: ai4eosc_gpu_support }
+      requirements:
+        - host: simple_node
+
+    simple_node:
+      type: tosca.nodes.indigo.Compute
+      capabilities:
+        endpoint:
+          properties:
+            network_name: PUBLIC
+        host:
+          properties:
+            disk_size: { get_input: disk_size }
+            num_cpus: { get_input: num_cpus }
+            mem_size: { get_input: mem_size }
+            num_gpus: { get_input: num_gpus }
+            gpu_vendor: { get_input: gpu_vendor }
+            gpu_model: { get_input: gpu_model }
+        os:
+          properties:
+            type: linux
+            distribution: ubuntu
+      # Dummy configure task to trigger the contextualization of the node
+      interfaces:
+        Standard:
+          configure:
+            implementation: https://raw.githubusercontent.com/grycap/tosca/main/artifacts/dummy.yml
+
+
+  outputs:
+    ai4eosc_model_endpoint:
+      value: { concat: [ 'https://', get_attribute: [ simple_node, public_address, 0 ], '/ui' ] }