diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 85cd0a175..265cbf737 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -59,7 +59,7 @@ kpops_components_fields: - values - repo_config - version - producer-app-v3: + producer-app-v2: - name - prefix - from_ @@ -77,7 +77,7 @@ kpops_components_fields: - values - repo_config - version - streams-app-v3: + streams-app-v2: - name - prefix - from_ @@ -95,7 +95,7 @@ kpops_components_fields: - values - repo_config - version - streams-bootstrap-v3: + streams-bootstrap-v2: - name - prefix - from_ @@ -150,20 +150,19 @@ kpops_components_inheritance_ref: - base-defaults-component producer-app: bases: - - kafka-app - streams-bootstrap parents: - - kafka-app - streams-bootstrap + - kafka-app - helm-app - kubernetes-app - pipeline-component - base-defaults-component - producer-app-v3: + producer-app-v2: bases: - - streams-bootstrap-v3 + - streams-bootstrap-v2 parents: - - streams-bootstrap-v3 + - streams-bootstrap-v2 - kafka-app - helm-app - kubernetes-app @@ -171,20 +170,19 @@ kpops_components_inheritance_ref: - base-defaults-component streams-app: bases: - - kafka-app - streams-bootstrap parents: - - kafka-app - streams-bootstrap + - kafka-app - helm-app - kubernetes-app - pipeline-component - base-defaults-component - streams-app-v3: + streams-app-v2: bases: - - streams-bootstrap-v3 + - streams-bootstrap-v2 parents: - - streams-bootstrap-v3 + - streams-bootstrap-v2 - kafka-app - helm-app - kubernetes-app @@ -192,13 +190,15 @@ kpops_components_inheritance_ref: - base-defaults-component streams-bootstrap: bases: + - kafka-app - helm-app parents: + - kafka-app - helm-app - kubernetes-app - pipeline-component - base-defaults-component - streams-bootstrap-v3: + streams-bootstrap-v2: bases: - kafka-app - helm-app diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index 4b936455b..f86b8b302 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -896,23 +896,17 @@ "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap__producer__model__ProducerAppValues" + "$ref": "#/$defs/ProducerAppValues" } ], "description": "streams-bootstrap Helm values" }, "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", + "default": "3.0.1", "description": "Helm chart version", - "title": "Version" + "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", + "title": "Version", + "type": "string" } }, "required": [ @@ -924,7 +918,7 @@ "title": "ProducerApp", "type": "object" }, - "ProducerAppV3": { + "ProducerAppV2": { "additionalProperties": true, "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. 
Note that the producer does not support error topics.", "properties": { @@ -982,23 +976,29 @@ "description": "Topic(s) into which the component will write output" }, "type": { - "const": "producer-app-v3", + "const": "producer-app-v2", "title": "Type" }, "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__producer__model__ProducerAppValues" + "$ref": "#/$defs/ProducerAppV2Values" } ], "description": "streams-bootstrap Helm values" }, "version": { - "default": "3.0.0", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", "description": "Helm chart version", - "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -1007,7 +1007,87 @@ "values", "type" ], - "title": "ProducerAppV3", + "title": "ProducerAppV2", + "type": "object" + }, + "ProducerAppV2Values": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap-v2 app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "nameOverride": { + "anyOf": [ + { + "maxLength": 63, + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/ProducerStreamsConfig" + } + ], + "description": "Kafka Streams settings" + } + }, + "required": [ + "streams" + ], + "title": "ProducerAppV2Values", + "type": "object" + }, + "ProducerAppValues": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "kafka": { + "allOf": [ + { + "$ref": "#/$defs/ProducerConfig" + } + ], + "description": "Kafka Streams settings" + }, + "nameOverride": { + "anyOf": [ + { + "maxLength": 63, + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" + } + }, + "required": [ + "kafka" + ], + "title": "ProducerAppValues", "type": "object" }, "ProducerConfig": { @@ -1248,23 +1328,17 @@ "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsAppValues" + "$ref": "#/$defs/StreamsAppValues" } ], "description": "streams-bootstrap Helm values" }, "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", + "default": "3.0.1", "description": "Helm chart version", - "title": "Version" + "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", + "title": "Version", + "type": "string" } }, "required": [ @@ -1276,9 +1350,9 @@ "title": "StreamsApp", "type": "object" }, - "StreamsAppV3": { + "StreamsAppV2": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams-bootstrap app.", + "description": "StreamsAppV2 component that configures a streams-bootstrap-v2 app.", "properties": { "from": { "anyOf": [ @@ -1341,23 +1415,29 @@ "description": "Topic(s) into which the component will write output" }, "type": { - "const": "streams-app-v3", + "const": "streams-app-v2", "title": 
"Type" }, "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__streams__model__StreamsAppValues" + "$ref": "#/$defs/StreamsAppV2Values" } ], - "description": "streams-bootstrap Helm values" + "description": "streams-bootstrap-v2 Helm values" }, "version": { - "default": "3.0.0", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", "description": "Helm chart version", - "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -1366,150 +1446,198 @@ "values", "type" ], - "title": "StreamsAppV3", + "title": "StreamsAppV2", "type": "object" }, - "StreamsBootstrap": { + "StreamsAppV2Values": { "additionalProperties": true, - "description": "Base for components with a streams-bootstrap Helm chart.", + "description": "streams-bootstrap-v2 app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { - "from": { + "autoscaling": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/$defs/kpops__components__streams_bootstrap_v2__streams__model__StreamsAppAutoScaling" }, { "type": "null" } ], "default": null, - "description": "Topic(s) and/or components from which the component will read input", - "title": "From" - }, - "name": { - "description": "Component name", - "title": "Name", - "type": "string" - }, - "namespace": { - "description": "Kubernetes namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" + "description": "Kubernetes event-driven autoscaling config" }, - "prefix": { - "default": "${pipeline.name}-", - "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", - "title": "Prefix", + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap-v2 app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", "type": "string" }, - "repo_config": { - "allOf": [ - { - "$ref": "#/$defs/HelmRepoConfig" - } - ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" - }, - "to": { + "nameOverride": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "maxLength": 63, + "type": "string" }, { "type": "null" } ], "default": null, - "description": "Topic(s) into which the component will write output" + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" }, - "values": { + "persistence": { "allOf": [ { - "$ref": "#/$defs/StreamsBootstrapValues" + "$ref": "#/$defs/PersistenceConfig" } ], - "description": "streams-bootstrap Helm values" + "default": { + "enabled": false, + "size": null, + "storage_class": null + }, + "description": "" }, - "version": { - "anyOf": [ - { - "type": "string" - }, + "statefulSet": { + "default": false, + "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", + "title": "Statefulset", + "type": "boolean" + }, + "streams": { + "allOf": [ { - "type": "null" + "$ref": "#/$defs/kpops__components__streams_bootstrap_v2__streams__model__StreamsConfig" } ], - "default": "2.9.0", - "description": "Helm chart version", - "title": "Version" + "description": "streams-bootstrap-v2 streams section" } }, "required": [ - "name", - "namespace", - "values" + "streams" ], - "title": "StreamsBootstrap", + "title": "StreamsAppV2Values", "type": "object" }, - "StreamsBootstrapV3": { + "StreamsAppValues": { "additionalProperties": true, - "description": "Base for components with a streams-bootstrap Helm chart.", + "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { - "from": { + "autoscaling": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling" }, { "type": "null" } ], "default": null, - "description": "Topic(s) and/or components from which the component will read input", - "title": "From" - }, - "name": { - "description": "Component name", - "title": "Name", - "type": "string" - }, - "namespace": { - "description": "Kubernetes namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" + "description": "Kubernetes event-driven autoscaling config" }, - "prefix": { - "default": "${pipeline.name}-", - "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", - "title": "Prefix", + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", "type": "string" }, - "repo_config": { + "kafka": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsConfig" } ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, + "description": "streams-bootstrap kafka section" + }, + "nameOverride": { + "anyOf": [ + { + "maxLength": 63, + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" + }, + "persistence": { + "allOf": [ + { + "$ref": "#/$defs/PersistenceConfig" + } + ], + "default": { + "enabled": false, + "size": null, + "storage_class": null + }, + "description": "" + }, + "statefulSet": { + "default": false, + "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", + "title": "Statefulset", + "type": "boolean" + } + }, + "required": [ + "kafka" + ], + "title": "StreamsAppValues", + "type": "object" + }, + "StreamsBootstrap": { + "additionalProperties": true, + "description": "Base for components with a streams-bootstrap Helm chart.", + "properties": { + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Kubernetes namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline.name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, @@ -1530,13 +1658,13 @@ "values": { "allOf": [ { - "$ref": "#/$defs/StreamsBootstrapV3Values" + "$ref": "#/$defs/StreamsBootstrapValues" } ], "description": "streams-bootstrap Helm values" }, "version": { - "default": "3.0.0", + "default": "3.0.1", "description": "Helm chart version", "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", "title": "Version", @@ -1548,56 +1676,110 @@ "namespace", "values" ], - "title": "StreamsBootstrapV3", + "title": "StreamsBootstrap", "type": "object" }, - "StreamsBootstrapV3Values": { + "StreamsBootstrapV2": { "additionalProperties": true, - "description": "Base value class for all streams bootstrap related components.", + "description": "Base for components with a streams-bootstrap-v2 Helm chart.", "properties": { - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", "type": "string" }, - "kafka": { + "namespace": { + "description": "Kubernetes namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline.name}-", + "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { "allOf": [ { - "$ref": "#/$defs/KafkaConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], - "description": "Kafka configuration for the streams-bootstrap app." 
+ "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, - "nameOverride": { + "to": { "anyOf": [ { - "maxLength": 63, - "type": "string" + "$ref": "#/$defs/ToSection" }, { "type": "null" } ], "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" + "description": "Topic(s) into which the component will write output" + }, + "values": { + "allOf": [ + { + "$ref": "#/$defs/StreamsBootstrapV2Values" + } + ], + "description": "streams-bootstrap-v2 Helm values" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" } }, "required": [ - "kafka" + "name", + "namespace", + "values" ], - "title": "StreamsBootstrapV3Values", + "title": "StreamsBootstrapV2", "type": "object" }, - "StreamsBootstrapValues": { + "StreamsBootstrapV2Values": { "additionalProperties": true, - "description": "Base value class for all streams bootstrap related components.", + "description": "Base value class for all streams bootstrap v2 related components.", "properties": { "imageTag": { "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", + "description": "Docker image tag of the streams-bootstrap-v2 app.", "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", "title": "Imagetag", "type": "string" @@ -1628,6 +1810,46 @@ "required": [ "streams" ], + "title": "StreamsBootstrapV2Values", + "type": "object" + }, + "StreamsBootstrapValues": { + "additionalProperties": true, + "description": "Base value class for all streams bootstrap related components.", + "properties": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "kafka": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConfig" + } + ], + "description": "Kafka configuration for the streams-bootstrap app." 
+ }, + "nameOverride": { + "anyOf": [ + { + "maxLength": 63, + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" + } + }, + "required": [ + "kafka" + ], "title": "StreamsBootstrapValues", "type": "object" }, @@ -1759,62 +1981,18 @@ "title": "TopicConfig", "type": "object" }, - "kpops__components__streams_bootstrap__producer__model__ProducerAppValues": { - "additionalProperties": true, - "description": "Settings specific to producers.", - "properties": { - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "nameOverride": { - "anyOf": [ - { - "maxLength": 63, - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - }, - "streams": { - "allOf": [ - { - "$ref": "#/$defs/ProducerStreamsConfig" - } - ], - "description": "Kafka Streams settings" - } - }, - "required": [ - "streams" - ], - "title": "ProducerAppValues", - "type": "object" - }, "kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling": { "additionalProperties": true, "description": "Kubernetes Event-driven Autoscaling config.", "properties": { - "consumerGroup": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag. Mandatory to set when auto-scaling is enabled.", - "title": "Consumer group" + "additionalTriggers": { + "default": [], + "description": "List of additional KEDA triggers, see https://keda.sh/docs/latest/scalers/", + "items": { + "type": "string" + }, + "title": "Additionaltriggers", + "type": "array" }, "cooldownPeriod": { "default": 300, @@ -1824,7 +2002,7 @@ }, "enabled": { "default": false, - "description": "", + "description": "Whether to enable auto-scaling using KEDA.", "title": "Enabled", "type": "boolean" }, @@ -1841,6 +2019,15 @@ "description": "If this property is set, KEDA will scale the resource down to this number of replicas. 
https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", "title": "Idle replica count" }, + "internalTopics": { + "default": [], + "description": "List of auto-generated Kafka Streams topics used by the streams app", + "items": { + "type": "string" + }, + "title": "Internaltopics", + "type": "array" + }, "lagThreshold": { "anyOf": [ { @@ -1880,7 +2067,7 @@ }, "topics": { "default": [], - "description": "List of auto-generated Kafka Streams topics used by the streams app.", + "description": "List of topics used by the streams app", "items": { "type": "string" }, @@ -1891,33 +2078,13 @@ "title": "StreamsAppAutoScaling", "type": "object" }, - "kpops__components__streams_bootstrap__streams__model__StreamsAppValues": { + "kpops__components__streams_bootstrap__streams__model__StreamsConfig": { "additionalProperties": true, - "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "Streams Bootstrap kafka section.", "properties": { - "autoscaling": { - "anyOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Kubernetes event-driven autoscaling config" - }, - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "nameOverride": { + "applicationId": { "anyOf": [ { - "maxLength": 63, "type": "string" }, { @@ -1925,50 +2092,12 @@ } ], "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - }, - "persistence": { - "allOf": [ - { - "$ref": "#/$defs/PersistenceConfig" - } - ], - "default": { - "enabled": false, - "size": null, - "storage_class": null - }, - "description": "" - }, - "statefulSet": { - "default": false, - "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", - "title": "Statefulset", - "type": "boolean" + "description": "Unique application ID for Kafka Streams. 
Required for auto-scaling", + "title": "Unique application ID" }, - "streams": { - "allOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsConfig" - } - ], - "description": "streams-bootstrap streams section" - } - }, - "required": [ - "streams" - ], - "title": "StreamsAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap__streams__model__StreamsConfig": { - "additionalProperties": true, - "description": "Streams Bootstrap streams section.", - "properties": { - "brokers": { + "bootstrapServers": { "description": "Brokers", - "title": "Brokers", + "title": "Bootstrapservers", "type": "string" }, "config": { @@ -1980,17 +2109,29 @@ "deleteOutput": { "anyOf": [ { - "type": "boolean" + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", + "title": "Deleteoutput" + }, + "errorTopic": { + "anyOf": [ + { + "type": "string" }, { "type": "null" } ], "default": null, - "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", - "title": "Deleteoutput" + "description": "Error topic" }, - "errorTopic": { + "inputPattern": { "anyOf": [ { "type": "string" @@ -2000,18 +2141,28 @@ } ], "default": null, - "description": "Error topic" + "description": "Input pattern", + "title": "Inputpattern" }, - "extraInputPatterns": { + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, + "labeledInputPatterns": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra input patterns", - "title": "Extrainputpatterns", + "title": "Labeledinputpatterns", "type": "object" }, - "extraInputTopics": { + "labeledInputTopics": { "additionalProperties": { "items": { "type": "string" @@ -2020,40 +2171,18 @@ }, "default": {}, "description": "Extra input topics", - "title": "Extrainputtopics", + "title": "Labeledinputtopics", "type": "object" }, - "extraOutputTopics": { + "labeledOutputTopics": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra output topics", - "title": "Extraoutputtopics", + "title": "Labeledoutputtopics", "type": "object" }, - "inputPattern": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Input pattern", - "title": "Inputpattern" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, "outputTopic": { "anyOf": [ { @@ -2081,34 +2210,18 @@ } }, "required": [ - "brokers" + "bootstrapServers" ], "title": "StreamsConfig", "type": "object" }, - "kpops__components__streams_bootstrap_v3__producer__model__ProducerAppValues": { + "kpops__components__streams_bootstrap_v2__streams__model__StreamsAppAutoScaling": { "additionalProperties": true, - "description": "Settings specific to producers.", + "description": "Kubernetes Event-driven Autoscaling config.", "properties": { - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "kafka": { - "allOf": [ - { - "$ref": "#/$defs/ProducerConfig" - } - ], - "description": "Kafka Streams settings" - }, - "nameOverride": { 
+ "consumerGroup": { "anyOf": [ { - "maxLength": 63, "type": "string" }, { @@ -2116,28 +2229,8 @@ } ], "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - } - }, - "required": [ - "kafka" - ], - "title": "ProducerAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap_v3__streams__model__StreamsAppAutoScaling": { - "additionalProperties": true, - "description": "Kubernetes Event-driven Autoscaling config.", - "properties": { - "additionalTriggers": { - "default": [], - "description": "List of additional KEDA triggers, see https://keda.sh/docs/latest/scalers/", - "items": { - "type": "string" - }, - "title": "Additionaltriggers", - "type": "array" + "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag. Mandatory to set when auto-scaling is enabled.", + "title": "Consumer group" }, "cooldownPeriod": { "default": 300, @@ -2147,7 +2240,7 @@ }, "enabled": { "default": false, - "description": "Whether to enable auto-scaling using KEDA.", + "description": "", "title": "Enabled", "type": "boolean" }, @@ -2164,15 +2257,6 @@ "description": "If this property is set, KEDA will scale the resource down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", "title": "Idle replica count" }, - "internalTopics": { - "default": [], - "description": "List of auto-generated Kafka Streams topics used by the streams app", - "items": { - "type": "string" - }, - "title": "Internaltopics", - "type": "array" - }, "lagThreshold": { "anyOf": [ { @@ -2212,7 +2296,7 @@ }, "topics": { "default": [], - "description": "List of topics used by the streams app", + "description": "List of auto-generated Kafka Streams topics used by the streams app.", "items": { "type": "string" }, @@ -2223,97 +2307,13 @@ "title": "StreamsAppAutoScaling", "type": "object" }, - "kpops__components__streams_bootstrap_v3__streams__model__StreamsAppValues": { - "additionalProperties": true, - "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", - "properties": { - "autoscaling": { - "anyOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__streams__model__StreamsAppAutoScaling" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Kubernetes event-driven autoscaling config" - }, - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "kafka": { - "allOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__streams__model__StreamsConfig" - } - ], - "description": "streams-bootstrap kafka section" - }, - "nameOverride": { - "anyOf": [ - { - "maxLength": 63, - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - }, - "persistence": { - "allOf": [ - { - "$ref": "#/$defs/PersistenceConfig" - } - ], - "default": { - "enabled": false, - "size": null, - "storage_class": null - }, - "description": "" - }, - "statefulSet": { - "default": false, - "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", - "title": "Statefulset", - "type": "boolean" - } - }, - "required": [ - "kafka" - ], - "title": 
"StreamsAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap_v3__streams__model__StreamsConfig": { + "kpops__components__streams_bootstrap_v2__streams__model__StreamsConfig": { "additionalProperties": true, - "description": "Streams Bootstrap kafka section.", + "description": "Streams Bootstrap streams section.", "properties": { - "applicationId": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Unique application ID for Kafka Streams. Required for auto-scaling", - "title": "Unique application ID" - }, - "bootstrapServers": { + "brokers": { "description": "Brokers", - "title": "Bootstrapservers", + "title": "Brokers", "type": "string" }, "config": { @@ -2347,38 +2347,16 @@ "default": null, "description": "Error topic" }, - "inputPattern": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Input pattern", - "title": "Inputpattern" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, - "labeledInputPatterns": { + "extraInputPatterns": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra input patterns", - "title": "Labeledinputpatterns", + "title": "Extrainputpatterns", "type": "object" }, - "labeledInputTopics": { + "extraInputTopics": { "additionalProperties": { "items": { "type": "string" @@ -2387,18 +2365,40 @@ }, "default": {}, "description": "Extra input topics", - "title": "Labeledinputtopics", + "title": "Extrainputtopics", "type": "object" }, - "labeledOutputTopics": { + "extraOutputTopics": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra output topics", - "title": "Labeledoutputtopics", + "title": "Extraoutputtopics", "type": "object" }, + "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Input pattern", + "title": "Inputpattern" + }, + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, "outputTopic": { "anyOf": [ { @@ -2426,7 +2426,7 @@ } }, "required": [ - "bootstrapServers" + "brokers" ], "title": "StreamsConfig", "type": "object" @@ -2457,20 +2457,20 @@ "producer-app": { "$ref": "#/$defs/ProducerApp" }, - "producer-app-v3": { - "$ref": "#/$defs/ProducerAppV3" + "producer-app-v2": { + "$ref": "#/$defs/ProducerAppV2" }, "streams-app": { "$ref": "#/$defs/StreamsApp" }, - "streams-app-v3": { - "$ref": "#/$defs/StreamsAppV3" + "streams-app-v2": { + "$ref": "#/$defs/StreamsAppV2" }, "streams-bootstrap": { "$ref": "#/$defs/StreamsBootstrap" }, - "streams-bootstrap-v3": { - "$ref": "#/$defs/StreamsBootstrapV3" + "streams-bootstrap-v2": { + "$ref": "#/$defs/StreamsBootstrapV2" } }, "required": [ @@ -2484,9 +2484,9 @@ "producer-app", "streams-app", "streams-bootstrap", - "producer-app-v3", - "streams-app-v3", - "streams-bootstrap-v3" + "producer-app-v2", + "streams-app-v2", + "streams-bootstrap-v2" ], "title": "DefaultsSchema", "type": "object" diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index c5dbac2af..29bcc0dc6 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -556,23 +556,17 @@ "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap__producer__model__ProducerAppValues" + "$ref": "#/$defs/ProducerAppValues" 
} ], "description": "streams-bootstrap Helm values" }, "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", + "default": "3.0.1", "description": "Helm chart version", - "title": "Version" + "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", + "title": "Version", + "type": "string" } }, "required": [ @@ -584,7 +578,7 @@ "title": "ProducerApp", "type": "object" }, - "ProducerAppV3": { + "ProducerAppV2": { "additionalProperties": true, "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. Note that the producer does not support error topics.", "properties": { @@ -642,23 +636,29 @@ "description": "Topic(s) into which the component will write output" }, "type": { - "const": "producer-app-v3", + "const": "producer-app-v2", "title": "Type" }, "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__producer__model__ProducerAppValues" + "$ref": "#/$defs/ProducerAppV2Values" } ], "description": "streams-bootstrap Helm values" }, "version": { - "default": "3.0.0", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", "description": "Helm chart version", - "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -667,7 +667,87 @@ "values", "type" ], - "title": "ProducerAppV3", + "title": "ProducerAppV2", + "type": "object" + }, + "ProducerAppV2Values": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap-v2 app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "nameOverride": { + "anyOf": [ + { + "maxLength": 63, + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/ProducerStreamsConfig" + } + ], + "description": "Kafka Streams settings" + } + }, + "required": [ + "streams" + ], + "title": "ProducerAppV2Values", + "type": "object" + }, + "ProducerAppValues": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "kafka": { + "allOf": [ + { + "$ref": "#/$defs/ProducerConfig" + } + ], + "description": "Kafka Streams settings" + }, + "nameOverride": { + "anyOf": [ + { + "maxLength": 63, + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" + } + }, + "required": [ + "kafka" + ], + "title": "ProducerAppValues", "type": "object" }, "ProducerConfig": { @@ -908,23 +988,17 @@ "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsAppValues" + "$ref": "#/$defs/StreamsAppValues" } ], "description": "streams-bootstrap Helm values" }, "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", + "default": "3.0.1", "description": "Helm chart version", - "title": "Version" + 
"pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", + "title": "Version", + "type": "string" } }, "required": [ @@ -936,9 +1010,9 @@ "title": "StreamsApp", "type": "object" }, - "StreamsAppV3": { + "StreamsAppV2": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams-bootstrap app.", + "description": "StreamsAppV2 component that configures a streams-bootstrap-v2 app.", "properties": { "from": { "anyOf": [ @@ -1001,23 +1075,29 @@ "description": "Topic(s) into which the component will write output" }, "type": { - "const": "streams-app-v3", + "const": "streams-app-v2", "title": "Type" }, "values": { "allOf": [ { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__streams__model__StreamsAppValues" + "$ref": "#/$defs/StreamsAppV2Values" } ], - "description": "streams-bootstrap Helm values" + "description": "streams-bootstrap-v2 Helm values" }, "version": { - "default": "3.0.0", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", "description": "Helm chart version", - "pattern": "^(\\d+)\\.(\\d+)\\.(\\d+)(-[a-zA-Z]+(\\.[a-zA-Z]+)?)?$", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -1026,71 +1106,36 @@ "values", "type" ], - "title": "StreamsAppV3", - "type": "object" - }, - "ToSection": { - "additionalProperties": false, - "description": "Holds multiple output topics.", - "properties": { - "models": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Data models", - "title": "Models", - "type": "object" - }, - "topics": { - "additionalProperties": { - "$ref": "#/$defs/TopicConfig" - }, - "default": {}, - "description": "Output topics", - "title": "Topics", - "type": "object" - } - }, - "title": "ToSection", + "title": "StreamsAppV2", "type": "object" }, - "TopicConfig": { - "additionalProperties": false, - "description": "Configure an output topic.", + "StreamsAppV2Values": { + "additionalProperties": true, + "description": "streams-bootstrap-v2 app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { - "configs": { - "additionalProperties": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "integer" - } - ] - }, - "default": {}, - "description": "Topic configs", - "title": "Configs", - "type": "object" - }, - "key_schema": { + "autoscaling": { "anyOf": [ { - "type": "string" + "$ref": "#/$defs/kpops__components__streams_bootstrap_v2__streams__model__StreamsAppAutoScaling" }, { "type": "null" } ], "default": null, - "description": "Key schema class name", - "title": "Key schema" + "description": "Kubernetes event-driven autoscaling config" }, - "label": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap-v2 app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "nameOverride": { "anyOf": [ { + "maxLength": 63, "type": "string" }, { @@ -1098,49 +1143,174 @@ } ], "default": null, - "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Label" + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" }, - "partitions_count": { + "persistence": { + "allOf": [ + { + "$ref": "#/$defs/PersistenceConfig" + } + ], + "default": { + "enabled": false, + "size": null, + "storage_class": null + }, + "description": "" + }, + 
"statefulSet": { + "default": false, + "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", + "title": "Statefulset", + "type": "boolean" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/kpops__components__streams_bootstrap_v2__streams__model__StreamsConfig" + } + ], + "description": "streams-bootstrap-v2 streams section" + } + }, + "required": [ + "streams" + ], + "title": "StreamsAppV2Values", + "type": "object" + }, + "StreamsAppValues": { + "additionalProperties": true, + "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "properties": { + "autoscaling": { "anyOf": [ { - "type": "integer" + "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling" }, { "type": "null" } ], "default": null, - "description": "Number of partitions into which the topic is divided", - "title": "Partitions count" + "description": "Kubernetes event-driven autoscaling config" }, - "replication_factor": { + "imageTag": { + "default": "latest", + "description": "Docker image tag of the streams-bootstrap app.", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", + "title": "Imagetag", + "type": "string" + }, + "kafka": { + "allOf": [ + { + "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsConfig" + } + ], + "description": "streams-bootstrap kafka section" + }, + "nameOverride": { "anyOf": [ { - "type": "integer" + "maxLength": 63, + "type": "string" }, { "type": "null" } ], "default": null, - "description": "Replication factor of the topic", - "title": "Replication factor" + "description": "Helm chart name override, assigned automatically", + "title": "Nameoverride" }, - "type": { + "persistence": { + "allOf": [ + { + "$ref": "#/$defs/PersistenceConfig" + } + ], + "default": { + "enabled": false, + "size": null, + "storage_class": null + }, + "description": "" + }, + "statefulSet": { + "default": false, + "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", + "title": "Statefulset", + "type": "boolean" + } + }, + "required": [ + "kafka" + ], + "title": "StreamsAppValues", + "type": "object" + }, + "ToSection": { + "additionalProperties": false, + "description": "Holds multiple output topics.", + "properties": { + "models": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Data models", + "title": "Models", + "type": "object" + }, + "topics": { + "additionalProperties": { + "$ref": "#/$defs/TopicConfig" + }, + "default": {}, + "description": "Output topics", + "title": "Topics", + "type": "object" + } + }, + "title": "ToSection", + "type": "object" + }, + "TopicConfig": { + "additionalProperties": false, + "description": "Configure an output topic.", + "properties": { + "configs": { + "additionalProperties": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "default": {}, + "description": "Topic configs", + "title": "Configs", + "type": "object" + }, + "key_schema": { "anyOf": [ { - "$ref": "#/$defs/OutputTopicTypes" + "type": "string" }, { "type": "null" } ], "default": null, - "description": "Topic type", - "title": "Topic type" + "description": "Key schema class name", + "title": "Key schema" }, - "value_schema": { + "label": { "anyOf": [ { "type": "string" @@ -1150,58 +1320,49 @@ } ], "default": null, - "description": "Value schema class name", - "title": "Value schema" 
- } - }, - "title": "TopicConfig", - "type": "object" - }, - "kpops__components__streams_bootstrap__producer__model__ProducerAppValues": { - "additionalProperties": true, - "description": "Settings specific to producers.", - "properties": { - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" + "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", + "title": "Label" }, - "nameOverride": { + "partitions_count": { "anyOf": [ { - "maxLength": 63, - "type": "string" + "type": "integer" }, { "type": "null" } ], "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" + "description": "Number of partitions into which the topic is divided", + "title": "Partitions count" }, - "streams": { - "allOf": [ + "replication_factor": { + "anyOf": [ { - "$ref": "#/$defs/ProducerStreamsConfig" + "type": "integer" + }, + { + "type": "null" } ], - "description": "Kafka Streams settings" - } - }, - "required": [ - "streams" - ], - "title": "ProducerAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling": { - "additionalProperties": true, - "description": "Kubernetes Event-driven Autoscaling config.", - "properties": { - "consumerGroup": { + "default": null, + "description": "Replication factor of the topic", + "title": "Replication factor" + }, + "type": { + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic type", + "title": "Topic type" + }, + "value_schema": { "anyOf": [ { "type": "string" @@ -1211,8 +1372,25 @@ } ], "default": null, - "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag. Mandatory to set when auto-scaling is enabled.", - "title": "Consumer group" + "description": "Value schema class name", + "title": "Value schema" + } + }, + "title": "TopicConfig", + "type": "object" + }, + "kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling": { + "additionalProperties": true, + "description": "Kubernetes Event-driven Autoscaling config.", + "properties": { + "additionalTriggers": { + "default": [], + "description": "List of additional KEDA triggers, see https://keda.sh/docs/latest/scalers/", + "items": { + "type": "string" + }, + "title": "Additionaltriggers", + "type": "array" }, "cooldownPeriod": { "default": 300, @@ -1222,7 +1400,7 @@ }, "enabled": { "default": false, - "description": "", + "description": "Whether to enable auto-scaling using KEDA.", "title": "Enabled", "type": "boolean" }, @@ -1239,6 +1417,15 @@ "description": "If this property is set, KEDA will scale the resource down to this number of replicas. 
https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", "title": "Idle replica count" }, + "internalTopics": { + "default": [], + "description": "List of auto-generated Kafka Streams topics used by the streams app", + "items": { + "type": "string" + }, + "title": "Internaltopics", + "type": "array" + }, "lagThreshold": { "anyOf": [ { @@ -1278,7 +1465,7 @@ }, "topics": { "default": [], - "description": "List of auto-generated Kafka Streams topics used by the streams app.", + "description": "List of topics used by the streams app", "items": { "type": "string" }, @@ -1289,33 +1476,13 @@ "title": "StreamsAppAutoScaling", "type": "object" }, - "kpops__components__streams_bootstrap__streams__model__StreamsAppValues": { + "kpops__components__streams_bootstrap__streams__model__StreamsConfig": { "additionalProperties": true, - "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "Streams Bootstrap kafka section.", "properties": { - "autoscaling": { - "anyOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsAppAutoScaling" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Kubernetes event-driven autoscaling config" - }, - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "nameOverride": { + "applicationId": { "anyOf": [ { - "maxLength": 63, "type": "string" }, { @@ -1323,50 +1490,12 @@ } ], "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - }, - "persistence": { - "allOf": [ - { - "$ref": "#/$defs/PersistenceConfig" - } - ], - "default": { - "enabled": false, - "size": null, - "storage_class": null - }, - "description": "" - }, - "statefulSet": { - "default": false, - "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", - "title": "Statefulset", - "type": "boolean" + "description": "Unique application ID for Kafka Streams. 
Required for auto-scaling", + "title": "Unique application ID" }, - "streams": { - "allOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap__streams__model__StreamsConfig" - } - ], - "description": "streams-bootstrap streams section" - } - }, - "required": [ - "streams" - ], - "title": "StreamsAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap__streams__model__StreamsConfig": { - "additionalProperties": true, - "description": "Streams Bootstrap streams section.", - "properties": { - "brokers": { + "bootstrapServers": { "description": "Brokers", - "title": "Brokers", + "title": "Bootstrapservers", "type": "string" }, "config": { @@ -1378,17 +1507,29 @@ "deleteOutput": { "anyOf": [ { - "type": "boolean" + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", + "title": "Deleteoutput" + }, + "errorTopic": { + "anyOf": [ + { + "type": "string" }, { "type": "null" } ], "default": null, - "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", - "title": "Deleteoutput" + "description": "Error topic" }, - "errorTopic": { + "inputPattern": { "anyOf": [ { "type": "string" @@ -1398,18 +1539,28 @@ } ], "default": null, - "description": "Error topic" + "description": "Input pattern", + "title": "Inputpattern" }, - "extraInputPatterns": { + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, + "labeledInputPatterns": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra input patterns", - "title": "Extrainputpatterns", + "title": "Labeledinputpatterns", "type": "object" }, - "extraInputTopics": { + "labeledInputTopics": { "additionalProperties": { "items": { "type": "string" @@ -1418,40 +1569,18 @@ }, "default": {}, "description": "Extra input topics", - "title": "Extrainputtopics", + "title": "Labeledinputtopics", "type": "object" }, - "extraOutputTopics": { + "labeledOutputTopics": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra output topics", - "title": "Extraoutputtopics", + "title": "Labeledoutputtopics", "type": "object" }, - "inputPattern": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Input pattern", - "title": "Inputpattern" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, "outputTopic": { "anyOf": [ { @@ -1479,34 +1608,18 @@ } }, "required": [ - "brokers" + "bootstrapServers" ], "title": "StreamsConfig", "type": "object" }, - "kpops__components__streams_bootstrap_v3__producer__model__ProducerAppValues": { + "kpops__components__streams_bootstrap_v2__streams__model__StreamsAppAutoScaling": { "additionalProperties": true, - "description": "Settings specific to producers.", + "description": "Kubernetes Event-driven Autoscaling config.", "properties": { - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "kafka": { - "allOf": [ - { - "$ref": "#/$defs/ProducerConfig" - } - ], - "description": "Kafka Streams settings" - }, - "nameOverride": { 
+ "consumerGroup": { "anyOf": [ { - "maxLength": 63, "type": "string" }, { @@ -1514,28 +1627,8 @@ } ], "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - } - }, - "required": [ - "kafka" - ], - "title": "ProducerAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap_v3__streams__model__StreamsAppAutoScaling": { - "additionalProperties": true, - "description": "Kubernetes Event-driven Autoscaling config.", - "properties": { - "additionalTriggers": { - "default": [], - "description": "List of additional KEDA triggers, see https://keda.sh/docs/latest/scalers/", - "items": { - "type": "string" - }, - "title": "Additionaltriggers", - "type": "array" + "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag. Mandatory to set when auto-scaling is enabled.", + "title": "Consumer group" }, "cooldownPeriod": { "default": 300, @@ -1545,7 +1638,7 @@ }, "enabled": { "default": false, - "description": "Whether to enable auto-scaling using KEDA.", + "description": "", "title": "Enabled", "type": "boolean" }, @@ -1562,15 +1655,6 @@ "description": "If this property is set, KEDA will scale the resource down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", "title": "Idle replica count" }, - "internalTopics": { - "default": [], - "description": "List of auto-generated Kafka Streams topics used by the streams app", - "items": { - "type": "string" - }, - "title": "Internaltopics", - "type": "array" - }, "lagThreshold": { "anyOf": [ { @@ -1610,7 +1694,7 @@ }, "topics": { "default": [], - "description": "List of topics used by the streams app", + "description": "List of auto-generated Kafka Streams topics used by the streams app.", "items": { "type": "string" }, @@ -1621,97 +1705,13 @@ "title": "StreamsAppAutoScaling", "type": "object" }, - "kpops__components__streams_bootstrap_v3__streams__model__StreamsAppValues": { - "additionalProperties": true, - "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", - "properties": { - "autoscaling": { - "anyOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__streams__model__StreamsAppAutoScaling" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Kubernetes event-driven autoscaling config" - }, - "imageTag": { - "default": "latest", - "description": "Docker image tag of the streams-bootstrap app.", - "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$", - "title": "Imagetag", - "type": "string" - }, - "kafka": { - "allOf": [ - { - "$ref": "#/$defs/kpops__components__streams_bootstrap_v3__streams__model__StreamsConfig" - } - ], - "description": "streams-bootstrap kafka section" - }, - "nameOverride": { - "anyOf": [ - { - "maxLength": 63, - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Helm chart name override, assigned automatically", - "title": "Nameoverride" - }, - "persistence": { - "allOf": [ - { - "$ref": "#/$defs/PersistenceConfig" - } - ], - "default": { - "enabled": false, - "size": null, - "storage_class": null - }, - "description": "" - }, - "statefulSet": { - "default": false, - "description": "Whether to use a Statefulset instead of a Deployment to deploy the streams app.", - "title": "Statefulset", - "type": "boolean" - } - }, - "required": [ - "kafka" - ], - "title": 
"StreamsAppValues", - "type": "object" - }, - "kpops__components__streams_bootstrap_v3__streams__model__StreamsConfig": { + "kpops__components__streams_bootstrap_v2__streams__model__StreamsConfig": { "additionalProperties": true, - "description": "Streams Bootstrap kafka section.", + "description": "Streams Bootstrap streams section.", "properties": { - "applicationId": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Unique application ID for Kafka Streams. Required for auto-scaling", - "title": "Unique application ID" - }, - "bootstrapServers": { + "brokers": { "description": "Brokers", - "title": "Bootstrapservers", + "title": "Brokers", "type": "string" }, "config": { @@ -1745,38 +1745,16 @@ "default": null, "description": "Error topic" }, - "inputPattern": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Input pattern", - "title": "Inputpattern" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, - "labeledInputPatterns": { + "extraInputPatterns": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra input patterns", - "title": "Labeledinputpatterns", + "title": "Extrainputpatterns", "type": "object" }, - "labeledInputTopics": { + "extraInputTopics": { "additionalProperties": { "items": { "type": "string" @@ -1785,18 +1763,40 @@ }, "default": {}, "description": "Extra input topics", - "title": "Labeledinputtopics", + "title": "Extrainputtopics", "type": "object" }, - "labeledOutputTopics": { + "extraOutputTopics": { "additionalProperties": { "type": "string" }, "default": {}, "description": "Extra output topics", - "title": "Labeledoutputtopics", + "title": "Extraoutputtopics", "type": "object" }, + "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Input pattern", + "title": "Inputpattern" + }, + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, "outputTopic": { "anyOf": [ { @@ -1824,7 +1824,7 @@ } }, "required": [ - "bootstrapServers" + "brokers" ], "title": "StreamsConfig", "type": "object" @@ -1837,9 +1837,9 @@ "kafka-sink-connector": "#/$defs/KafkaSinkConnector", "kafka-source-connector": "#/$defs/KafkaSourceConnector", "producer-app": "#/$defs/ProducerApp", - "producer-app-v3": "#/$defs/ProducerAppV3", + "producer-app-v2": "#/$defs/ProducerAppV2", "streams-app": "#/$defs/StreamsApp", - "streams-app-v3": "#/$defs/StreamsAppV3" + "streams-app-v2": "#/$defs/StreamsAppV2" }, "propertyName": "type" }, @@ -1860,10 +1860,10 @@ "$ref": "#/$defs/StreamsApp" }, { - "$ref": "#/$defs/ProducerAppV3" + "$ref": "#/$defs/ProducerAppV2" }, { - "$ref": "#/$defs/StreamsAppV3" + "$ref": "#/$defs/StreamsAppV2" } ] }, diff --git a/examples b/examples index 608637744..5ea436622 160000 --- a/examples +++ b/examples @@ -1 +1 @@ -Subproject commit 60863774413b2eb08bb5d6020432c7f29a944383 +Subproject commit 5ea436622c3ca857a44762a9b5dd5de678fdc1d4 diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 2d43cba65..83ae82a3a 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -33,7 +33,7 @@ async def clean(self, dry_run: bool) -> None: log.info(f"Uninstall old 
cleanup job for {self.helm_release_name}") await self.destroy(dry_run) - log.info(f"Init cleanup job for {self.helm_release_name}") + log.info(f"Deploy cleanup job for {self.helm_release_name}") await self.deploy(dry_run) if not get_config().retain_clean_jobs: diff --git a/kpops/components/streams_bootstrap/__init__.py b/kpops/components/streams_bootstrap/__init__.py index 19341d74b..f900f809b 100644 --- a/kpops/components/streams_bootstrap/__init__.py +++ b/kpops/components/streams_bootstrap/__init__.py @@ -1,10 +1,6 @@ -from kpops.components.common.streams_bootstrap import StreamsBootstrap +from kpops.components.streams_bootstrap.base import StreamsBootstrap from .producer.producer_app import ProducerApp from .streams.streams_app import StreamsApp -__all__ = ( - "StreamsBootstrap", - "StreamsApp", - "ProducerApp", -) +__all__ = ("StreamsBootstrap", "StreamsApp", "ProducerApp") diff --git a/kpops/components/streams_bootstrap_v3/base.py b/kpops/components/streams_bootstrap/base.py similarity index 84% rename from kpops/components/streams_bootstrap_v3/base.py rename to kpops/components/streams_bootstrap/base.py index 202f17295..c7090e830 100644 --- a/kpops/components/streams_bootstrap_v3/base.py +++ b/kpops/components/streams_bootstrap/base.py @@ -11,7 +11,7 @@ from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig from kpops.components.base_components import KafkaApp from kpops.components.base_components.helm_app import HelmApp -from kpops.components.streams_bootstrap_v3.model import StreamsBootstrapV3Values +from kpops.components.streams_bootstrap.model import StreamsBootstrapValues from kpops.utils.docstring import describe_attr if TYPE_CHECKING: @@ -25,15 +25,14 @@ url="https://bakdata.github.io/streams-bootstrap/", ) -# TODO: Update this with the latest stable version release -STREAMS_BOOTSTRAP_VERSION = "3.0.0" +STREAMS_BOOTSTRAP_VERSION = "3.0.1" STREAMS_BOOTSTRAP_VERSION_PATTERN = r"^(\d+)\.(\d+)\.(\d+)(-[a-zA-Z]+(\.[a-zA-Z]+)?)?$" COMPILED_VERSION_PATTERN = re.compile(STREAMS_BOOTSTRAP_VERSION_PATTERN) -log = logging.getLogger("StreamsBootstrapV3") +log = logging.getLogger("StreamsBootstrap") -class StreamsBootstrapV3(KafkaApp, HelmApp, ABC): +class StreamsBootstrap(KafkaApp, HelmApp, ABC): """Base for components with a streams-bootstrap Helm chart. :param values: streams-bootstrap Helm values @@ -42,7 +41,7 @@ class StreamsBootstrapV3(KafkaApp, HelmApp, ABC): :param version: Helm chart version, defaults to "3.0.0" """ - values: StreamsBootstrapV3Values = Field( + values: StreamsBootstrapValues = Field( description=describe_attr("values", __doc__), ) @@ -70,7 +69,7 @@ def version_validator(cls, version: str) -> str: major = int(major) if major != 3: - msg = f"When using the streams-bootstrap v3 component your version ('{version}') must be at least 3.0.0." + msg = f"When using the streams-bootstrap component your version ('{version}') must be at least 3.0.1." 
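+ # Only 3.x chart versions pass this validator (the major version must be exactly 3); anything else raises below.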
raise ValueError(msg) return version diff --git a/kpops/components/streams_bootstrap_v3/model.py b/kpops/components/streams_bootstrap/model.py similarity index 98% rename from kpops/components/streams_bootstrap_v3/model.py rename to kpops/components/streams_bootstrap/model.py index ee0c2786c..7c4390c02 100644 --- a/kpops/components/streams_bootstrap_v3/model.py +++ b/kpops/components/streams_bootstrap/model.py @@ -19,7 +19,7 @@ IMAGE_TAG_PATTERN = r"^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$" -class StreamsBootstrapV3Values(HelmAppValues): +class StreamsBootstrapValues(HelmAppValues): """Base value class for all streams bootstrap related components. :param image_tag: Docker image tag of the streams-bootstrap app. diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 359776ea0..acf1019c4 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,24 +1,22 @@ from pydantic import ConfigDict, Field -from kpops.components.common.streams_bootstrap import ( - KafkaStreamsConfig, +from kpops.components.streams_bootstrap.model import ( + KafkaConfig, StreamsBootstrapValues, ) from kpops.utils.docstring import describe_attr -class ProducerStreamsConfig(KafkaStreamsConfig): +class ProducerConfig(KafkaConfig): """Kafka Streams settings specific to Producer.""" class ProducerAppValues(StreamsBootstrapValues): """Settings specific to producers. - :param streams: Kafka Streams settings + :param kafka: Kafka Streams settings """ - streams: ProducerStreamsConfig = Field( - description=describe_attr("streams", __doc__), - ) + kafka: ProducerConfig = Field(description=describe_attr("kafka", __doc__)) model_config = ConfigDict(extra="allow") diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 91b6238a9..21cda9ebd 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -4,17 +4,16 @@ from pydantic import Field, ValidationError, computed_field from typing_extensions import override -from kpops.components.base_components.kafka_app import ( - KafkaApp, - KafkaAppCleaner, -) +from kpops.components.base_components.kafka_app import KafkaAppCleaner from kpops.components.common.app_type import AppType -from kpops.components.common.streams_bootstrap import StreamsBootstrap from kpops.components.common.topic import ( KafkaTopic, OutputTopicTypes, TopicConfig, ) +from kpops.components.streams_bootstrap.base import ( + StreamsBootstrap, +) from kpops.components.streams_bootstrap.producer.model import ProducerAppValues from kpops.const.file_type import DEFAULTS_YAML, PIPELINE_YAML from kpops.utils.docstring import describe_attr @@ -33,7 +32,7 @@ def helm_chart(self) -> str: ) -class ProducerApp(KafkaApp, StreamsBootstrap): +class ProducerApp(StreamsBootstrap): """Producer component. 
This producer holds configuration to use as values for the streams-bootstrap @@ -74,20 +73,20 @@ def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: @property @override def output_topic(self) -> KafkaTopic | None: - return self.values.streams.output_topic + return self.values.kafka.output_topic @property @override def extra_output_topics(self) -> dict[str, KafkaTopic]: - return self.values.streams.extra_output_topics + return self.values.kafka.labeled_output_topics @override def set_output_topic(self, topic: KafkaTopic) -> None: - self.values.streams.output_topic = topic + self.values.kafka.output_topic = topic @override def add_extra_output_topic(self, topic: KafkaTopic, label: str) -> None: - self.values.streams.extra_output_topics[label] = topic + self.values.kafka.labeled_output_topics[label] = topic @property @override diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 85847a4a4..36ac57f5b 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -3,14 +3,13 @@ from typing import Any import pydantic -from pydantic import BaseModel, ConfigDict, Field, model_validator +from pydantic import BaseModel, ConfigDict, Field -from kpops.api.exception import ValidationError -from kpops.components.common.streams_bootstrap import ( - KafkaStreamsConfig, +from kpops.components.common.topic import KafkaTopic, KafkaTopicStr +from kpops.components.streams_bootstrap.model import ( + KafkaConfig, StreamsBootstrapValues, ) -from kpops.components.common.topic import KafkaTopic, KafkaTopicStr from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import ( CamelCaseConfigModel, @@ -18,29 +17,35 @@ ) -class StreamsConfig(KafkaStreamsConfig): - """Streams Bootstrap streams section. +class StreamsConfig(KafkaConfig): + """Streams Bootstrap kafka section. + :param application_id: Unique application ID for Kafka Streams. 
Required for auto-scaling :param input_topics: Input topics, defaults to [] :param input_pattern: Input pattern, defaults to None - :param extra_input_topics: Extra input topics, defaults to {} - :param extra_input_patterns: Extra input patterns, defaults to {} + :param labeled_input_topics: Extra input topics, defaults to {} + :param labeled_input_patterns: Extra input patterns, defaults to {} :param error_topic: Error topic, defaults to None :param config: Configuration, defaults to {} :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to None """ + application_id: str | None = Field( + default=None, + title="Unique application ID", + description=describe_attr("application_id", __doc__), + ) input_topics: list[KafkaTopicStr] = Field( default=[], description=describe_attr("input_topics", __doc__) ) input_pattern: str | None = Field( default=None, description=describe_attr("input_pattern", __doc__) ) - extra_input_topics: dict[str, list[KafkaTopicStr]] = Field( - default={}, description=describe_attr("extra_input_topics", __doc__) + labeled_input_topics: dict[str, list[KafkaTopicStr]] = Field( + default={}, description=describe_attr("labeled_input_topics", __doc__) ) - extra_input_patterns: dict[str, str] = Field( - default={}, description=describe_attr("extra_input_patterns", __doc__) + labeled_input_patterns: dict[str, str] = Field( + default={}, description=describe_attr("labeled_input_patterns", __doc__) ) error_topic: KafkaTopicStr | None = Field( default=None, description=describe_attr("error_topic", __doc__) @@ -61,29 +66,29 @@ def deserialize_input_topics( return [KafkaTopic(name=topic_name) for topic_name in input_topics] return input_topics - @pydantic.field_validator("extra_input_topics", mode="before") + @pydantic.field_validator("labeled_input_topics", mode="before") @classmethod - def deserialize_extra_input_topics( - cls, extra_input_topics: dict[str, list[str]] | Any + def deserialize_labeled_input_topics( + cls, labeled_input_topics: dict[str, list[str]] | Any ) -> dict[str, list[KafkaTopic]] | Any: - if isinstance(extra_input_topics, dict): + if isinstance(labeled_input_topics, dict): return { label: [KafkaTopic(name=topic_name) for topic_name in topics] - for label, topics in extra_input_topics.items() + for label, topics in labeled_input_topics.items() } - return extra_input_topics + return labeled_input_topics @pydantic.field_serializer("input_topics") - def serialize_topics(self, topics: list[KafkaTopic]) -> list[str]: - return [topic.name for topic in topics] + def serialize_topics(self, input_topics: list[KafkaTopic]) -> list[str]: + return [topic.name for topic in input_topics] - @pydantic.field_serializer("extra_input_topics") - def serialize_extra_input_topics( - self, extra_topics: dict[str, list[KafkaTopic]] + @pydantic.field_serializer("labeled_input_topics") + def serialize_labeled_input_topics( + self, labeled_input_topics: dict[str, list[KafkaTopic]] ) -> dict[str, list[str]]: return { label: self.serialize_topics(topics) - for label, topics in extra_topics.items() + for label, topics in labeled_input_topics.items() } def add_input_topics(self, topics: list[KafkaTopic]) -> None: @@ -95,16 +100,16 @@ def add_input_topics(self, topics: list[KafkaTopic]) -> None: """ self.input_topics = KafkaTopic.deduplicate(self.input_topics + topics) - def add_extra_input_topics(self, label: str, topics: list[KafkaTopic]) -> None: - """Add given extra topics that share a label 
to the list of extra input topics. + def add_labeled_input_topics(self, label: str, topics: list[KafkaTopic]) -> None: + """Add given labeled topics that share a label to the list of extra input topics. Ensures no duplicate topics in the list. :param topics: Extra input topics :param label: Topic label """ - self.extra_input_topics[label] = KafkaTopic.deduplicate( - self.extra_input_topics.get(label, []) + topics + self.labeled_input_topics[label] = KafkaTopic.deduplicate( + self.labeled_input_topics.get(label, []) + topics ) @@ -112,9 +117,6 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config. :param enabled: Whether to enable auto-scaling using KEDA., defaults to False - :param consumer_group: Name of the consumer group used for checking the - offset on the topic and processing the related lag. - Mandatory to set when auto-scaling is enabled. :param lag_threshold: Average target value to trigger scaling actions. Mandatory to set when auto-scaling is enabled. :param polling_interval: This is the interval to check each trigger on. @@ -139,18 +141,16 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount, defaults to None - :param topics: List of auto-generated Kafka Streams topics used by the streams app., + :param internal_topics: List of auto-generated Kafka Streams topics used by the streams app, defaults to [] + :param topics: List of topics used by the streams app, defaults to [] + :param additional_triggers: List of additional KEDA triggers, + see https://keda.sh/docs/latest/scalers/, defaults to [] """ enabled: bool = Field( default=False, - description=describe_attr("streams", __doc__), - ) - consumer_group: str | None = Field( - default=None, - title="Consumer group", - description=describe_attr("consumer_group", __doc__), + description=describe_attr("enabled", __doc__), ) lag_threshold: int | None = Field( default=None, @@ -187,24 +187,20 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): title="Idle replica count", description=describe_attr("idle_replicas", __doc__), ) + internal_topics: list[str] = Field( + default=[], + description=describe_attr("internal_topics", __doc__), + ) topics: list[str] = Field( default=[], description=describe_attr("topics", __doc__), ) + additional_triggers: list[str] = Field( + default=[], + description=describe_attr("additional_triggers", __doc__), + ) model_config = ConfigDict(extra="allow") - @model_validator(mode="after") - def validate_mandatory_fields_are_set( - self: StreamsAppAutoScaling, - ) -> StreamsAppAutoScaling: # TODO: typing.Self for Python 3.11+ - if self.enabled and (self.consumer_group is None or self.lag_threshold is None): - msg = ( - "If app.autoscaling.enabled is set to true, " - "the fields app.autoscaling.consumer_group and app.autoscaling.lag_threshold should be set." - ) - raise ValidationError(msg) - return self - class PersistenceConfig(BaseModel): """streams-bootstrap persistence configurations. 
@@ -227,30 +223,18 @@ class PersistenceConfig(BaseModel): description="Storage class to use for the persistent volume.", ) - @model_validator(mode="after") - def validate_mandatory_fields_are_set( - self: PersistenceConfig, - ) -> PersistenceConfig: # TODO: typing.Self for Python 3.11+ - if self.enabled and self.size is None: - msg = ( - "If app.persistence.enabled is set to true, " - "the field app.persistence.size needs to be set." - ) - raise ValidationError(msg) - return self - class StreamsAppValues(StreamsBootstrapValues): """streams-bootstrap app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. - :param streams: streams-bootstrap streams section + :param kafka: streams-bootstrap kafka section :param autoscaling: Kubernetes event-driven autoscaling config, defaults to None """ - streams: StreamsConfig = Field( - description=describe_attr("streams", __doc__), + kafka: StreamsConfig = Field( + description=describe_attr("kafka", __doc__), ) autoscaling: StreamsAppAutoScaling | None = Field( default=None, diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index ebbc73e8b..ac3ffae68 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -6,10 +6,12 @@ from kpops.component_handlers.kubernetes.pvc_handler import PVCHandler from kpops.components.base_components.helm_app import HelmApp -from kpops.components.base_components.kafka_app import KafkaApp, KafkaAppCleaner +from kpops.components.base_components.kafka_app import KafkaAppCleaner from kpops.components.common.app_type import AppType -from kpops.components.common.streams_bootstrap import StreamsBootstrap from kpops.components.common.topic import KafkaTopic +from kpops.components.streams_bootstrap.base import ( + StreamsBootstrap, +) from kpops.components.streams_bootstrap.streams.model import ( StreamsAppValues, ) @@ -31,12 +33,12 @@ def helm_chart(self) -> str: @override async def reset(self, dry_run: bool) -> None: - self.values.streams.delete_output = False + self.values.kafka.delete_output = False await super().clean(dry_run) @override async def clean(self, dry_run: bool) -> None: - self.values.streams.delete_output = True + self.values.kafka.delete_output = True await super().clean(dry_run) if self.values.stateful_set and self.values.persistence.enabled: @@ -48,7 +50,7 @@ async def clean_pvcs(self, dry_run: bool) -> None: await pvc_handler.delete_pvcs(dry_run) -class StreamsApp(KafkaApp, StreamsBootstrap): +class StreamsApp(StreamsBootstrap): """StreamsApp component that configures a streams-bootstrap app. 
:param values: streams-bootstrap Helm values @@ -68,50 +70,50 @@ def _cleaner(self) -> StreamsAppCleaner: @property @override def input_topics(self) -> list[KafkaTopic]: - return self.values.streams.input_topics + return self.values.kafka.input_topics @property @override def extra_input_topics(self) -> dict[str, list[KafkaTopic]]: - return self.values.streams.extra_input_topics + return self.values.kafka.labeled_input_topics @property @override def output_topic(self) -> KafkaTopic | None: - return self.values.streams.output_topic + return self.values.kafka.output_topic @property @override def extra_output_topics(self) -> dict[str, KafkaTopic]: - return self.values.streams.extra_output_topics + return self.values.kafka.labeled_output_topics @override def add_input_topics(self, topics: list[KafkaTopic]) -> None: - self.values.streams.add_input_topics(topics) + self.values.kafka.add_input_topics(topics) @override def add_extra_input_topics(self, label: str, topics: list[KafkaTopic]) -> None: - self.values.streams.add_extra_input_topics(label, topics) + self.values.kafka.add_labeled_input_topics(label, topics) @override def set_input_pattern(self, name: str) -> None: - self.values.streams.input_pattern = name + self.values.kafka.input_pattern = name @override def add_extra_input_pattern(self, label: str, topic: str) -> None: - self.values.streams.extra_input_patterns[label] = topic + self.values.kafka.labeled_input_patterns[label] = topic @override def set_output_topic(self, topic: KafkaTopic) -> None: - self.values.streams.output_topic = topic + self.values.kafka.output_topic = topic @override def set_error_topic(self, topic: KafkaTopic) -> None: - self.values.streams.error_topic = topic + self.values.kafka.error_topic = topic @override def add_extra_output_topic(self, topic: KafkaTopic, label: str) -> None: - self.values.streams.extra_output_topics[label] = topic + self.values.kafka.labeled_output_topics[label] = topic @property @override diff --git a/kpops/components/streams_bootstrap_v2/__init__.py b/kpops/components/streams_bootstrap_v2/__init__.py new file mode 100644 index 000000000..6668c707a --- /dev/null +++ b/kpops/components/streams_bootstrap_v2/__init__.py @@ -0,0 +1,10 @@ +from kpops.components.streams_bootstrap_v2.base import StreamsBootstrapV2 + +from .producer.producer_app import ProducerAppV2 +from .streams.streams_app import StreamsAppV2 + +__all__ = ( + "StreamsBootstrapV2", + "StreamsAppV2", + "ProducerAppV2", +) diff --git a/kpops/components/common/streams_bootstrap.py b/kpops/components/streams_bootstrap_v2/base.py similarity index 84% rename from kpops/components/common/streams_bootstrap.py rename to kpops/components/streams_bootstrap_v2/base.py index c8a72b779..95f3281e2 100644 --- a/kpops/components/common/streams_bootstrap.py +++ b/kpops/components/streams_bootstrap_v2/base.py @@ -6,8 +6,10 @@ import pydantic from pydantic import AliasChoices, ConfigDict, Field +from typing_extensions import deprecated from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig +from kpops.components.base_components import KafkaApp from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.components.common.topic import KafkaTopic, KafkaTopicStr from kpops.utils.docstring import describe_attr @@ -94,10 +96,10 @@ def serialize_model( ) -class StreamsBootstrapValues(HelmAppValues): - """Base value class for all streams bootstrap related components. 
+class StreamsBootstrapV2Values(HelmAppValues): + """Base value class for all streams bootstrap v2 related components. - :param image_tag: Docker image tag of the streams-bootstrap app. + :param image_tag: Docker image tag of the streams-bootstrap-v2 app. """ image_tag: str = Field( @@ -111,16 +113,17 @@ class StreamsBootstrapValues(HelmAppValues): ) -class StreamsBootstrap(HelmApp, ABC): - """Base for components with a streams-bootstrap Helm chart. +@deprecated("StreamsBootstrapV2 component is deprecated, use StreamsBootstrap instead.") +class StreamsBootstrapV2(KafkaApp, HelmApp, ABC): + """Base for components with a streams-bootstrap-v2 Helm chart. - :param values: streams-bootstrap Helm values + :param values: streams-bootstrap-v2 Helm values :param repo_config: Configuration of the Helm chart repo to be used for deploying the component, defaults to streams-bootstrap Helm repo :param version: Helm chart version, defaults to "2.9.0" """ - values: StreamsBootstrapValues = Field( + values: StreamsBootstrapV2Values = Field( description=describe_attr("values", __doc__), ) @@ -140,3 +143,11 @@ def warning_for_latest_image_tag(self) -> Self: f"The image tag for component '{self.name}' is set or defaulted to 'latest'. Please, consider providing a stable image tag." ) return self + + @pydantic.model_validator(mode="before") + @classmethod + def deprecation_warning(cls, model: Any) -> Any: + log.warning( + "StreamsBootstrapV2 is deprecated, consider migrating to StreamsBootstrap." + ) + return model diff --git a/kpops/components/streams_bootstrap_v3/producer/__init__.py b/kpops/components/streams_bootstrap_v2/producer/__init__.py similarity index 100% rename from kpops/components/streams_bootstrap_v3/producer/__init__.py rename to kpops/components/streams_bootstrap_v2/producer/__init__.py diff --git a/kpops/components/streams_bootstrap_v2/producer/model.py b/kpops/components/streams_bootstrap_v2/producer/model.py new file mode 100644 index 000000000..005b7f603 --- /dev/null +++ b/kpops/components/streams_bootstrap_v2/producer/model.py @@ -0,0 +1,24 @@ +from pydantic import ConfigDict, Field + +from kpops.components.streams_bootstrap_v2.base import ( + KafkaStreamsConfig, + StreamsBootstrapV2Values, +) +from kpops.utils.docstring import describe_attr + + +class ProducerStreamsConfig(KafkaStreamsConfig): + """Kafka Streams settings specific to Producer.""" + + +class ProducerAppV2Values(StreamsBootstrapV2Values): + """Settings specific to producers. 
+ + :param streams: Kafka Streams settings + """ + + streams: ProducerStreamsConfig = Field( + default=..., description=describe_attr("streams", __doc__) + ) + + model_config = ConfigDict(extra="allow") diff --git a/kpops/components/streams_bootstrap_v3/producer/producer_app.py b/kpops/components/streams_bootstrap_v2/producer/producer_app.py similarity index 63% rename from kpops/components/streams_bootstrap_v3/producer/producer_app.py rename to kpops/components/streams_bootstrap_v2/producer/producer_app.py index d24af2ea2..c36ef2c5b 100644 --- a/kpops/components/streams_bootstrap_v3/producer/producer_app.py +++ b/kpops/components/streams_bootstrap_v2/producer/producer_app.py @@ -1,27 +1,28 @@ import logging from functools import cached_property -from pydantic import Field, computed_field -from typing_extensions import override +from pydantic import Field, ValidationError, computed_field +from typing_extensions import deprecated, override -from kpops.components.base_components.kafka_app import KafkaAppCleaner +from kpops.components.base_components.kafka_app import ( + KafkaAppCleaner, +) from kpops.components.common.app_type import AppType from kpops.components.common.topic import ( KafkaTopic, OutputTopicTypes, TopicConfig, ) -from kpops.components.streams_bootstrap_v3.base import ( - StreamsBootstrapV3, -) -from kpops.components.streams_bootstrap_v3.producer.model import ProducerAppValues +from kpops.components.streams_bootstrap_v2 import StreamsBootstrapV2 +from kpops.components.streams_bootstrap_v2.producer.model import ProducerAppV2Values +from kpops.const.file_type import DEFAULTS_YAML, PIPELINE_YAML from kpops.utils.docstring import describe_attr -log = logging.getLogger("ProducerAppV3") +log = logging.getLogger("ProducerAppV2") -class ProducerAppCleaner(KafkaAppCleaner, StreamsBootstrapV3): - values: ProducerAppValues +class ProducerAppCleaner(KafkaAppCleaner, StreamsBootstrapV2): + values: ProducerAppV2Values @property @override @@ -31,7 +32,8 @@ def helm_chart(self) -> str: ) -class ProducerAppV3(StreamsBootstrapV3): +@deprecated("ProducerAppV2 component is deprecated, use ProducerApp instead.") +class ProducerAppV2(StreamsBootstrapV2): """Producer component. 
This producer holds configuration to use as values for the streams-bootstrap @@ -43,7 +45,7 @@ class ProducerAppV3(StreamsBootstrapV3): :param from_: Producer doesn't support FromSection, defaults to None """ - values: ProducerAppValues = Field( + values: ProducerAppV2Values = Field( description=describe_attr("values", __doc__), ) from_: None = Field( @@ -72,20 +74,20 @@ def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: @property @override def output_topic(self) -> KafkaTopic | None: - return self.values.kafka.output_topic + return self.values.streams.output_topic @property @override def extra_output_topics(self) -> dict[str, KafkaTopic]: - return self.values.kafka.labeled_output_topics + return self.values.streams.extra_output_topics @override def set_output_topic(self, topic: KafkaTopic) -> None: - self.values.kafka.output_topic = topic + self.values.streams.output_topic = topic @override def add_extra_output_topic(self, topic: KafkaTopic, label: str) -> None: - self.values.kafka.labeled_output_topics[label] = topic + self.values.streams.extra_output_topics[label] = topic @property @override @@ -104,8 +106,16 @@ async def destroy(self, dry_run: bool) -> None: if cluster_values: log.debug("Fetched Helm chart values from cluster") name_override = self._cleaner.helm_name_override - self._cleaner.values = self.values.model_validate(cluster_values) - self._cleaner.values.name_override = name_override + try: + self._cleaner.values = self.values.model_validate(cluster_values) + self._cleaner.values.name_override = name_override + except ValidationError as validation_error: + warning_msg = f"The values in the cluster are invalid with the current model. Falling back to the enriched values of {PIPELINE_YAML} and {DEFAULTS_YAML}" + log.warning(warning_msg) + debug_msg = f"Cluster values: {cluster_values}" + log.debug(debug_msg) + debug_msg = f"Validation error: {validation_error}" + log.debug(debug_msg) await super().destroy(dry_run) diff --git a/kpops/components/streams_bootstrap_v3/streams/__init__.py b/kpops/components/streams_bootstrap_v2/streams/__init__.py similarity index 100% rename from kpops/components/streams_bootstrap_v3/streams/__init__.py rename to kpops/components/streams_bootstrap_v2/streams/__init__.py diff --git a/kpops/components/streams_bootstrap_v3/streams/model.py b/kpops/components/streams_bootstrap_v2/streams/model.py similarity index 67% rename from kpops/components/streams_bootstrap_v3/streams/model.py rename to kpops/components/streams_bootstrap_v2/streams/model.py index ab09b1dbb..e733bb91c 100644 --- a/kpops/components/streams_bootstrap_v3/streams/model.py +++ b/kpops/components/streams_bootstrap_v2/streams/model.py @@ -3,12 +3,13 @@ from typing import Any import pydantic -from pydantic import BaseModel, ConfigDict, Field +from pydantic import BaseModel, ConfigDict, Field, model_validator +from kpops.api.exception import ValidationError from kpops.components.common.topic import KafkaTopic, KafkaTopicStr -from kpops.components.streams_bootstrap_v3.model import ( - KafkaConfig, - StreamsBootstrapV3Values, +from kpops.components.streams_bootstrap_v2.base import ( + KafkaStreamsConfig, + StreamsBootstrapV2Values, ) from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import ( @@ -17,35 +18,29 @@ ) -class StreamsConfig(KafkaConfig): - """Streams Bootstrap kafka section. +class StreamsConfig(KafkaStreamsConfig): + """Streams Bootstrap streams section. - :param application_id: Unique application ID for Kafka Streams. 
Required for auto-scaling :param input_topics: Input topics, defaults to [] :param input_pattern: Input pattern, defaults to None - :param labeled_input_topics: Extra input topics, defaults to {} - :param labeled_input_patterns: Extra input patterns, defaults to {} + :param extra_input_topics: Extra input topics, defaults to {} + :param extra_input_patterns: Extra input patterns, defaults to {} :param error_topic: Error topic, defaults to None :param config: Configuration, defaults to {} :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to None """ - application_id: str | None = Field( - default=None, - title="Unique application ID", - description=describe_attr("application_id", __doc__), - ) input_topics: list[KafkaTopicStr] = Field( default=[], description=describe_attr("input_topics", __doc__) ) input_pattern: str | None = Field( default=None, description=describe_attr("input_pattern", __doc__) ) - labeled_input_topics: dict[str, list[KafkaTopicStr]] = Field( - default={}, description=describe_attr("labeled_input_topics", __doc__) + extra_input_topics: dict[str, list[KafkaTopicStr]] = Field( + default={}, description=describe_attr("extra_input_topics", __doc__) ) - labeled_input_patterns: dict[str, str] = Field( - default={}, description=describe_attr("labeled_input_patterns", __doc__) + extra_input_patterns: dict[str, str] = Field( + default={}, description=describe_attr("extra_input_patterns", __doc__) ) error_topic: KafkaTopicStr | None = Field( default=None, description=describe_attr("error_topic", __doc__) @@ -66,29 +61,29 @@ def deserialize_input_topics( return [KafkaTopic(name=topic_name) for topic_name in input_topics] return input_topics - @pydantic.field_validator("labeled_input_topics", mode="before") + @pydantic.field_validator("extra_input_topics", mode="before") @classmethod - def deserialize_labeled_input_topics( - cls, labeled_input_topics: dict[str, list[str]] | Any + def deserialize_extra_input_topics( + cls, extra_input_topics: dict[str, str] | Any ) -> dict[str, list[KafkaTopic]] | Any: - if isinstance(labeled_input_topics, dict): + if isinstance(extra_input_topics, dict): return { label: [KafkaTopic(name=topic_name) for topic_name in topics] - for label, topics in labeled_input_topics.items() + for label, topics in extra_input_topics.items() } - return labeled_input_topics + return extra_input_topics @pydantic.field_serializer("input_topics") - def serialize_topics(self, input_topics: list[KafkaTopic]) -> list[str]: - return [topic.name for topic in input_topics] + def serialize_topics(self, topics: list[KafkaTopic]) -> list[str]: + return [topic.name for topic in topics] - @pydantic.field_serializer("labeled_input_topics") - def serialize_labeled_input_topics( - self, labeled_input_topics: dict[str, list[KafkaTopic]] + @pydantic.field_serializer("extra_input_topics") + def serialize_extra_input_topics( + self, extra_topics: dict[str, list[KafkaTopic]] ) -> dict[str, list[str]]: return { label: self.serialize_topics(topics) - for label, topics in labeled_input_topics.items() + for label, topics in extra_topics.items() } def add_input_topics(self, topics: list[KafkaTopic]) -> None: @@ -100,16 +95,16 @@ def add_input_topics(self, topics: list[KafkaTopic]) -> None: """ self.input_topics = KafkaTopic.deduplicate(self.input_topics + topics) - def add_labeled_input_topics(self, label: str, topics: list[KafkaTopic]) -> None: - """Add given labeled topics that share a label to 
the list of extra input topics. + def add_extra_input_topics(self, label: str, topics: list[KafkaTopic]) -> None: + """Add given extra topics that share a label to the list of extra input topics. Ensures no duplicate topics in the list. :param topics: Extra input topics :param label: Topic label """ - self.labeled_input_topics[label] = KafkaTopic.deduplicate( - self.labeled_input_topics.get(label, []) + topics + self.extra_input_topics[label] = KafkaTopic.deduplicate( + self.extra_input_topics.get(label, []) + topics ) @@ -117,6 +112,9 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config. :param enabled: Whether to enable auto-scaling using KEDA., defaults to False + :param consumer_group: Name of the consumer group used for checking the + offset on the topic and processing the related lag. + Mandatory to set when auto-scaling is enabled. :param lag_threshold: Average target value to trigger scaling actions. Mandatory to set when auto-scaling is enabled. :param polling_interval: This is the interval to check each trigger on. @@ -141,16 +139,18 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount, defaults to None - :param internal_topics: List of auto-generated Kafka Streams topics used by the streams app, defaults to [] - :param topics: List of topics used by the streams app, defaults to [] - :param additional_triggers: List of additional KEDA triggers, - see https://keda.sh/docs/latest/scalers/, + :param topics: List of auto-generated Kafka Streams topics used by the streams app., defaults to [] """ enabled: bool = Field( default=False, - description=describe_attr("enabled", __doc__), + description=describe_attr("streams", __doc__), + ) + consumer_group: str | None = Field( + default=None, + title="Consumer group", + description=describe_attr("consumer_group", __doc__), ) lag_threshold: int | None = Field( default=None, @@ -187,20 +187,24 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): title="Idle replica count", description=describe_attr("idle_replicas", __doc__), ) - internal_topics: list[str] = Field( - default=[], - description=describe_attr("internal_topics", __doc__), - ) topics: list[str] = Field( default=[], description=describe_attr("topics", __doc__), ) - additional_triggers: list[str] = Field( - default=[], - description=describe_attr("additional_triggers", __doc__), - ) model_config = ConfigDict(extra="allow") + @model_validator(mode="after") + def validate_mandatory_fields_are_set( + self: StreamsAppAutoScaling, + ) -> StreamsAppAutoScaling: # TODO: typing.Self for Python 3.11+ + if self.enabled and (self.consumer_group is None or self.lag_threshold is None): + msg = ( + "If app.autoscaling.enabled is set to true, " + "the fields app.autoscaling.consumer_group and app.autoscaling.lag_threshold should be set." + ) + raise ValidationError(msg) + return self + class PersistenceConfig(BaseModel): """streams-bootstrap persistence configurations. 
@@ -223,18 +227,30 @@ class PersistenceConfig(BaseModel): description="Storage class to use for the persistent volume.", ) + @model_validator(mode="after") + def validate_mandatory_fields_are_set( + self: PersistenceConfig, + ) -> PersistenceConfig: # TODO: typing.Self for Python 3.11+ + if self.enabled and self.size is None: + msg = ( + "If app.persistence.enabled is set to true, " + "the field app.persistence.size needs to be set." + ) + raise ValidationError(msg) + return self + -class StreamsAppValues(StreamsBootstrapV3Values): - """streams-bootstrap app configurations. +class StreamsAppV2Values(StreamsBootstrapV2Values): + """streams-bootstrap-v2 app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. - :param kafka: streams-bootstrap kafka section + :param streams: streams-bootstrap-v2 streams section :param autoscaling: Kubernetes event-driven autoscaling config, defaults to None """ - kafka: StreamsConfig = Field( - description=describe_attr("kafka", __doc__), + streams: StreamsConfig = Field( + description=describe_attr("streams", __doc__), ) autoscaling: StreamsAppAutoScaling | None = Field( default=None, diff --git a/kpops/components/streams_bootstrap_v3/streams/streams_app.py b/kpops/components/streams_bootstrap_v2/streams/streams_app.py similarity index 58% rename from kpops/components/streams_bootstrap_v3/streams/streams_app.py rename to kpops/components/streams_bootstrap_v2/streams/streams_app.py index 8c6414053..0b6e952d2 100644 --- a/kpops/components/streams_bootstrap_v3/streams/streams_app.py +++ b/kpops/components/streams_bootstrap_v2/streams/streams_app.py @@ -1,29 +1,28 @@ import logging from functools import cached_property -from pydantic import Field, computed_field -from typing_extensions import override +from pydantic import Field, ValidationError, computed_field +from typing_extensions import deprecated, override from kpops.component_handlers.kubernetes.pvc_handler import PVCHandler from kpops.components.base_components.helm_app import HelmApp from kpops.components.base_components.kafka_app import KafkaAppCleaner from kpops.components.common.app_type import AppType from kpops.components.common.topic import KafkaTopic -from kpops.components.streams_bootstrap_v3.base import ( - StreamsBootstrapV3, -) -from kpops.components.streams_bootstrap_v3.streams.model import ( - StreamsAppValues, +from kpops.components.streams_bootstrap_v2 import StreamsBootstrapV2 +from kpops.components.streams_bootstrap_v2.streams.model import ( + StreamsAppV2Values, ) +from kpops.const.file_type import DEFAULTS_YAML, PIPELINE_YAML from kpops.utils.docstring import describe_attr -log = logging.getLogger("StreamsAppV3") +log = logging.getLogger("StreamsAppV2") -class StreamsAppCleaner(KafkaAppCleaner, StreamsBootstrapV3): +class StreamsAppCleaner(KafkaAppCleaner, StreamsBootstrapV2): from_: None = None to: None = None - values: StreamsAppValues + values: StreamsAppV2Values @property @override @@ -32,12 +31,12 @@ def helm_chart(self) -> str: @override async def reset(self, dry_run: bool) -> None: - self.values.kafka.delete_output = False + self.values.streams.delete_output = False await super().clean(dry_run) @override async def clean(self, dry_run: bool) -> None: - self.values.kafka.delete_output = True + self.values.streams.delete_output = True await super().clean(dry_run) if self.values.stateful_set and self.values.persistence.enabled: @@ -45,22 +44,18 @@ async def clean(self, dry_run: bool) -> None: async def 
clean_pvcs(self, dry_run: bool) -> None: app_full_name = super(HelmApp, self).full_name - pvc_handler = await PVCHandler.create(app_full_name, self.namespace) - if dry_run: - pvc_names = await pvc_handler.list_pvcs() - log.info(f"Deleting the PVCs {pvc_names} for StatefulSet '{app_full_name}'") - else: - log.info(f"Deleting the PVCs for StatefulSet '{app_full_name}'") - await pvc_handler.delete_pvcs() + pvc_handler = PVCHandler(app_full_name, self.namespace) + await pvc_handler.delete_pvcs(dry_run) -class StreamsAppV3(StreamsBootstrapV3): - """StreamsApp component that configures a streams-bootstrap app. +@deprecated("StreamsAppV2 component is deprecated, use StreamsApp instead.") +class StreamsAppV2(StreamsBootstrapV2): + """StreamsAppV2 component that configures a streams-bootstrap-v2 app. - :param values: streams-bootstrap Helm values + :param values: streams-bootstrap-v2 Helm values """ - values: StreamsAppValues = Field( + values: StreamsAppV2Values = Field( description=describe_attr("values", __doc__), ) @@ -74,50 +69,50 @@ def _cleaner(self) -> StreamsAppCleaner: @property @override def input_topics(self) -> list[KafkaTopic]: - return self.values.kafka.input_topics + return self.values.streams.input_topics @property @override def extra_input_topics(self) -> dict[str, list[KafkaTopic]]: - return self.values.kafka.labeled_input_topics + return self.values.streams.extra_input_topics @property @override def output_topic(self) -> KafkaTopic | None: - return self.values.kafka.output_topic + return self.values.streams.output_topic @property @override def extra_output_topics(self) -> dict[str, KafkaTopic]: - return self.values.kafka.labeled_output_topics + return self.values.streams.extra_output_topics @override def add_input_topics(self, topics: list[KafkaTopic]) -> None: - self.values.kafka.add_input_topics(topics) + self.values.streams.add_input_topics(topics) @override def add_extra_input_topics(self, label: str, topics: list[KafkaTopic]) -> None: - self.values.kafka.add_labeled_input_topics(label, topics) + self.values.streams.add_extra_input_topics(label, topics) @override def set_input_pattern(self, name: str) -> None: - self.values.kafka.input_pattern = name + self.values.streams.input_pattern = name @override def add_extra_input_pattern(self, label: str, topic: str) -> None: - self.values.kafka.labeled_input_patterns[label] = topic + self.values.streams.extra_input_patterns[label] = topic @override def set_output_topic(self, topic: KafkaTopic) -> None: - self.values.kafka.output_topic = topic + self.values.streams.output_topic = topic @override def set_error_topic(self, topic: KafkaTopic) -> None: - self.values.kafka.error_topic = topic + self.values.streams.error_topic = topic @override def add_extra_output_topic(self, topic: KafkaTopic, label: str) -> None: - self.values.kafka.labeled_output_topics[label] = topic + self.values.streams.extra_output_topics[label] = topic @property @override @@ -132,8 +127,16 @@ async def destroy(self, dry_run: bool) -> None: if cluster_values: log.debug("Fetched Helm chart values from cluster") name_override = self._cleaner.helm_name_override - self._cleaner.values = self.values.model_validate(cluster_values) - self._cleaner.values.name_override = name_override + try: + self._cleaner.values = self.values.model_validate(cluster_values) + self._cleaner.values.name_override = name_override + except ValidationError as validation_error: + warning_msg = f"The values in the cluster are invalid with the current model. 
Falling back to the enriched values of {PIPELINE_YAML} and {DEFAULTS_YAML}" + log.warning(warning_msg) + debug_msg = f"Cluster values: {cluster_values}" + log.debug(debug_msg) + debug_msg = f"Validation error: {validation_error}" + log.debug(debug_msg) await super().destroy(dry_run) diff --git a/kpops/components/streams_bootstrap_v3/__init__.py b/kpops/components/streams_bootstrap_v3/__init__.py deleted file mode 100644 index 07b11bf21..000000000 --- a/kpops/components/streams_bootstrap_v3/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from kpops.components.streams_bootstrap_v3.base import StreamsBootstrapV3 - -from .producer.producer_app import ProducerAppV3 -from .streams.streams_app import StreamsAppV3 - -__all__ = ("StreamsBootstrapV3", "StreamsAppV3", "ProducerAppV3") diff --git a/kpops/components/streams_bootstrap_v3/producer/model.py b/kpops/components/streams_bootstrap_v3/producer/model.py deleted file mode 100644 index 1da8a2be8..000000000 --- a/kpops/components/streams_bootstrap_v3/producer/model.py +++ /dev/null @@ -1,22 +0,0 @@ -from pydantic import ConfigDict, Field - -from kpops.components.streams_bootstrap_v3.model import ( - KafkaConfig, - StreamsBootstrapV3Values, -) -from kpops.utils.docstring import describe_attr - - -class ProducerConfig(KafkaConfig): - """Kafka Streams settings specific to Producer.""" - - -class ProducerAppValues(StreamsBootstrapV3Values): - """Settings specific to producers. - - :param kafka: Kafka Streams settings - """ - - kafka: ProducerConfig = Field(description=describe_attr("kafka", __doc__)) - - model_config = ConfigDict(extra="allow") diff --git a/tests/api/test_registry.py b/tests/api/test_registry.py index 9429dab83..64df1f1c8 100644 --- a/tests/api/test_registry.py +++ b/tests/api/test_registry.py @@ -18,14 +18,14 @@ ) from kpops.components.base_components.kubernetes_app import KubernetesApp from kpops.components.base_components.pipeline_component import PipelineComponent -from kpops.components.common.streams_bootstrap import StreamsBootstrap -from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp -from kpops.components.streams_bootstrap_v3 import ( - ProducerAppV3, - StreamsAppV3, - StreamsBootstrapV3, +from kpops.components.streams_bootstrap import ( + ProducerApp, + StreamsApp, + StreamsBootstrap, ) +from kpops.components.streams_bootstrap_v2 import StreamsBootstrapV2 +from kpops.components.streams_bootstrap_v2.producer.producer_app import ProducerAppV2 +from kpops.components.streams_bootstrap_v2.streams.streams_app import StreamsAppV2 from tests.cli.resources.custom_module import CustomSchemaProvider @@ -55,7 +55,7 @@ def test_iter_namespace(): "kpops.components.base_components", "kpops.components.common", "kpops.components.streams_bootstrap", - "kpops.components.streams_bootstrap_v3", + "kpops.components.streams_bootstrap_v2", "kpops.components.test_components", ] @@ -67,7 +67,7 @@ def test_iter_component_modules(): "kpops.components.base_components", "kpops.components.common", "kpops.components.streams_bootstrap", - "kpops.components.streams_bootstrap_v3", + "kpops.components.streams_bootstrap_v2", "kpops.components.test_components", ] @@ -105,13 +105,12 @@ def test_registry(): "kafka-source-connector": KafkaSourceConnector, "kubernetes-app": KubernetesApp, "pipeline-component": PipelineComponent, - # TODO: change the old sterams bootstrap to -v2 and remove -v3 + "producer-app-v2": ProducerAppV2, "producer-app": ProducerApp, - 
"producer-app-v3": ProducerAppV3, + "streams-app-v2": StreamsAppV2, "streams-app": StreamsApp, - "streams-app-v3": StreamsAppV3, + "streams-bootstrap-v2": StreamsBootstrapV2, "streams-bootstrap": StreamsBootstrap, - "streams-bootstrap-v3": StreamsBootstrapV3, } for _type, _class in registry._classes.items(): assert registry[_type] is _class diff --git a/tests/components/resources/pipelines/test-distributed-defaults/pipeline-deep/defaults.yaml b/tests/components/resources/pipelines/test-distributed-defaults/pipeline-deep/defaults.yaml index 5f084bfcf..e19704b3e 100644 --- a/tests/components/resources/pipelines/test-distributed-defaults/pipeline-deep/defaults.yaml +++ b/tests/components/resources/pipelines/test-distributed-defaults/pipeline-deep/defaults.yaml @@ -2,14 +2,14 @@ helm-app: name: ${component.type} namespace: namespace-to-override-it-all -streams-bootstrap: +streams-bootstrap-v2: app: streams: brokers: "${config.kafka_brokers}" -producer-app: {} # inherits from kafka-app +producer-app-v2: {} # inherits from streams-bootstrap-v2 -streams-app: # inherits from kafka-app +streams-app-v2: # inherits from streams-bootstrap-v2 to: topics: ${error_topic_name}: diff --git a/tests/components/streams_bootstrap_v3/__init__.py b/tests/components/streams_bootstrap/__init__.py similarity index 100% rename from tests/components/streams_bootstrap_v3/__init__.py rename to tests/components/streams_bootstrap/__init__.py diff --git a/tests/components/streams_bootstrap_v3/test_producer_app.py b/tests/components/streams_bootstrap/test_producer_app.py similarity index 83% rename from tests/components/streams_bootstrap_v3/test_producer_app.py rename to tests/components/streams_bootstrap/test_producer_app.py index db0de218c..046fa7f08 100644 --- a/tests/components/streams_bootstrap_v3/test_producer_app.py +++ b/tests/components/streams_bootstrap/test_producer_app.py @@ -13,9 +13,9 @@ OutputTopicTypes, TopicConfig, ) -from kpops.components.streams_bootstrap_v3.producer.producer_app import ( +from kpops.components.streams_bootstrap.producer.producer_app import ( + ProducerApp, ProducerAppCleaner, - ProducerAppV3, ) PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" @@ -39,8 +39,8 @@ def test_release_name(self): assert PRODUCER_APP_CLEAN_RELEASE_NAME.endswith("-clean") @pytest.fixture() - def producer_app(self) -> ProducerAppV3: - return ProducerAppV3( + def producer_app(self) -> ProducerApp: + return ProducerApp( name=PRODUCER_APP_NAME, **{ "version": "3.2.1", @@ -63,28 +63,28 @@ def producer_app(self) -> ProducerAppV3: def empty_helm_get_values(self, mocker: MockerFixture) -> MagicMock: return mocker.patch.object(Helm, "get_values", return_value=None) - def test_cleaner(self, producer_app: ProducerAppV3): + def test_cleaner(self, producer_app: ProducerApp): cleaner = producer_app._cleaner assert isinstance(cleaner, ProducerAppCleaner) assert not hasattr(cleaner, "_cleaner") - def test_cleaner_inheritance(self, producer_app: ProducerAppV3): + def test_cleaner_inheritance(self, producer_app: ProducerApp): assert producer_app._cleaner.values == producer_app.values - def test_cleaner_helm_release_name(self, producer_app: ProducerAppV3): + def test_cleaner_helm_release_name(self, producer_app: ProducerApp): assert ( producer_app._cleaner.helm_release_name == "${pipeline.name}-test-producer-app-with-l-abc43-clean" ) - def test_cleaner_helm_name_override(self, producer_app: ProducerAppV3): + def test_cleaner_helm_name_override(self, producer_app: ProducerApp): assert ( 
producer_app._cleaner.to_helm_values()["nameOverride"] == PRODUCER_APP_CLEAN_HELM_NAMEOVERRIDE ) def test_output_topics(self): - producer_app = ProducerAppV3( + producer_app = ProducerApp( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", @@ -116,7 +116,7 @@ def test_output_topics(self): @pytest.mark.asyncio() async def test_deploy_order_when_dry_run_is_false( self, - producer_app: ProducerAppV3, + producer_app: ProducerApp, mocker: MockerFixture, ): mock_create_topic = mocker.patch.object( @@ -167,7 +167,7 @@ async def test_deploy_order_when_dry_run_is_false( @pytest.mark.asyncio() async def test_destroy( self, - producer_app: ProducerAppV3, + producer_app: ProducerApp, mocker: MockerFixture, ): mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") @@ -181,7 +181,7 @@ async def test_destroy( @pytest.mark.asyncio() async def test_should_clean_producer_app( self, - producer_app: ProducerAppV3, + producer_app: ProducerApp, empty_helm_get_values: MockerFixture, mocker: MockerFixture, ): @@ -260,7 +260,7 @@ async def test_should_clean_producer_app( async def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( self, mocker: MockerFixture, - producer_app: ProducerAppV3, + producer_app: ProducerApp, empty_helm_get_values: MockerFixture, ): # actual component @@ -326,7 +326,7 @@ async def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clea ) def test_get_output_topics(self): - producer_app = ProducerAppV3( + producer_app = ProducerApp( name="my-producer", **{ "namespace": "test-namespace", @@ -378,7 +378,7 @@ async def test_should_not_deploy_clean_up_when_rest(self, mocker: MockerFixture) }, }, ) - producer_app = ProducerAppV3( + producer_app = ProducerApp( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", @@ -428,7 +428,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( }, }, ) - producer_app = ProducerAppV3( + producer_app = ProducerApp( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", @@ -470,5 +470,76 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( "schemaRegistryUrl": "http://localhost:8081", }, }, - HelmUpgradeInstallFlags(version="3.0.0", wait=True, wait_for_jobs=True), + HelmUpgradeInstallFlags(version="3.0.1", wait=True, wait_for_jobs=True), + ) + + @pytest.mark.asyncio() + async def test_clean_should_fall_back_to_local_values_when_validation_of_cluster_values_fails( + self, mocker: MockerFixture, caplog: pytest.LogCaptureFixture + ): + caplog.set_level(logging.WARNING) + + # invalid model + mocker.patch.object( + Helm, + "get_values", + return_value={ + "image": "registry/producer-app", + "imageTag": "1.1.1", + "nameOverride": PRODUCER_APP_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "test-output-topic", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + ) + + # user defined model + producer_app = ProducerApp( + name=PRODUCER_APP_NAME, + **{ + "namespace": "test-namespace", + "values": { + "image": "registry/producer-app", + "imageTag": "2.2.2", + "kafka": {"bootstrapServers": "fake-broker:9092"}, + }, + "to": { + "topics": { + "test-output-topic": {"type": "output"}, + } + }, + }, + ) + mocker.patch.object(producer_app.helm, "uninstall") + mocker.patch.object(producer_app._cleaner.dry_run_handler, "print_helm_diff") + mocker.patch.object(producer_app._cleaner.helm, "uninstall") + + mock_helm_upgrade_install = mocker.patch.object( + producer_app._cleaner.helm, "upgrade_install" + ) + 
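+ # The cluster values mocked via Helm.get_values above still use the old "streams" layout,
+ # while this component's model expects the new "kafka" section, so validation fails and
+ # clean() falls back to the enriched local values (hence the warning asserted below).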
+ dry_run = True + await producer_app.clean(dry_run) + assert ( + "The values in the cluster are invalid with the current model. Falling back to the enriched values of pipeline.yaml and defaults.yaml" + in caplog.text + ) + + mock_helm_upgrade_install.assert_called_once_with( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + dry_run, + "test-namespace", + { + "image": "registry/producer-app", + "imageTag": "2.2.2", + "nameOverride": PRODUCER_APP_CLEAN_HELM_NAMEOVERRIDE, + "kafka": { + "bootstrapServers": "fake-broker:9092", + "outputTopic": "test-output-topic", + }, + }, + HelmUpgradeInstallFlags(version="3.0.1", wait=True, wait_for_jobs=True), ) diff --git a/tests/components/streams_bootstrap_v3/test_streams_app.py b/tests/components/streams_bootstrap/test_streams_app.py similarity index 82% rename from tests/components/streams_bootstrap_v3/test_streams_app.py rename to tests/components/streams_bootstrap/test_streams_app.py index 2642e38f6..9aa982e3c 100644 --- a/tests/components/streams_bootstrap_v3/test_streams_app.py +++ b/tests/components/streams_bootstrap/test_streams_app.py @@ -1,8 +1,15 @@ import logging +from collections.abc import AsyncIterator from pathlib import Path -from unittest.mock import ANY, AsyncMock, MagicMock +from unittest.mock import ANY, MagicMock import pytest +from lightkube.models.core_v1 import ( + PersistentVolumeClaim, + PersistentVolumeClaimSpec, + PersistentVolumeClaimStatus, +) +from lightkube.models.meta_v1 import ObjectMeta from pytest_mock import MockerFixture from kpops.component_handlers import get_handlers @@ -11,6 +18,7 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name +from kpops.component_handlers.kubernetes.pvc_handler import PVCHandler from kpops.components.base_components.models import TopicName from kpops.components.base_components.models.to_section import ( ToSection, @@ -20,11 +28,11 @@ OutputTopicTypes, TopicConfig, ) -from kpops.components.streams_bootstrap_v3 import StreamsAppV3 -from kpops.components.streams_bootstrap_v3.streams.model import ( +from kpops.components.streams_bootstrap import StreamsApp +from kpops.components.streams_bootstrap.streams.model import ( StreamsAppAutoScaling, ) -from kpops.components.streams_bootstrap_v3.streams.streams_app import ( +from kpops.components.streams_bootstrap.streams.streams_app import ( StreamsAppCleaner, ) @@ -53,8 +61,8 @@ def test_release_name(self): assert STREAMS_APP_CLEAN_RELEASE_NAME.endswith("-clean") @pytest.fixture() - def streams_app(self) -> StreamsAppV3: - return StreamsAppV3( + def streams_app(self) -> StreamsApp: + return StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -72,8 +80,8 @@ def streams_app(self) -> StreamsAppV3: ) @pytest.fixture() - def stateful_streams_app(self) -> StreamsAppV3: - return StreamsAppV3( + def stateful_streams_app(self) -> StreamsApp: + return StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -104,12 +112,12 @@ def dry_run_handler_mock(self, mocker: MockerFixture) -> MagicMock: def empty_helm_get_values(self, mocker: MockerFixture) -> MagicMock: return mocker.patch.object(Helm, "get_values", return_value=None) - def test_cleaner(self, streams_app: StreamsAppV3): + def test_cleaner(self, streams_app: StreamsApp): cleaner = streams_app._cleaner assert isinstance(cleaner, StreamsAppCleaner) assert not hasattr(cleaner, "_cleaner") - def test_cleaner_inheritance(self, streams_app: StreamsAppV3): + def 
test_cleaner_inheritance(self, streams_app: StreamsApp): streams_app.values.kafka.application_id = "test-application-id" streams_app.values.autoscaling = StreamsAppAutoScaling( enabled=True, @@ -118,20 +126,20 @@ def test_cleaner_inheritance(self, streams_app: StreamsAppV3): ) assert streams_app._cleaner.values == streams_app.values - def test_cleaner_helm_release_name(self, streams_app: StreamsAppV3): + def test_cleaner_helm_release_name(self, streams_app: StreamsApp): assert ( streams_app._cleaner.helm_release_name == "${pipeline.name}-test-streams-app-with-lo-c98c5-clean" ) - def test_cleaner_helm_name_override(self, streams_app: StreamsAppV3): + def test_cleaner_helm_name_override(self, streams_app: StreamsApp): assert ( streams_app._cleaner.to_helm_values()["nameOverride"] == STREAMS_APP_CLEAN_HELM_NAME_OVERRIDE ) def test_set_topics(self): - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -177,7 +185,7 @@ def test_set_topics(self): assert "labeledInputPatterns" in kafka_config def test_no_empty_input_topic(self): - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -208,7 +216,7 @@ def test_should_validate(self): with pytest.raises( ValueError, match="Define label only if `type` is `pattern` or `None`" ): - StreamsAppV3( + StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -230,7 +238,7 @@ def test_should_validate(self): with pytest.raises( ValueError, match="Define `label` only if `type` is undefined" ): - StreamsAppV3( + StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -249,7 +257,7 @@ def test_should_validate(self): ) def test_set_streams_output_from_to(self): - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -288,7 +296,7 @@ def test_set_streams_output_from_to(self): ) def test_weave_inputs_from_prev_component(self): - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -325,7 +333,7 @@ def test_weave_inputs_from_prev_component(self): @pytest.mark.asyncio() async def test_deploy_order_when_dry_run_is_false(self, mocker: MockerFixture): - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -427,7 +435,7 @@ async def test_deploy_order_when_dry_run_is_false(self, mocker: MockerFixture): ca_file=None, insecure_skip_tls_verify=False, timeout="5m0s", - version="3.0.0", + version="3.0.1", wait=True, wait_for_jobs=False, ), @@ -437,7 +445,7 @@ async def test_deploy_order_when_dry_run_is_false(self, mocker: MockerFixture): @pytest.mark.asyncio() async def test_destroy( self, - streams_app: StreamsAppV3, + streams_app: StreamsApp, mocker: MockerFixture, ): mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") @@ -451,7 +459,7 @@ async def test_destroy( @pytest.mark.asyncio() async def test_reset_when_dry_run_is_false( self, - streams_app: StreamsAppV3, + streams_app: StreamsApp, empty_helm_get_values: MockerFixture, mocker: MockerFixture, ): @@ -504,7 +512,7 @@ async def test_reset_when_dry_run_is_false( }, }, HelmUpgradeInstallFlags( - version="3.0.0", wait=True, wait_for_jobs=True + version="3.0.1", wait=True, wait_for_jobs=True ), ), mocker.call.helm_uninstall( @@ -518,7 +526,7 @@ async def test_reset_when_dry_run_is_false( @pytest.mark.asyncio() async def 
test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( self, - streams_app: StreamsAppV3, + streams_app: StreamsApp, empty_helm_get_values: MockerFixture, mocker: MockerFixture, ): @@ -570,7 +578,7 @@ async def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean }, }, HelmUpgradeInstallFlags( - version="3.0.0", wait=True, wait_for_jobs=True + version="3.0.1", wait=True, wait_for_jobs=True ), ), mocker.call.helm_uninstall( @@ -604,7 +612,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_reset( }, }, ) - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -657,7 +665,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_reset( "schemaRegistryUrl": "http://localhost:8081", }, }, - HelmUpgradeInstallFlags(version="3.0.0", wait=True, wait_for_jobs=True), + HelmUpgradeInstallFlags(version="3.0.1", wait=True, wait_for_jobs=True), ) @pytest.mark.asyncio() @@ -683,7 +691,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( }, }, ) - streams_app = StreamsAppV3( + streams_app = StreamsApp( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -736,12 +744,12 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( "schemaRegistryUrl": "http://localhost:8081", }, }, - HelmUpgradeInstallFlags(version="3.0.0", wait=True, wait_for_jobs=True), + HelmUpgradeInstallFlags(version="3.0.1", wait=True, wait_for_jobs=True), ) @pytest.mark.asyncio() async def test_get_input_output_topics(self): - streams_app = StreamsAppV3( + streams_app = StreamsApp( name="my-app", **{ "namespace": "test-namespace", @@ -798,11 +806,60 @@ async def test_get_input_output_topics(self): KafkaTopic(name="topic-extra"), ] + @pytest.fixture() + def pvc1(self) -> PersistentVolumeClaim: + return PersistentVolumeClaim( + apiVersion="v1", + kind="PersistentVolumeClaim", + metadata=ObjectMeta(name="test-pvc1"), + spec=PersistentVolumeClaimSpec(), + status=PersistentVolumeClaimStatus(), + ) + + @pytest.fixture() + def pvc2(self) -> PersistentVolumeClaim: + return PersistentVolumeClaim( + apiVersion="v1", + kind="PersistentVolumeClaim", + metadata=ObjectMeta(name="test-pvc2"), + spec=PersistentVolumeClaimSpec(), + status=PersistentVolumeClaimStatus(), + ) + + @pytest.fixture() + def pvc3(self) -> PersistentVolumeClaim: + return PersistentVolumeClaim( + apiVersion="v1", + kind="PersistentVolumeClaim", + metadata=ObjectMeta(name="test-pvc3"), + spec=PersistentVolumeClaimSpec(), + status=PersistentVolumeClaimStatus(), + ) + + @pytest.fixture() + def mock_list_pvcs( + self, + mocker: MockerFixture, + pvc1: PersistentVolumeClaim, + pvc2: PersistentVolumeClaim, + pvc3: PersistentVolumeClaim, + ) -> MagicMock: + async def async_generator_side_effect() -> AsyncIterator[PersistentVolumeClaim]: + yield pvc1 + yield pvc2 + yield pvc3 + + return mocker.patch.object( + PVCHandler, "list_pvcs", side_effect=async_generator_side_effect + ) + @pytest.mark.asyncio() + @pytest.mark.usefixtures("kubeconfig") async def test_stateful_clean_with_dry_run_false( self, - stateful_streams_app: StreamsAppV3, + stateful_streams_app: StreamsApp, empty_helm_get_values: MockerFixture, + mock_list_pvcs: MagicMock, mocker: MockerFixture, ): # actual component @@ -815,14 +872,7 @@ async def test_stateful_clean_with_dry_run_false( mock_helm_upgrade_install = mocker.patch.object(cleaner.helm, "upgrade_install") mock_helm_uninstall = mocker.patch.object(cleaner.helm, 
"uninstall") - module = StreamsAppCleaner.__module__ - mock_pvc_handler_instance = AsyncMock() - mock_delete_pvcs = mock_pvc_handler_instance.delete_pvcs - mock_delete_pvcs.return_value = AsyncMock() - - mocker.patch( - f"{module}.PVCHandler.create", return_value=mock_pvc_handler_instance - ) + mock_delete_pvcs = mocker.patch.object(PVCHandler, "delete_pvcs") mock = MagicMock() mock.attach_mock(mock_helm_uninstall_streams_app, "helm_uninstall_streams_app") @@ -854,16 +904,16 @@ async def test_stateful_clean_with_dry_run_false( "test-namespace", { "nameOverride": STREAMS_APP_CLEAN_HELM_NAME_OVERRIDE, - "statefulSet": True, - "persistence": {"enabled": True, "size": "5Gi"}, "kafka": { "bootstrapServers": "fake-broker:9092", "outputTopic": "streams-app-output-topic", "deleteOutput": True, }, + "statefulSet": True, + "persistence": {"enabled": True, "size": "5Gi"}, }, HelmUpgradeInstallFlags( - version="3.0.0", wait=True, wait_for_jobs=True + version="3.0.1", wait=True, wait_for_jobs=True ), ), mocker.call.helm_uninstall( @@ -873,48 +923,119 @@ async def test_stateful_clean_with_dry_run_false( ), ANY, # __bool__ ANY, # __str__ - mocker.call.delete_pvcs(), + mocker.call.delete_pvcs(False), ] ) @pytest.mark.asyncio() + @pytest.mark.usefixtures("kubeconfig") async def test_stateful_clean_with_dry_run_true( self, - stateful_streams_app: StreamsAppV3, + stateful_streams_app: StreamsApp, empty_helm_get_values: MockerFixture, mocker: MockerFixture, + mock_list_pvcs: MagicMock, caplog: pytest.LogCaptureFixture, ): - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) # actual component mocker.patch.object(stateful_streams_app, "destroy") cleaner = stateful_streams_app._cleaner assert isinstance(cleaner, StreamsAppCleaner) - pvc_names = ["test-pvc1", "test-pvc2", "test-pvc3"] - - mock_pvc_handler_instance = AsyncMock() - mock_list_pvcs = mock_pvc_handler_instance.list_pvcs - mock_list_pvcs.return_value = pvc_names - - module = StreamsAppCleaner.__module__ - pvc_handler_create = mocker.patch( - f"{module}.PVCHandler.create", return_value=mock_pvc_handler_instance - ) mocker.patch.object(cleaner, "destroy") mocker.patch.object(cleaner, "deploy") - mocker.patch.object(mock_list_pvcs, "list_pvcs") dry_run = True await stateful_streams_app.clean(dry_run=dry_run) - pvc_handler_create.assert_called_once_with( - STREAMS_APP_FULL_NAME, "test-namespace" + mock_list_pvcs.assert_called_once() + assert ( + f"Deleting in namespace 'test-namespace' StatefulSet '{STREAMS_APP_FULL_NAME}' PVCs ['test-pvc1', 'test-pvc2', 'test-pvc3']" + in caplog.text ) - mock_list_pvcs.assert_called_once() + @pytest.mark.asyncio() + async def test_clean_should_fall_back_to_local_values_when_validation_of_cluster_values_fails( + self, + mocker: MockerFixture, + caplog: pytest.LogCaptureFixture, + ): + caplog.set_level(logging.WARNING) + + # invalid model + mocker.patch.object( + Helm, + "get_values", + return_value={ + "image": "registry/producer-app", + "imageTag": "1.1.1", + "nameOverride": STREAMS_APP_NAME, + "streams": { + "brokers": "fake-broker:9092", + "inputTopics": ["test-input-topic"], + "outputTopic": "streams-app-output-topic", + "schemaRegistryUrl": "http://localhost:8081", + }, + }, + ) + + streams_app = StreamsApp( + name=STREAMS_APP_NAME, + **{ + "namespace": "test-namespace", + "values": { + "image": "registry/streams-app", + "imageTag": "2.2.2", + "kafka": {"bootstrapServers": "fake-broker:9092"}, + }, + "from": { + "topics": { + "test-input-topic": {"type": "input"}, + } + }, + "to": { + "topics": { + 
"streams-app-output-topic": {"type": "output"}, + } + }, + }, + ) + + mocker.patch.object(streams_app.helm, "uninstall") + + mock_helm_upgrade_install = mocker.patch.object( + streams_app._cleaner.helm, "upgrade_install" + ) + mocker.patch.object(streams_app._cleaner.helm, "uninstall") + + mock = mocker.MagicMock() + mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") + + dry_run = False + await streams_app.clean(dry_run=dry_run) + assert ( - f"Deleting the PVCs {pvc_names} for StatefulSet '{STREAMS_APP_FULL_NAME}'" + "The values in the cluster are invalid with the current model. Falling back to the enriched values of pipeline.yaml and defaults.yaml" in caplog.text ) + + mock_helm_upgrade_install.assert_called_once_with( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "image": "registry/streams-app", + "nameOverride": STREAMS_APP_CLEAN_HELM_NAME_OVERRIDE, + "imageTag": "2.2.2", + "kafka": { + "bootstrapServers": "fake-broker:9092", + "inputTopics": ["test-input-topic"], + "outputTopic": "streams-app-output-topic", + "deleteOutput": True, + }, + }, + HelmUpgradeInstallFlags(version="3.0.1", wait=True, wait_for_jobs=True), + ) diff --git a/tests/components/streams_bootstrap_v3/test_streams_bootstrap.py b/tests/components/streams_bootstrap/test_streams_bootstrap.py similarity index 84% rename from tests/components/streams_bootstrap_v3/test_streams_bootstrap.py rename to tests/components/streams_bootstrap/test_streams_bootstrap.py index cdb892119..a6533d5b6 100644 --- a/tests/components/streams_bootstrap_v3/test_streams_bootstrap.py +++ b/tests/components/streams_bootstrap/test_streams_bootstrap.py @@ -9,14 +9,14 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.streams_bootstrap_v3.base import StreamsBootstrapV3 -from kpops.components.streams_bootstrap_v3.model import StreamsBootstrapV3Values +from kpops.components.streams_bootstrap.base import StreamsBootstrap +from kpops.components.streams_bootstrap.model import StreamsBootstrapValues @pytest.mark.usefixtures("mock_env") class TestStreamsBootstrap: def test_default_configs(self): - streams_bootstrap = StreamsBootstrapV3( + streams_bootstrap = StreamsBootstrap( name="example-name", **{ "namespace": "test-namespace", @@ -31,13 +31,13 @@ def test_default_configs(self): repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", ) - assert streams_bootstrap.version == "3.0.0" + assert streams_bootstrap.version == "3.0.1" assert streams_bootstrap.namespace == "test-namespace" assert streams_bootstrap.values.image_tag == "latest" @pytest.mark.asyncio() async def test_should_deploy_streams_bootstrap_app(self, mocker: MockerFixture): - streams_bootstrap = StreamsBootstrapV3( + streams_bootstrap = StreamsBootstrap( name="example-name", **{ "namespace": "test-namespace", @@ -58,7 +58,7 @@ async def test_should_deploy_streams_bootstrap_app(self, mocker: MockerFixture): streams_bootstrap.dry_run_handler, "print_helm_diff" ) mocker.patch.object( - StreamsBootstrapV3, + StreamsBootstrap, "helm_chart", return_value="test/test-chart", new_callable=mocker.PropertyMock, @@ -88,10 +88,10 @@ async def test_should_raise_validation_error_for_invalid_image_tag(self): with pytest.raises( ValidationError, match=re.escape( - "1 validation error for StreamsBootstrapV3Values\nimageTag\n String should match pattern '^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$'" + "1 
validation error for StreamsBootstrapValues\nimageTag\n String should match pattern '^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$'" ), ): - StreamsBootstrapV3Values( + StreamsBootstrapValues( **{ "imageTag": "invalid image tag!", "kafka": { @@ -105,10 +105,10 @@ async def test_should_raise_validation_error_for_invalid_helm_chart_version(self with pytest.raises( ValueError, match=re.escape( - "When using the streams-bootstrap v3 component your version ('2.1.0') must be at least 3.0.0." + "When using the streams-bootstrap component your version ('2.1.0') must be at least 3.0.1." ), ): - StreamsBootstrapV3( + StreamsBootstrap( name="example-name", **{ "namespace": "test-namespace", diff --git a/tests/components/streams_bootstrap_v2/__init__.py b/tests/components/streams_bootstrap_v2/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/components/test_producer_app.py b/tests/components/streams_bootstrap_v2/test_producer_app.py similarity index 97% rename from tests/components/test_producer_app.py rename to tests/components/streams_bootstrap_v2/test_producer_app.py index aa30177c7..9a8fcbb77 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/streams_bootstrap_v2/test_producer_app.py @@ -13,9 +13,9 @@ OutputTopicTypes, TopicConfig, ) -from kpops.components.streams_bootstrap.producer.producer_app import ( - ProducerApp, +from kpops.components.streams_bootstrap_v2.producer.producer_app import ( ProducerAppCleaner, + ProducerAppV2, ) PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" @@ -39,8 +39,8 @@ def test_release_name(self): assert PRODUCER_APP_CLEAN_RELEASE_NAME.endswith("-clean") @pytest.fixture() - def producer_app(self) -> ProducerApp: - return ProducerApp( + def producer_app(self) -> ProducerAppV2: + return ProducerAppV2( name=PRODUCER_APP_NAME, **{ "version": "2.4.2", @@ -63,28 +63,28 @@ def producer_app(self) -> ProducerApp: def empty_helm_get_values(self, mocker: MockerFixture) -> MagicMock: return mocker.patch.object(Helm, "get_values", return_value=None) - def test_cleaner(self, producer_app: ProducerApp): + def test_cleaner(self, producer_app: ProducerAppV2): cleaner = producer_app._cleaner assert isinstance(cleaner, ProducerAppCleaner) assert not hasattr(cleaner, "_cleaner") - def test_cleaner_inheritance(self, producer_app: ProducerApp): + def test_cleaner_inheritance(self, producer_app: ProducerAppV2): assert producer_app._cleaner.values == producer_app.values - def test_cleaner_helm_release_name(self, producer_app: ProducerApp): + def test_cleaner_helm_release_name(self, producer_app: ProducerAppV2): assert ( producer_app._cleaner.helm_release_name == "${pipeline.name}-test-producer-app-with-l-abc43-clean" ) - def test_cleaner_helm_name_override(self, producer_app: ProducerApp): + def test_cleaner_helm_name_override(self, producer_app: ProducerAppV2): assert ( producer_app._cleaner.to_helm_values()["nameOverride"] == PRODUCER_APP_CLEAN_HELM_NAMEOVERRIDE ) def test_output_topics(self): - producer_app = ProducerApp( + producer_app = ProducerAppV2( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", @@ -116,7 +116,7 @@ def test_output_topics(self): @pytest.mark.asyncio() async def test_deploy_order_when_dry_run_is_false( self, - producer_app: ProducerApp, + producer_app: ProducerAppV2, mocker: MockerFixture, ): mock_create_topic = mocker.patch.object( @@ -167,7 +167,7 @@ async def test_deploy_order_when_dry_run_is_false( @pytest.mark.asyncio() async def test_destroy( self, - producer_app: ProducerApp, + 
producer_app: ProducerAppV2, mocker: MockerFixture, ): mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") @@ -181,7 +181,7 @@ async def test_destroy( @pytest.mark.asyncio() async def test_should_clean_producer_app( self, - producer_app: ProducerApp, + producer_app: ProducerAppV2, empty_helm_get_values: MockerFixture, mocker: MockerFixture, ): @@ -260,7 +260,7 @@ async def test_should_clean_producer_app( async def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( self, mocker: MockerFixture, - producer_app: ProducerApp, + producer_app: ProducerAppV2, empty_helm_get_values: MockerFixture, ): # actual component @@ -326,7 +326,7 @@ async def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clea ) def test_get_output_topics(self): - producer_app = ProducerApp( + producer_app = ProducerAppV2( name="my-producer", **{ "namespace": "test-namespace", @@ -378,7 +378,7 @@ async def test_should_not_deploy_clean_up_when_rest(self, mocker: MockerFixture) }, }, ) - producer_app = ProducerApp( + producer_app = ProducerAppV2( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", @@ -428,7 +428,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( }, }, ) - producer_app = ProducerApp( + producer_app = ProducerAppV2( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", @@ -496,7 +496,7 @@ async def test_clean_should_fall_back_to_local_values_when_validation_of_cluster ) # user defined model - producer_app = ProducerApp( + producer_app = ProducerAppV2( name=PRODUCER_APP_NAME, **{ "namespace": "test-namespace", diff --git a/tests/components/test_streams_app.py b/tests/components/streams_bootstrap_v2/test_streams_app.py similarity index 97% rename from tests/components/test_streams_app.py rename to tests/components/streams_bootstrap_v2/test_streams_app.py index 393564a07..d86561e5f 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/streams_bootstrap_v2/test_streams_app.py @@ -27,13 +27,13 @@ OutputTopicTypes, TopicConfig, ) -from kpops.components.streams_bootstrap.streams.model import ( +from kpops.components.streams_bootstrap_v2.streams.model import ( PersistenceConfig, StreamsAppAutoScaling, ) -from kpops.components.streams_bootstrap.streams.streams_app import ( - StreamsApp, +from kpops.components.streams_bootstrap_v2.streams.streams_app import ( StreamsAppCleaner, + StreamsAppV2, ) RESOURCES_PATH = Path(__file__).parent / "resources" @@ -61,8 +61,8 @@ def test_release_name(self): assert STREAMS_APP_CLEAN_RELEASE_NAME.endswith("-clean") @pytest.fixture() - def streams_app(self) -> StreamsApp: - return StreamsApp( + def streams_app(self) -> StreamsAppV2: + return StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -80,8 +80,8 @@ def streams_app(self) -> StreamsApp: ) @pytest.fixture() - def stateful_streams_app(self) -> StreamsApp: - return StreamsApp( + def stateful_streams_app(self) -> StreamsAppV2: + return StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -106,12 +106,12 @@ def stateful_streams_app(self) -> StreamsApp: def empty_helm_get_values(self, mocker: MockerFixture) -> MagicMock: return mocker.patch.object(Helm, "get_values", return_value=None) - def test_cleaner(self, streams_app: StreamsApp): + def test_cleaner(self, streams_app: StreamsAppV2): cleaner = streams_app._cleaner assert isinstance(cleaner, StreamsAppCleaner) assert not hasattr(cleaner, "_cleaner") - def test_cleaner_inheritance(self, 
streams_app: StreamsApp): + def test_cleaner_inheritance(self, streams_app: StreamsAppV2): streams_app.values.autoscaling = StreamsAppAutoScaling( enabled=True, consumer_group="foo", @@ -121,7 +121,7 @@ def test_cleaner_inheritance(self, streams_app: StreamsApp): assert streams_app._cleaner.values == streams_app.values def test_raise_validation_error_when_autoscaling_enabled_and_mandatory_fields_not_set( - self, streams_app: StreamsApp + self, streams_app: StreamsAppV2 ): with pytest.raises(ValidationError) as error: streams_app.values.autoscaling = StreamsAppAutoScaling( @@ -134,7 +134,7 @@ def test_raise_validation_error_when_autoscaling_enabled_and_mandatory_fields_no assert str(error.value) == msg def test_raise_validation_error_when_autoscaling_enabled_and_only_consumer_group_set( - self, streams_app: StreamsApp + self, streams_app: StreamsAppV2 ): with pytest.raises(ValidationError) as error: streams_app.values.autoscaling = StreamsAppAutoScaling( @@ -147,7 +147,7 @@ def test_raise_validation_error_when_autoscaling_enabled_and_only_consumer_group assert str(error.value) == msg def test_raise_validation_error_when_autoscaling_enabled_and_only_lag_threshold_is_set( - self, streams_app: StreamsApp + self, streams_app: StreamsAppV2 ): with pytest.raises(ValidationError) as error: streams_app.values.autoscaling = StreamsAppAutoScaling( @@ -159,20 +159,20 @@ def test_raise_validation_error_when_autoscaling_enabled_and_only_lag_threshold_ ) assert str(error.value) == msg - def test_cleaner_helm_release_name(self, streams_app: StreamsApp): + def test_cleaner_helm_release_name(self, streams_app: StreamsAppV2): assert ( streams_app._cleaner.helm_release_name == "${pipeline.name}-test-streams-app-with-lo-c98c5-clean" ) - def test_cleaner_helm_name_override(self, streams_app: StreamsApp): + def test_cleaner_helm_name_override(self, streams_app: StreamsAppV2): assert ( streams_app._cleaner.to_helm_values()["nameOverride"] == STREAMS_APP_CLEAN_HELM_NAME_OVERRIDE ) def test_set_topics(self): - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -218,7 +218,7 @@ def test_set_topics(self): assert "extraInputPatterns" in streams_config def test_no_empty_input_topic(self): - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -249,7 +249,7 @@ def test_should_validate(self): with pytest.raises( ValueError, match="Define label only if `type` is `pattern` or `None`" ): - StreamsApp( + StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -271,7 +271,7 @@ def test_should_validate(self): with pytest.raises( ValueError, match="Define `label` only if `type` is undefined" ): - StreamsApp( + StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -290,7 +290,7 @@ def test_should_validate(self): ) def test_set_streams_output_from_to(self): - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -329,7 +329,7 @@ def test_set_streams_output_from_to(self): ) def test_weave_inputs_from_prev_component(self): - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -366,7 +366,7 @@ def test_weave_inputs_from_prev_component(self): @pytest.mark.asyncio() async def test_deploy_order_when_dry_run_is_false(self, mocker: MockerFixture): - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": 
"test-namespace", @@ -478,7 +478,7 @@ async def test_deploy_order_when_dry_run_is_false(self, mocker: MockerFixture): @pytest.mark.asyncio() async def test_destroy( self, - streams_app: StreamsApp, + streams_app: StreamsAppV2, mocker: MockerFixture, ): mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") @@ -492,7 +492,7 @@ async def test_destroy( @pytest.mark.asyncio() async def test_reset_when_dry_run_is_false( self, - streams_app: StreamsApp, + streams_app: StreamsAppV2, empty_helm_get_values: MockerFixture, mocker: MockerFixture, ): @@ -559,7 +559,7 @@ async def test_reset_when_dry_run_is_false( @pytest.mark.asyncio() async def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( self, - streams_app: StreamsApp, + streams_app: StreamsAppV2, empty_helm_get_values: MockerFixture, mocker: MockerFixture, ): @@ -645,7 +645,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_reset( }, }, ) - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -724,7 +724,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( }, }, ) - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", @@ -782,7 +782,7 @@ async def test_should_deploy_clean_up_job_with_values_in_cluster_when_clean( @pytest.mark.asyncio() async def test_get_input_output_topics(self): - streams_app = StreamsApp( + streams_app = StreamsAppV2( name="my-app", **{ "namespace": "test-namespace", @@ -840,7 +840,7 @@ async def test_get_input_output_topics(self): ] def test_raise_validation_error_when_persistence_enabled_and_size_not_set( - self, stateful_streams_app: StreamsApp + self, stateful_streams_app: StreamsAppV2 ): with pytest.raises(ValidationError) as error: stateful_streams_app.values.persistence = PersistenceConfig( @@ -903,7 +903,7 @@ async def async_generator_side_effect() -> AsyncIterator[PersistentVolumeClaim]: @pytest.mark.usefixtures("kubeconfig") async def test_stateful_clean_with_dry_run_false( self, - stateful_streams_app: StreamsApp, + stateful_streams_app: StreamsAppV2, empty_helm_get_values: MockerFixture, mock_list_pvcs: MagicMock, mocker: MockerFixture, @@ -977,7 +977,7 @@ async def test_stateful_clean_with_dry_run_false( @pytest.mark.usefixtures("kubeconfig") async def test_stateful_clean_with_dry_run_true( self, - stateful_streams_app: StreamsApp, + stateful_streams_app: StreamsAppV2, empty_helm_get_values: MockerFixture, mocker: MockerFixture, mock_list_pvcs: MagicMock, @@ -1027,7 +1027,7 @@ async def test_clean_should_fall_back_to_local_values_when_validation_of_cluster }, ) - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=STREAMS_APP_NAME, **{ "namespace": "test-namespace", diff --git a/tests/components/test_streams_bootstrap.py b/tests/components/streams_bootstrap_v2/test_streams_bootstrap.py similarity index 86% rename from tests/components/test_streams_bootstrap.py rename to tests/components/streams_bootstrap_v2/test_streams_bootstrap.py index 390ff3228..ecd475f2c 100644 --- a/tests/components/test_streams_bootstrap.py +++ b/tests/components/streams_bootstrap_v2/test_streams_bootstrap.py @@ -9,16 +9,14 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.common.streams_bootstrap import ( - StreamsBootstrap, - StreamsBootstrapValues, -) +from kpops.components.streams_bootstrap_v2 import StreamsBootstrapV2 +from 
kpops.components.streams_bootstrap_v2.base import StreamsBootstrapV2Values @pytest.mark.usefixtures("mock_env") class TestStreamsBootstrap: def test_default_configs(self): - streams_bootstrap = StreamsBootstrap( + streams_bootstrap = StreamsBootstrapV2( name="example-name", **{ "namespace": "test-namespace", @@ -39,7 +37,7 @@ def test_default_configs(self): @pytest.mark.asyncio() async def test_should_deploy_streams_bootstrap_app(self, mocker: MockerFixture): - streams_bootstrap = StreamsBootstrap( + streams_bootstrap = StreamsBootstrapV2( name="example-name", **{ "namespace": "test-namespace", @@ -60,7 +58,7 @@ async def test_should_deploy_streams_bootstrap_app(self, mocker: MockerFixture): streams_bootstrap.dry_run_handler, "print_helm_diff" ) mocker.patch.object( - StreamsBootstrap, + StreamsBootstrapV2, "helm_chart", return_value="test/test-chart", new_callable=mocker.PropertyMock, @@ -90,10 +88,10 @@ async def test_should_raise_validation_error_for_invalid_image_tag(self): with pytest.raises( ValidationError, match=re.escape( - "1 validation error for StreamsBootstrapValues\nimageTag\n String should match pattern '^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$'" + "1 validation error for StreamsBootstrapV2Values\nimageTag\n String should match pattern '^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$'" ), ): - StreamsBootstrapValues( + StreamsBootstrapV2Values( **{ "imageTag": "invalid image tag!", "streams": { diff --git a/tests/pipeline/resources/custom-config/defaults.yaml b/tests/pipeline/resources/custom-config/defaults.yaml index 40c754e0f..9e39b96f1 100644 --- a/tests/pipeline/resources/custom-config/defaults.yaml +++ b/tests/pipeline/resources/custom-config/defaults.yaml @@ -1,4 +1,4 @@ -kafka-app: +streams-bootstrap-v2: version: 2.9.0 namespace: development-namespace values: @@ -6,14 +6,14 @@ kafka-app: brokers: "${config.kafka_brokers}" schemaRegistryUrl: "${config.schema_registry.url}" -producer-app: +producer-app-v2: to: topics: ${output_topic_name}: type: output partitions_count: 3 -streams-app: +streams-app-v2: values: labels: pipeline: ${pipeline.name} diff --git a/tests/pipeline/resources/custom-config/pipeline.yaml b/tests/pipeline/resources/custom-config/pipeline.yaml index b50df4f7e..83c56f136 100644 --- a/tests/pipeline/resources/custom-config/pipeline.yaml +++ b/tests/pipeline/resources/custom-config/pipeline.yaml @@ -1,4 +1,4 @@ -- type: producer-app +- type: producer-app-v2 name: app1 values: resources: @@ -7,7 +7,7 @@ requests: memory: 2G -- type: streams-app +- type: streams-app-v2 name: app2 values: image: some-image diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index b0785a3e3..5aa3f9520 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -4,16 +4,16 @@ pipeline-component: kubernetes-app: namespace: example-namespace -streams-bootstrap: +streams-bootstrap-v2: values: streams: brokers: ${config.kafka_brokers} schemaRegistryUrl: ${config.schema_registry.url} version: "2.4.2" -producer-app: {} # inherits from kafka-app +producer-app-v2: {} # inherits from streams-bootstrap-v2 -streams-app: # inherits from kafka-app +streams-app-v2: # inherits from streams-bootstrap-v2 values: streams: config: diff --git a/tests/pipeline/resources/kafka-connect-sink-config/defaults.yaml b/tests/pipeline/resources/kafka-connect-sink-config/defaults.yaml index 4fba15fda..bdf22b02e 100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/defaults.yaml +++ 
b/tests/pipeline/resources/kafka-connect-sink-config/defaults.yaml @@ -1,2 +1,2 @@ -streams-bootstrap: +streams-bootstrap-v2: version: 2.9.0 diff --git a/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml b/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml index 02fd8e087..e88be97e2 100644 --- a/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml +++ b/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml @@ -1,5 +1,5 @@ # Parse Connector topics from previous component output topic -- type: streams-app +- type: streams-app-v2 from: topics: example-topic: diff --git a/tests/pipeline/resources/name_prefix_concatenation/pipeline.yaml b/tests/pipeline/resources/name_prefix_concatenation/pipeline.yaml index f1e5f18c4..f3fd13980 100644 --- a/tests/pipeline/resources/name_prefix_concatenation/pipeline.yaml +++ b/tests/pipeline/resources/name_prefix_concatenation/pipeline.yaml @@ -1,4 +1,4 @@ -- type: streams-app +- type: streams-app-v2 name: my-streams-app prefix: my-fake-prefix- from: diff --git a/tests/pipeline/resources/no-input-topic-pipeline/pipeline.yaml b/tests/pipeline/resources/no-input-topic-pipeline/pipeline.yaml index eaea0a4c6..c4fe2dc71 100644 --- a/tests/pipeline/resources/no-input-topic-pipeline/pipeline.yaml +++ b/tests/pipeline/resources/no-input-topic-pipeline/pipeline.yaml @@ -1,4 +1,4 @@ -- type: streams-app +- type: streams-app-v2 name: app1 from: topics: @@ -16,7 +16,7 @@ topics: example-output: type: output -- type: streams-app +- type: streams-app-v2 name: app2 to: topics: diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml index f933006ff..044a2af9e 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml @@ -1,17 +1,17 @@ -kafka-app: +streams-bootstrap-v2: values: streams: brokers: "${config.kafka_brokers}" schemaRegistryUrl: "${config.schema_registry.url}" -producer-app: +producer-app-v2: to: topics: ${output_topic_name}: type: output partitions_count: 3 -streams-app: +streams-app-v2: values: labels: pipeline: ${pipeline.name} diff --git a/tests/pipeline/resources/no-user-defined-components/pipeline.yaml b/tests/pipeline/resources/no-user-defined-components/pipeline.yaml index e8c53c87d..0ee4773fd 100644 --- a/tests/pipeline/resources/no-user-defined-components/pipeline.yaml +++ b/tests/pipeline/resources/no-user-defined-components/pipeline.yaml @@ -1,4 +1,4 @@ -- type: streams-app +- type: streams-app-v2 from: topics: example-topic: diff --git a/tests/pipeline/resources/parallel-pipeline/defaults.yaml b/tests/pipeline/resources/parallel-pipeline/defaults.yaml index 073e8e877..7fcd5c154 100644 --- a/tests/pipeline/resources/parallel-pipeline/defaults.yaml +++ b/tests/pipeline/resources/parallel-pipeline/defaults.yaml @@ -4,13 +4,13 @@ pipeline-component: kubernetes-app: namespace: ${NAMESPACE} -kafka-app: +streams-bootstrap-v2: values: streams: brokers: ${config.kafka_brokers} schemaRegistryUrl: ${config.schema_registry.url} -streams-app: +streams-app-v2: values: labels: pipeline: ${pipeline.name} diff --git a/tests/pipeline/resources/parallel-pipeline/pipeline.yaml b/tests/pipeline/resources/parallel-pipeline/pipeline.yaml index 688d86fb5..ae8785c1e 100644 --- a/tests/pipeline/resources/parallel-pipeline/pipeline.yaml +++ b/tests/pipeline/resources/parallel-pipeline/pipeline.yaml @@ -1,4 +1,4 @@ -- type: producer-app +- type: producer-app-v2 name: transaction-avro-producer-1 to: topics: @@ 
-6,7 +6,7 @@ type: output partitions_count: 3 -- type: producer-app +- type: producer-app-v2 name: transaction-avro-producer-2 to: topics: @@ -14,7 +14,7 @@ type: output partitions_count: 3 -- type: producer-app +- type: producer-app-v2 name: transaction-avro-producer-3 to: topics: @@ -22,13 +22,13 @@ type: output partitions_count: 3 -- type: streams-app +- type: streams-app-v2 name: transaction-joiner -- type: streams-app +- type: streams-app-v2 name: fraud-detector -- type: streams-app +- type: streams-app-v2 name: account-linker from: components: diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml index 5d3fa4696..b122ecf40 100644 --- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml +++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml @@ -4,7 +4,7 @@ pipeline-component: kubernetes-app: namespace: ${NAMESPACE} -kafka-app: +streams-bootstrap-v2: version: 2.9.0 values: streams: diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/pipeline.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/pipeline.yaml index 7dcdda440..8b4c558c3 100644 --- a/tests/pipeline/resources/pipeline-component-should-have-prefix/pipeline.yaml +++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/pipeline.yaml @@ -1,4 +1,4 @@ -- type: producer-app +- type: producer-app-v2 name: account-producer values: replicaCount: 1 diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index c3b71231f..3bb6b7c8f 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -1,15 +1,16 @@ kubernetes-app: name: ${component.type} namespace: example-namespace -kafka-app: + +streams-bootstrap-v2: values: streams: brokers: "${config.kafka_brokers}" schemaRegistryUrl: "${config.schema_registry.url}" -producer-app: {} # inherits from kafka-app +producer-app-v2: {} # inherits from streams-bootstrap-v2 -streams-app: # inherits from kafka-app +streams-app-v2: # inherits from streams-bootstrap-v2 version: 2.9.0 values: streams: diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/pipeline.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/pipeline.yaml index 02fd8e087..e88be97e2 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/pipeline.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/pipeline.yaml @@ -1,5 +1,5 @@ # Parse Connector topics from previous component output topic -- type: streams-app +- type: streams-app-v2 from: topics: example-topic: diff --git a/tests/pipeline/resources/pipeline-with-loop/defaults.yaml b/tests/pipeline/resources/pipeline-with-loop/defaults.yaml index cc9b4e585..c7da32bc8 100644 --- a/tests/pipeline/resources/pipeline-with-loop/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-loop/defaults.yaml @@ -7,13 +7,13 @@ kubernetes-app: kafka-connector: namespace: example-namespace -kafka-app: +streams-bootstrap-v2: values: streams: brokers: 127.0.0.1:9092 schemaRegistryUrl: 127.0.0.1:8081 -streams-app: +streams-app-v2: values: labels: pipeline: ${pipeline.name} diff --git a/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml b/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml index d60e4c36e..634293d69 100644 ---
a/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml +++ b/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml @@ -1,4 +1,4 @@ -- type: producer-app +- type: producer-app-v2 name: app1 values: image: producer-image @@ -7,7 +7,7 @@ my-output-topic: type: output -- type: streams-app +- type: streams-app-v2 name: app2 values: image: app2-image @@ -20,7 +20,7 @@ my-app2-topic: type: output -- type: streams-app +- type: streams-app-v2 name: app3 values: image: app3-image diff --git a/tests/pipeline/resources/pipeline-with-paths/pipeline.yaml b/tests/pipeline/resources/pipeline-with-paths/pipeline.yaml index ba1bd0000..65a6eaab0 100644 --- a/tests/pipeline/resources/pipeline-with-paths/pipeline.yaml +++ b/tests/pipeline/resources/pipeline-with-paths/pipeline.yaml @@ -1,4 +1,4 @@ -- type: producer-app +- type: producer-app-v2 name: account-producer namespace: test values: diff --git a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml index df6d08c74..f93f0ca40 100644 --- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml @@ -2,21 +2,21 @@ kubernetes-app: name: "${component_type}" namespace: example-namespace -kafka-app: +streams-bootstrap-v2: values: streams: brokers: "${config.kafka_brokers}" schema_registry_url: "${config.schema_registry.url}" version: "2.4.2" -producer-app: +producer-app-v2: to: topics: ${output_topic_name}: partitions_count: 3 -streams-app: # inherits from kafka-app +streams-app-v2: # inherits from streams-bootstrap-v2 values: streams: brokers: test diff --git a/tests/pipeline/resources/pipeline-with-short-topics/pipeline.yaml b/tests/pipeline/resources/pipeline-with-short-topics/pipeline.yaml index 3b2a4b0a5..c63916918 100644 --- a/tests/pipeline/resources/pipeline-with-short-topics/pipeline.yaml +++ b/tests/pipeline/resources/pipeline-with-short-topics/pipeline.yaml @@ -1,16 +1,16 @@ -- type: producer-app +- type: producer-app-v2 name: component-input -- type: producer-app +- type: producer-app-v2 name: component-extra -- type: producer-app +- type: producer-app-v2 name: component-input-pattern -- type: producer-app +- type: producer-app-v2 name: component-extra-pattern -- type: streams-app +- type: streams-app-v2 name: simple-app from: components: diff --git a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml index f45f4ef37..4369bcadb 100644 --- a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml +++ b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml @@ -7,13 +7,13 @@ kubernetes-app: kafka-connector: namespace: example-namespace -kafka-app: +streams-bootstrap-v2: values: streams: brokers: 127.0.0.1:9092 schemaRegistryUrl: 127.0.0.1:8081 -streams-app: +streams-app-v2: values: labels: pipeline: ${pipeline.name} diff --git a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml index 1c6511d79..7c3b8ecaa 100644 --- a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml +++ b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml @@ -1,4 +1,4 @@ -- type: streams-app +- type: 
streams-app-v2 name: app2-processor values: image: some-image diff --git a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml index 8fbb3ea39..0a8fb4d11 100644 --- a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml +++ b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml @@ -7,19 +7,19 @@ kubernetes-app: kafka-connector: namespace: example-namespace -kafka-app: +streams-bootstrap-v2: values: streams: brokers: 127.0.0.1:9092 schemaRegistryUrl: 127.0.0.1:8081 -streams-app: +streams-app-v2: values: labels: pipeline: ${pipeline.name} -producer-app: +producer-app-v2: to: topics: ${output_topic_name}: diff --git a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml index 03d9bee6d..ffb0f8c3f 100644 --- a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml +++ b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml @@ -1,6 +1,6 @@ -- type: producer-app +- type: producer-app-v2 name: app1 -- type: streams-app +- type: streams-app-v2 name: app2 diff --git a/tests/pipeline/resources/read-from-component/pipeline.yaml b/tests/pipeline/resources/read-from-component/pipeline.yaml index cc6bf72c7..fab7729d7 100644 --- a/tests/pipeline/resources/read-from-component/pipeline.yaml +++ b/tests/pipeline/resources/read-from-component/pipeline.yaml @@ -1,11 +1,11 @@ -- type: producer-app +- type: producer-app-v2 name: producer1 to: topics: ${output_topic_name}: type: output -- type: producer-app +- type: producer-app-v2 name: producer2 prefix: "" to: @@ -20,7 +20,7 @@ name: inflate-step-without-prefix prefix: "" -- type: streams-app +- type: streams-app-v2 name: consumer1 from: components: @@ -31,7 +31,7 @@ ${output_topic_name}: type: output -- type: streams-app +- type: streams-app-v2 name: consumer2 from: components: @@ -40,7 +40,7 @@ consumer1: type: input -- type: streams-app +- type: streams-app-v2 name: consumer3 from: topics: @@ -50,14 +50,14 @@ producer2: type: input -- type: streams-app +- type: streams-app-v2 name: consumer4 from: components: inflate-step: type: input -- type: streams-app +- type: streams-app-v2 name: consumer5 from: components: diff --git a/tests/pipeline/resources/simple-pipeline/pipeline.yaml b/tests/pipeline/resources/simple-pipeline/pipeline.yaml index f78d0c385..d523398ed 100644 --- a/tests/pipeline/resources/simple-pipeline/pipeline.yaml +++ b/tests/pipeline/resources/simple-pipeline/pipeline.yaml @@ -1,6 +1,6 @@ -- type: producer-app +- type: producer-app-v2 -- type: streams-app +- type: streams-app-v2 - type: helm-app values: {} diff --git a/tests/pipeline/resources/streams-bootstrap-v3/defaults.yaml b/tests/pipeline/resources/streams-bootstrap/defaults.yaml similarity index 66% rename from tests/pipeline/resources/streams-bootstrap-v3/defaults.yaml rename to tests/pipeline/resources/streams-bootstrap/defaults.yaml index 7dc1f296a..e5ed3a48a 100644 --- a/tests/pipeline/resources/streams-bootstrap-v3/defaults.yaml +++ b/tests/pipeline/resources/streams-bootstrap/defaults.yaml @@ -1,14 +1,13 @@ -streams-bootstrap-v3: +streams-bootstrap: values: kafka: bootstrapServers: ${config.kafka_brokers} schemaRegistryUrl: ${config.schema_registry.url} - # TODO: change to stable version after the PR is merged - version: "3.0.0-SNAPSHOT" + version: "3.0.1" -producer-app-v3: {} # 
inherits from streams-bootstrap-v3 +producer-app: {} # inherits from streams-bootstrap -streams-app-v3: # inherits from streams-bootstrap-v3 +streams-app: # inherits from streams-bootstrap values: kafka: config: diff --git a/tests/pipeline/resources/streams-bootstrap-v3/pipeline.yaml b/tests/pipeline/resources/streams-bootstrap/pipeline.yaml similarity index 100% rename from tests/pipeline/resources/streams-bootstrap-v3/pipeline.yaml rename to tests/pipeline/resources/streams-bootstrap/pipeline.yaml diff --git a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml index f777b35e9..08760ac5c 100644 --- a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml +++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml @@ -1,14 +1,14 @@ kubernetes-app: namespace: example-namespace -kafka-app: +streams-bootstrap-v2: values: streams: brokers: "${config.kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: "2.4.2" -streams-app: # inherits from kafka-app +streams-app-v2: # inherits from streams-bootstrap-v2 values: streams: config: diff --git a/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml index 4507ff2d3..4ce0dd534 100644 --- a/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml +++ b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml @@ -1,4 +1,4 @@ -- type: streams-app +- type: streams-app-v2 name: in-order-to-have-len-fifty-two-name-should-end--here values: streams: diff --git a/tests/pipeline/snapshots/test_example/test_generate/atm-fraud/pipeline.yaml b/tests/pipeline/snapshots/test_example/test_generate/atm-fraud/pipeline.yaml index 99a7e2cb4..78c492b66 100644 --- a/tests/pipeline/snapshots/test_example/test_generate/atm-fraud/pipeline.yaml +++ b/tests/pipeline/snapshots/test_example/test_generate/atm-fraud/pipeline.yaml @@ -42,7 +42,7 @@ atm-fraud-account-producer-topic: configs: {} partitions_count: 3 - type: producer-app + type: producer-app-v2 values: image: ${DOCKER_REGISTRY}/atm-demo-accountproducer imageTag: 1.0.0 @@ -107,7 +107,7 @@ atm-fraud-transaction-avro-producer-topic: configs: {} partitions_count: 3 - type: producer-app + type: producer-app-v2 values: commandLine: ITERATION: 20 @@ -184,7 +184,7 @@ atm-fraud-transaction-joiner-topic: configs: {} partitions_count: 3 - type: streams-app + type: streams-app-v2 values: annotations: consumerGroup: atm-transactionjoiner-atm-fraud-joinedtransactions-topic @@ -266,7 +266,7 @@ atm-fraud-fraud-detector-topic: configs: {} partitions_count: 3 - type: streams-app + type: streams-app-v2 values: annotations: consumerGroup: atm-frauddetector-atm-fraud-possiblefraudtransactions-topic @@ -358,7 +358,7 @@ atm-fraud-account-linker-topic: configs: {} partitions_count: 3 - type: streams-app + type: streams-app-v2 values: annotations: consumerGroup: atm-accountlinker-atm-fraud-output-topic diff --git a/tests/pipeline/snapshots/test_example/test_generate/word-count/pipeline.yaml b/tests/pipeline/snapshots/test_example/test_generate/word-count/pipeline.yaml index e7afd3398..145184060 100644 --- a/tests/pipeline/snapshots/test_example/test_generate/word-count/pipeline.yaml +++ b/tests/pipeline/snapshots/test_example/test_generate/word-count/pipeline.yaml @@ -40,7 +40,7 @@ word-count-data-producer-topic: configs: {} partitions_count: 3 - type: producer-app + type: producer-app-v2 values: image: bakdata/kpops-demo-sentence-producer imageTag: 1.0.0 @@ 
-112,7 +112,7 @@ cleanup.policy: compact partitions_count: 3 type: output - type: streams-app + type: streams-app-v2 values: commandLine: PRODUCTIVE: false diff --git a/tests/pipeline/snapshots/test_generate/test_default_config/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_default_config/pipeline.yaml index 1db36ed55..21dadaa27 100644 --- a/tests/pipeline/snapshots/test_generate/test_default_config/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_default_config/pipeline.yaml @@ -36,7 +36,7 @@ configs: {} partitions_count: 3 type: output - type: producer-app + type: producer-app-v2 values: imageTag: latest resources: @@ -99,7 +99,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: image: some-image imageTag: latest diff --git a/tests/pipeline/snapshots/test_generate/test_inflate_pipeline/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_inflate_pipeline/pipeline.yaml index 33a249bbb..cf68b3a1d 100644 --- a/tests/pipeline/snapshots/test_generate/test_inflate_pipeline/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_inflate_pipeline/pipeline.yaml @@ -339,7 +339,7 @@ resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app: configs: {} type: output - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: diff --git a/tests/pipeline/snapshots/test_generate/test_kafka_connect_sink_weave_from_topics/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_kafka_connect_sink_weave_from_topics/pipeline.yaml index 2f7fd65ff..2c600b25d 100644 --- a/tests/pipeline/snapshots/test_generate/test_kafka_connect_sink_weave_from_topics/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_kafka_connect_sink_weave_from_topics/pipeline.yaml @@ -1,5 +1,5 @@ - _cleaner: - name: streams-app + name: streams-app-v2 namespace: example-namespace prefix: resources-kafka-connect-sink- repo_config: @@ -19,7 +19,7 @@ brokers: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-kafka-connect-sink-streams-app-error + errorTopic: resources-kafka-connect-sink-streams-app-v2-error inputTopics: - example-topic outputTopic: example-output @@ -30,7 +30,7 @@ topics: example-topic: type: input - name: streams-app + name: streams-app-v2 namespace: example-namespace prefix: resources-kafka-connect-sink- repo_config: @@ -44,13 +44,13 @@ example-output: configs: {} type: output - resources-kafka-connect-sink-streams-app-error: + resources-kafka-connect-sink-streams-app-v2-error: configs: cleanup.policy: compact,delete partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: image: fake-image imageTag: latest @@ -61,7 +61,7 @@ brokers: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-kafka-connect-sink-streams-app-error + errorTopic: resources-kafka-connect-sink-streams-app-v2-error inputTopics: - example-topic outputTopic: example-output diff --git a/tests/pipeline/snapshots/test_generate/test_model_serialization/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_model_serialization/pipeline.yaml index db123f217..57a5a4dab 100644 --- a/tests/pipeline/snapshots/test_generate/test_model_serialization/pipeline.yaml +++ 
b/tests/pipeline/snapshots/test_generate/test_model_serialization/pipeline.yaml @@ -30,7 +30,7 @@ username: masked repository_name: masked url: masked - type: producer-app + type: producer-app-v2 values: imageTag: latest streams: diff --git a/tests/pipeline/snapshots/test_generate/test_no_input_topic/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_no_input_topic/pipeline.yaml index 68ca3f287..3c8a24836 100644 --- a/tests/pipeline/snapshots/test_generate/test_no_input_topic/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_no_input_topic/pipeline.yaml @@ -55,7 +55,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: commandLine: CONVERT_XML: true @@ -128,7 +128,7 @@ test-output-extra: configs: {} label: test-output - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: diff --git a/tests/pipeline/snapshots/test_generate/test_no_user_defined_components/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_no_user_defined_components/pipeline.yaml index 01a8241dc..82eaf14ca 100644 --- a/tests/pipeline/snapshots/test_generate/test_no_user_defined_components/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_no_user_defined_components/pipeline.yaml @@ -1,5 +1,5 @@ - _cleaner: - name: streams-app + name: streams-app-v2 namespace: example-namespace prefix: resources-no-user-defined-components- repo_config: @@ -19,7 +19,7 @@ brokers: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-no-user-defined-components-streams-app-error + errorTopic: resources-no-user-defined-components-streams-app-v2-error inputTopics: - example-topic outputTopic: example-output @@ -30,7 +30,7 @@ topics: example-topic: type: input - name: streams-app + name: streams-app-v2 namespace: example-namespace prefix: resources-no-user-defined-components- repo_config: @@ -44,13 +44,13 @@ example-output: configs: {} type: output - resources-no-user-defined-components-streams-app-error: + resources-no-user-defined-components-streams-app-v2-error: configs: cleanup.policy: compact,delete partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: image: fake-image imageTag: latest @@ -61,7 +61,7 @@ brokers: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-no-user-defined-components-streams-app-error + errorTopic: resources-no-user-defined-components-streams-app-v2-error inputTopics: - example-topic outputTopic: example-output diff --git a/tests/pipeline/snapshots/test_generate/test_prefix_pipeline_component/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_prefix_pipeline_component/pipeline.yaml index 7e0b3bcae..6671e2580 100644 --- a/tests/pipeline/snapshots/test_generate/test_prefix_pipeline_component/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_prefix_pipeline_component/pipeline.yaml @@ -31,7 +31,7 @@ insecure_skip_tls_verify: false repository_name: bakdata-streams-bootstrap url: https://bakdata.github.io/streams-bootstrap/ - type: producer-app + type: producer-app-v2 values: debug: true image: ${DOCKER_REGISTRY}/atm-demo-accountproducer diff --git a/tests/pipeline/snapshots/test_generate/test_read_from_component/pipeline.yaml 
b/tests/pipeline/snapshots/test_generate/test_read_from_component/pipeline.yaml index 11fd5db4e..81044a512 100644 --- a/tests/pipeline/snapshots/test_generate/test_read_from_component/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_read_from_component/pipeline.yaml @@ -30,7 +30,7 @@ resources-read-from-component-producer1: configs: {} type: output - type: producer-app + type: producer-app-v2 values: imageTag: latest streams: @@ -70,7 +70,7 @@ resources-read-from-component-producer2: configs: {} type: output - type: producer-app + type: producer-app-v2 values: imageTag: latest streams: @@ -256,7 +256,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: @@ -450,7 +450,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: @@ -517,7 +517,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: @@ -583,7 +583,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: @@ -649,7 +649,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: @@ -712,7 +712,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: @@ -774,7 +774,7 @@ partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: imageTag: latest persistence: diff --git a/tests/pipeline/snapshots/test_generate/test_streams_bootstrap_v3/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_streams_bootstrap/pipeline.yaml similarity index 90% rename from tests/pipeline/snapshots/test_generate/test_streams_bootstrap_v3/pipeline.yaml rename to tests/pipeline/snapshots/test_generate/test_streams_bootstrap/pipeline.yaml index ebf90338f..049ec8487 100644 --- a/tests/pipeline/snapshots/test_generate/test_streams_bootstrap_v3/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_streams_bootstrap/pipeline.yaml @@ -1,7 +1,7 @@ - _cleaner: name: my-producer-app namespace: example-namespace - prefix: resources-streams-bootstrap-v3- + prefix: resources-streams-bootstrap- repo_config: repo_auth_flags: insecure_skip_tls_verify: false @@ -20,10 +20,10 @@ my-producer-app-output-topic-label: my-labeled-producer-app-topic-output outputTopic: my-producer-app-output-topic schemaRegistryUrl: http://localhost:8081/ - version: 3.0.0-SNAPSHOT + version: 3.0.1 name: my-producer-app namespace: example-namespace - prefix: resources-streams-bootstrap-v3- + prefix: resources-streams-bootstrap- repo_config: repo_auth_flags: insecure_skip_tls_verify: false @@ -50,11 +50,11 @@ my-producer-app-output-topic-label: my-labeled-producer-app-topic-output outputTopic: my-producer-app-output-topic schemaRegistryUrl: http://localhost:8081/ - version: 3.0.0-SNAPSHOT + version: 3.0.1 - _cleaner: name: my-streams-app namespace: example-namespace - prefix: resources-streams-bootstrap-v3- + prefix: resources-streams-bootstrap- repo_config: repo_auth_flags: insecure_skip_tls_verify: false @@ -72,7 +72,7 @@ bootstrapServers: 
http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-streams-bootstrap-v3-my-streams-app-error + errorTopic: resources-streams-bootstrap-my-streams-app-error inputPattern: my-input-pattern inputTopics: - my-input-topic @@ -88,7 +88,7 @@ persistence: enabled: false statefulSet: false - version: 3.0.0-SNAPSHOT + version: 3.0.1 from: components: {} topics: @@ -103,7 +103,7 @@ label: my-input-topic-label name: my-streams-app namespace: example-namespace - prefix: resources-streams-bootstrap-v3- + prefix: resources-streams-bootstrap- repo_config: repo_auth_flags: insecure_skip_tls_verify: false @@ -121,7 +121,7 @@ my-output-topic: configs: {} type: output - resources-streams-bootstrap-v3-my-streams-app-error: + resources-streams-bootstrap-my-streams-app-error: configs: cleanup.policy: compact,delete partitions_count: 1 @@ -138,7 +138,7 @@ bootstrapServers: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-streams-bootstrap-v3-my-streams-app-error + errorTopic: resources-streams-bootstrap-my-streams-app-error inputPattern: my-input-pattern inputTopics: - my-input-topic @@ -154,5 +154,5 @@ persistence: enabled: false statefulSet: false - version: 3.0.0-SNAPSHOT + version: 3.0.1 diff --git a/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_absolute_defaults_path/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_absolute_defaults_path/pipeline.yaml index f12f54a12..9a9d1d77e 100644 --- a/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_absolute_defaults_path/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_absolute_defaults_path/pipeline.yaml @@ -36,7 +36,7 @@ configs: {} partitions_count: 3 type: output - type: producer-app + type: producer-app-v2 values: imageTag: latest resources: @@ -99,7 +99,7 @@ configs: {} partitions_count: 3 type: output - type: streams-app + type: streams-app-v2 values: image: some-image imageTag: latest diff --git a/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_relative_defaults_path/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_relative_defaults_path/pipeline.yaml index f12f54a12..9a9d1d77e 100644 --- a/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_relative_defaults_path/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_with_custom_config_with_relative_defaults_path/pipeline.yaml @@ -36,7 +36,7 @@ configs: {} partitions_count: 3 type: output - type: producer-app + type: producer-app-v2 values: imageTag: latest resources: @@ -99,7 +99,7 @@ configs: {} partitions_count: 3 type: output - type: streams-app + type: streams-app-v2 values: image: some-image imageTag: latest diff --git a/tests/pipeline/snapshots/test_generate/test_with_env_defaults/pipeline.yaml b/tests/pipeline/snapshots/test_generate/test_with_env_defaults/pipeline.yaml index cdb6f4f3a..e65e5b5b4 100644 --- a/tests/pipeline/snapshots/test_generate/test_with_env_defaults/pipeline.yaml +++ b/tests/pipeline/snapshots/test_generate/test_with_env_defaults/pipeline.yaml @@ -1,5 +1,5 @@ - _cleaner: - name: streams-app-development + name: streams-app-v2-development namespace: development-namespace prefix: resources-pipeline-with-env-defaults- repo_config: @@ -19,7 +19,7 @@ brokers: 
http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-pipeline-with-env-defaults-streams-app-development-error + errorTopic: resources-pipeline-with-env-defaults-streams-app-v2-development-error inputTopics: - example-topic outputTopic: example-output @@ -30,7 +30,7 @@ topics: example-topic: type: input - name: streams-app-development + name: streams-app-v2-development namespace: development-namespace prefix: resources-pipeline-with-env-defaults- repo_config: @@ -44,13 +44,13 @@ example-output: configs: {} type: output - resources-pipeline-with-env-defaults-streams-app-development-error: + resources-pipeline-with-env-defaults-streams-app-v2-development-error: configs: cleanup.policy: compact,delete partitions_count: 1 type: error value_schema: com.bakdata.kafka.DeadLetter - type: streams-app + type: streams-app-v2 values: image: fake-image imageTag: latest @@ -61,7 +61,7 @@ brokers: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 config: large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator - errorTopic: resources-pipeline-with-env-defaults-streams-app-development-error + errorTopic: resources-pipeline-with-env-defaults-streams-app-v2-development-error inputTopics: - example-topic outputTopic: example-output diff --git a/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap_v3/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml similarity index 76% rename from tests/pipeline/snapshots/test_manifest/test_streams_bootstrap_v3/manifest.yaml rename to tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml index 89ce55efb..e685eac55 100644 --- a/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap_v3/manifest.yaml +++ b/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml @@ -3,17 +3,17 @@ apiVersion: batch/v1 kind: Job metadata: labels: - app: resources-streams-bootstrap-v3-my-producer-app - chart: producer-app-3.0.0-SNAPSHOT - release: resources-streams-bootstrap-v3-my-producer-app - name: resources-streams-bootstrap-v3-my-producer-app + app: resources-streams-bootstrap-my-producer-app + chart: producer-app-3.0.1 + release: resources-streams-bootstrap-my-producer-app + name: resources-streams-bootstrap-my-producer-app spec: backoffLimit: 6 template: metadata: labels: - app: resources-streams-bootstrap-v3-my-producer-app - release: resources-streams-bootstrap-v3-my-producer-app + app: resources-streams-bootstrap-my-producer-app + release: resources-streams-bootstrap-my-producer-app spec: containers: - env: @@ -33,7 +33,7 @@ spec: value: '-XX:MaxRAMPercentage=75.0 ' image: my-registry/my-producer-image:1.0.0 imagePullPolicy: Always - name: resources-streams-bootstrap-v3-my-producer-app + name: resources-streams-bootstrap-my-producer-app resources: limits: cpu: 500m @@ -52,11 +52,11 @@ data: kind: ConfigMap metadata: labels: - app: resources-streams-bootstrap-v3-my-streams-app - chart: streams-app-3.0.0-SNAPSHOT + app: resources-streams-bootstrap-my-streams-app + chart: streams-app-3.0.1 heritage: Helm - release: resources-streams-bootstrap-v3-my-streams-app - name: resources-streams-bootstrap-v3-my-streams-app-jmx-configmap + release: resources-streams-bootstrap-my-streams-app + name: resources-streams-bootstrap-my-streams-app-jmx-configmap --- apiVersion: apps/v1 @@ -65,24 +65,24 @@ metadata: annotations: consumerGroup: my-streams-app-id labels: - app: 
resources-streams-bootstrap-v3-my-streams-app - chart: streams-app-3.0.0-SNAPSHOT - release: resources-streams-bootstrap-v3-my-streams-app - name: resources-streams-bootstrap-v3-my-streams-app + app: resources-streams-bootstrap-my-streams-app + chart: streams-app-3.0.1 + release: resources-streams-bootstrap-my-streams-app + name: resources-streams-bootstrap-my-streams-app spec: replicas: 1 selector: matchLabels: - app: resources-streams-bootstrap-v3-my-streams-app - release: resources-streams-bootstrap-v3-my-streams-app + app: resources-streams-bootstrap-my-streams-app + release: resources-streams-bootstrap-my-streams-app template: metadata: annotations: prometheus.io/port: '5556' prometheus.io/scrape: 'true' labels: - app: resources-streams-bootstrap-v3-my-streams-app - release: resources-streams-bootstrap-v3-my-streams-app + app: resources-streams-bootstrap-my-streams-app + release: resources-streams-bootstrap-my-streams-app spec: containers: - env: @@ -105,7 +105,7 @@ spec: - name: APP_OUTPUT_TOPIC value: my-output-topic - name: APP_ERROR_TOPIC - value: resources-streams-bootstrap-v3-my-streams-app-error + value: resources-streams-bootstrap-my-streams-app-error - name: APP_LABELED_OUTPUT_TOPICS value: my-output-topic-label=my-labeled-topic-output, - name: APP_LABELED_INPUT_TOPICS @@ -121,7 +121,7 @@ spec: -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 ' image: my-registry/my-streams-app-image:1.0.0 imagePullPolicy: Always - name: resources-streams-bootstrap-v3-my-streams-app + name: resources-streams-bootstrap-my-streams-app ports: - containerPort: 5555 name: jmx @@ -159,6 +159,6 @@ spec: terminationGracePeriodSeconds: 300 volumes: - configMap: - name: resources-streams-bootstrap-v3-my-streams-app-jmx-configmap + name: resources-streams-bootstrap-my-streams-app-jmx-configmap name: jmx-config diff --git a/tests/pipeline/test_clean.py b/tests/pipeline/test_clean.py index 075bca7d5..017c63ff6 100644 --- a/tests/pipeline/test_clean.py +++ b/tests/pipeline/test_clean.py @@ -7,13 +7,13 @@ from kpops.cli.main import app from kpops.components.base_components import HelmApp -from kpops.components.streams_bootstrap.producer.producer_app import ( - ProducerApp, +from kpops.components.streams_bootstrap_v2.producer.producer_app import ( ProducerAppCleaner, + ProducerAppV2, ) -from kpops.components.streams_bootstrap.streams.streams_app import ( - StreamsApp, +from kpops.components.streams_bootstrap_v2.streams.streams_app import ( StreamsAppCleaner, + StreamsAppV2, ) runner = CliRunner() @@ -47,8 +47,8 @@ def helm_mock(self, mocker: MockerFixture) -> MagicMock: def test_order(self, mocker: MockerFixture): # destroy - producer_app_mock_destroy = mocker.patch.object(ProducerApp, "destroy") - streams_app_mock_destroy = mocker.patch.object(StreamsApp, "destroy") + producer_app_mock_destroy = mocker.patch.object(ProducerAppV2, "destroy") + streams_app_mock_destroy = mocker.patch.object(StreamsAppV2, "destroy") helm_app_mock_destroy = mocker.patch.object(HelmApp, "destroy") # clean diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index de7d29a97..c6b913ba9 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -12,24 +12,24 @@ ) from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.components.common.topic import OutputTopicTypes, TopicConfig -from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp -from 
kpops.components.streams_bootstrap.streams.streams_app import StreamsApp -from kpops.components.streams_bootstrap_v3 import ProducerAppV3, StreamsAppV3 +from kpops.components.streams_bootstrap import ProducerApp, StreamsApp +from kpops.components.streams_bootstrap_v2.producer.producer_app import ProducerAppV2 +from kpops.components.streams_bootstrap_v2.streams.streams_app import StreamsAppV2 -class MyProducerApp(ProducerAppV3): ... +class MyProducerApp(ProducerApp): ... -class MyStreamsApp(StreamsAppV3): ... +class MyStreamsApp(StreamsApp): ... -class ScheduledProducer(ProducerApp): ... +class ScheduledProducer(ProducerAppV2): ... -class Converter(StreamsApp): ... +class Converter(StreamsAppV2): ... -class SubStreamsApp(StreamsApp): +class SubStreamsApp(StreamsAppV2): """Intermediary subclass of StreamsApp used for Filter.""" @@ -37,7 +37,7 @@ class Filter(SubStreamsApp): """Subsubclass of StreamsApp to test inheritance.""" -class ShouldInflate(StreamsApp): +class ShouldInflate(StreamsAppV2): @override def inflate(self) -> list[PipelineComponent]: inflate_steps = super().inflate() @@ -62,7 +62,7 @@ def inflate(self) -> list[PipelineComponent]: ), ) inflate_steps.append(kafka_connector) - streams_app = StreamsApp( + streams_app = StreamsAppV2( name=f"{self.name}-inflated-streams-app", to=ToSection( # type: ignore[reportGeneralTypeIssues] topics={ @@ -93,7 +93,7 @@ def provide_schema( return AvroSchema(schema) -class SimpleInflateConnectors(StreamsApp): +class SimpleInflateConnectors(StreamsAppV2): def inflate(self) -> list[PipelineComponent]: connector = KafkaSinkConnector( name="inflated-connector-name", diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index 5bb2e875a..1a4e6d085 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -4,17 +4,17 @@ from kpops.components.base_components.kafka_connector import KafkaSinkConnector from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.components.common.topic import OutputTopicTypes -from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp +from kpops.components.streams_bootstrap_v2.producer.producer_app import ProducerAppV2 +from kpops.components.streams_bootstrap_v2.streams.streams_app import StreamsAppV2 -class ScheduledProducer(ProducerApp): ... +class ScheduledProducer(ProducerAppV2): ... -class Converter(StreamsApp): ... +class Converter(StreamsAppV2): ... 
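In short, the component hunks above split the test fixtures across two module paths: the top-level kpops.components.streams_bootstrap package now provides the 3.x ProducerApp/StreamsApp, while the renamed kpops.components.streams_bootstrap_v2 package keeps the 2.x ProducerAppV2/StreamsAppV2. A minimal, illustrative sketch of the resulting imports and subclassing, reusing only names that appear in the hunks above (assumes kpops is installed; the chart/type comments are taken from the snapshot hunks earlier in this diff):

from kpops.components.streams_bootstrap import ProducerApp, StreamsApp
from kpops.components.streams_bootstrap_v2 import ProducerAppV2, StreamsAppV2


class MyProducerApp(ProducerApp): ...  # targets the streams-bootstrap 3.0.1 chart (test_streams_bootstrap snapshot)


class MyStreamsApp(StreamsApp): ...


class ScheduledProducer(ProducerAppV2): ...  # serialized as type: producer-app-v2 in the snapshots above


class Converter(StreamsAppV2): ...  # serialized as type: streams-app-v2 in the snapshots above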
-class ShouldInflate(StreamsApp): +class ShouldInflate(StreamsAppV2): @override def inflate(self) -> list[PipelineComponent]: inflate_steps = super().inflate() diff --git a/tests/pipeline/test_deploy.py b/tests/pipeline/test_deploy.py index 39f0acc8a..15a2092b9 100644 --- a/tests/pipeline/test_deploy.py +++ b/tests/pipeline/test_deploy.py @@ -6,6 +6,8 @@ from typer.testing import CliRunner from kpops.cli.main import app +from kpops.components.base_components import HelmApp +from kpops.components.streams_bootstrap_v2 import ProducerAppV2, StreamsAppV2 runner = CliRunner() @@ -22,15 +24,9 @@ def mock_helm(self, mocker: MockerFixture) -> MagicMock: ).return_value def test_order(self, mocker: MockerFixture): - producer_app_mock_deploy = mocker.patch( - "kpops.components.streams_bootstrap.producer.producer_app.ProducerApp.deploy", - ) - streams_app_mock_deploy = mocker.patch( - "kpops.components.streams_bootstrap.streams.streams_app.StreamsApp.deploy", - ) - helm_app_mock_deploy = mocker.patch( - "kpops.components.base_components.helm_app.HelmApp.deploy", - ) + producer_app_mock_deploy = mocker.patch.object(ProducerAppV2, "deploy") + streams_app_mock_deploy = mocker.patch.object(StreamsAppV2, "deploy") + helm_app_mock_deploy = mocker.patch.object(HelmApp, "deploy") mock_deploy = mocker.AsyncMock() mock_deploy.attach_mock(producer_app_mock_deploy, "producer_app_mock_deploy") mock_deploy.attach_mock(streams_app_mock_deploy, "streams_app_mock_deploy") diff --git a/tests/pipeline/test_destroy.py b/tests/pipeline/test_destroy.py index 139ce20c9..181ce9f85 100644 --- a/tests/pipeline/test_destroy.py +++ b/tests/pipeline/test_destroy.py @@ -6,6 +6,8 @@ from typer.testing import CliRunner from kpops.cli.main import app +from kpops.components.base_components import HelmApp +from kpops.components.streams_bootstrap_v2 import ProducerAppV2, StreamsAppV2 runner = CliRunner() @@ -22,15 +24,9 @@ def mock_helm(self, mocker: MockerFixture) -> MagicMock: ).return_value def test_order(self, mocker: MockerFixture): - producer_app_mock_destroy = mocker.patch( - "kpops.components.streams_bootstrap.producer.producer_app.ProducerApp.destroy", - ) - streams_app_mock_destroy = mocker.patch( - "kpops.components.streams_bootstrap.streams.streams_app.StreamsApp.destroy", - ) - helm_app_mock_destroy = mocker.patch( - "kpops.components.base_components.helm_app.HelmApp.destroy", - ) + producer_app_mock_destroy = mocker.patch.object(ProducerAppV2, "destroy") + streams_app_mock_destroy = mocker.patch.object(StreamsAppV2, "destroy") + helm_app_mock_destroy = mocker.patch.object(HelmApp, "destroy") mock_destroy = mocker.AsyncMock() mock_destroy.attach_mock(producer_app_mock_destroy, "producer_app_mock_destroy") mock_destroy.attach_mock(streams_app_mock_destroy, "streams_app_mock_destroy") diff --git a/tests/pipeline/test_generate.py b/tests/pipeline/test_generate.py index 5a2d0ead6..907dd8ed3 100644 --- a/tests/pipeline/test_generate.py +++ b/tests/pipeline/test_generate.py @@ -870,12 +870,12 @@ def test_substitution_in_resetter(self): == "override-default-image-tag" ) - def test_streams_bootstrap_v3(self, snapshot: Snapshot): + def test_streams_bootstrap(self, snapshot: Snapshot): result = runner.invoke( app, [ "generate", - str(RESOURCE_PATH / "streams-bootstrap-v3" / PIPELINE_YAML), + str(RESOURCE_PATH / "streams-bootstrap" / PIPELINE_YAML), ], catch_exceptions=False, ) diff --git a/tests/pipeline/test_manifest.py b/tests/pipeline/test_manifest.py index b58cb6a67..84b742ae0 100644 --- a/tests/pipeline/test_manifest.py +++ 
b/tests/pipeline/test_manifest.py @@ -125,12 +125,12 @@ def test_python_api(self, snapshot: Snapshot): assert len(resources) == 2 snapshot.assert_match(yaml.dump_all(resources), "resources") - def test_streams_bootstrap_v3(self, snapshot: Snapshot): + def test_streams_bootstrap(self, snapshot: Snapshot): result = runner.invoke( app, [ "manifest", - str(RESOURCE_PATH / "streams-bootstrap-v3" / PIPELINE_YAML), + str(RESOURCE_PATH / "streams-bootstrap" / PIPELINE_YAML), ], catch_exceptions=False, ) diff --git a/tests/pipeline/test_reset.py b/tests/pipeline/test_reset.py index b282812b4..39f31cf63 100644 --- a/tests/pipeline/test_reset.py +++ b/tests/pipeline/test_reset.py @@ -7,9 +7,11 @@ from kpops.cli.main import app from kpops.components.base_components import HelmApp -from kpops.components.streams_bootstrap import ProducerApp, StreamsApp -from kpops.components.streams_bootstrap.producer.producer_app import ProducerAppCleaner -from kpops.components.streams_bootstrap.streams.streams_app import StreamsAppCleaner +from kpops.components.streams_bootstrap_v2 import ProducerAppV2, StreamsAppV2 +from kpops.components.streams_bootstrap_v2.producer.producer_app import ( + ProducerAppCleaner, +) +from kpops.components.streams_bootstrap_v2.streams.streams_app import StreamsAppCleaner runner = CliRunner() @@ -27,8 +29,8 @@ def helm_mock(self, mocker: MockerFixture) -> MagicMock: def test_order(self, mocker: MockerFixture): # destroy - producer_app_mock_destroy = mocker.patch.object(ProducerApp, "destroy") - streams_app_mock_destroy = mocker.patch.object(StreamsApp, "destroy") + producer_app_mock_destroy = mocker.patch.object(ProducerAppV2, "destroy") + streams_app_mock_destroy = mocker.patch.object(StreamsAppV2, "destroy") helm_app_mock_destroy = mocker.patch.object(HelmApp, "destroy") # reset
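The deploy, destroy, reset, and clean test updates above all repeat one pattern: lifecycle methods are patched on the class objects via mocker.patch.object rather than through dotted string paths, so the mocks are unaffected by the streams_bootstrap -> streams_bootstrap_v2 module rename. A condensed, illustrative sketch of that pattern (test_order mirrors the tests above and assumes pytest-mock is installed):

from pytest_mock import MockerFixture

from kpops.components.base_components import HelmApp
from kpops.components.streams_bootstrap_v2 import ProducerAppV2, StreamsAppV2


def test_order(mocker: MockerFixture):
    # Patch on the class object; a dotted string path would go stale whenever
    # the defining module moves.
    producer_destroy = mocker.patch.object(ProducerAppV2, "destroy")
    streams_destroy = mocker.patch.object(StreamsAppV2, "destroy")
    helm_destroy = mocker.patch.object(HelmApp, "destroy")

    # Attach every patched method to one parent mock so the call order across
    # components can later be asserted from the parent's recorded calls.
    mock_destroy = mocker.AsyncMock()
    mock_destroy.attach_mock(producer_destroy, "producer_app_mock_destroy")
    mock_destroy.attach_mock(streams_destroy, "streams_app_mock_destroy")
    mock_destroy.attach_mock(helm_destroy, "helm_app_mock_destroy")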